/// <summary>
/// Transcodes a recorded .ts/.mpg file to MP4 by building a DirectShow graph:
/// TsReader source -> mpeg2/h264 video + mpeg2/aac audio decoders -> encoders/muxer/file writer.
/// The graph is pre-run briefly to let filters negotiate formats, then rebuilt and started
/// for the actual transcode. The transcode itself runs asynchronously after this returns.
/// </summary>
/// <param name="info">Describes the recording to transcode (input file path in info.file).</param>
/// <param name="format">Requested output video format; checked via Supports().</param>
/// <param name="quality">Requested transcoding quality preset (used by EncoderSet).</param>
/// <param name="standard">TV standard of the source material.</param>
/// <returns>true when the transcoding graph was built and started; false on any failure.</returns>
public bool Transcode(TranscodeInfo info, MediaPortal.Core.Transcoding.VideoFormat format,
                      MediaPortal.Core.Transcoding.Quality quality, Standard standard)
{
  if (!Supports(format))
  {
    return false;
  }
  // Only transport streams and mpeg program streams can be fed to TsReader.
  // Use the invariant culture so the check is not broken by locale-specific casing (e.g. Turkish 'I').
  string ext = System.IO.Path.GetExtension(info.file).ToLowerInvariant();
  if (ext != ".ts" && ext != ".mpg")
  {
    Log.Info("TSReader2MP4: wrong file format");
    return false;
  }
  try
  {
    graphBuilder = (IGraphBuilder)new FilterGraph();
    // Register the graph in the running object table so it can be inspected with GraphEdit.
    _rotEntry = new DsROTEntry((IFilterGraph)graphBuilder);
    Log.Info("TSReader2MP4: add filesource");
    TsReader reader = new TsReader();
    tsreaderSource = (IBaseFilter)reader;
    IBaseFilter filter = (IBaseFilter)tsreaderSource;
    graphBuilder.AddFilter(filter, "TSReader Source");
    IFileSourceFilter fileSource = (IFileSourceFilter)tsreaderSource;
    Log.Info("TSReader2MP4: load file:{0}", info.file);
    int hr = fileSource.Load(info.file, null);

    // Read the user's preferred decoder filter names from the MediaPortal configuration.
    string strVideoCodec = "";
    string strH264VideoCodec = "";
    string strAudioCodec = "";
    string strAACAudioCodec = "";
    using (MediaPortal.Profile.Settings xmlreader = new MediaPortal.Profile.MPSettings())
    {
      strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "");
      strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "");
      strAACAudioCodec = xmlreader.GetValueAsString("mytv", "aacaudiocodec", "");
      strH264VideoCodec = xmlreader.GetValueAsString("mytv", "h264videocodec", "");
    }

    // Find the type of decoder required for the output video & audio pins on TSReader.
    Log.Info("TSReader2MP4: find tsreader compatible audio/video decoders");
    IPin pinOut0, pinOut1;
    IPin pinIn0, pinIn1;
    pinOut0 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 0); //audio
    pinOut1 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 1); //video
    if (pinOut0 == null || pinOut1 == null)
    {
      Log.Error("TSReader2MP4: FAILED: unable to get output pins of tsreader");
      Cleanup();
      return false;
    }
    // Probe the audio pin's media types: LATM AAC means we need the AAC decoder.
    bool usingAAC = false;
    IEnumMediaTypes enumMediaTypes;
    hr = pinOut0.EnumMediaTypes(out enumMediaTypes);
    while (true)
    {
      AMMediaType[] mediaTypes = new AMMediaType[1];
      int typesFetched;
      hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
      if (hr != 0 || typesFetched == 0)
      {
        break;
      }
      if (mediaTypes[0].majorType == MediaType.Audio && mediaTypes[0].subType == MediaSubType.LATMAAC)
      {
        Log.Info("TSReader2MP4: found LATM AAC audio out pin on tsreader");
        usingAAC = true;
      }
    }
    // Probe the video pin's media types: AVC1 means we need the H.264 decoder.
    bool usingH264 = false;
    hr = pinOut1.EnumMediaTypes(out enumMediaTypes);
    while (true)
    {
      AMMediaType[] mediaTypes = new AMMediaType[1];
      int typesFetched;
      hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
      if (hr != 0 || typesFetched == 0)
      {
        break;
      }
      if (mediaTypes[0].majorType == MediaType.Video && mediaTypes[0].subType == AVC1)
      {
        Log.Info("TSReader2MP4: found H.264 video out pin on tsreader");
        usingH264 = true;
      }
    }

    // Add the type of decoder required for the output video & audio pins on TSReader.
    Log.Info("TSReader2MP4: add audio/video decoders to graph");
    if (usingH264 == false)
    {
      Log.Info("TSReader2MP4: add mpeg2 video decoder:{0}", strVideoCodec);
      VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
      if (VideoCodec == null)
      {
        Log.Error("TSReader2MP4: unable to add mpeg2 video decoder");
        Cleanup();
        return false;
      }
    }
    else
    {
      Log.Info("TSReader2MP4: add h264 video codec:{0}", strH264VideoCodec);
      VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strH264VideoCodec);
      if (VideoCodec == null)
      {
        Log.Error("TSReader2MP4: FAILED:unable to add h264 video codec");
        Cleanup();
        return false;
      }
    }
    if (usingAAC == false)
    {
      Log.Info("TSReader2MP4: add mpeg2 audio codec:{0}", strAudioCodec);
      AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
      if (AudioCodec == null)
      {
        Log.Error("TSReader2MP4: FAILED:unable to add mpeg2 audio codec");
        Cleanup();
        return false;
      }
    }
    else
    {
      Log.Info("TSReader2MP4: add aac audio codec:{0}", strAACAudioCodec);
      AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAACAudioCodec);
      if (AudioCodec == null)
      {
        Log.Error("TSReader2MP4: FAILED:unable to add aac audio codec");
        Cleanup();
        return false;
      }
    }
    Log.Info("TSReader2MP4: connect tsreader->audio/video decoders");
    //connect output #0 (audio) of tsreader->audio decoder input pin 0
    //connect output #1 (video) of tsreader->video decoder input pin 0
    pinIn0 = DsFindPin.ByDirection(AudioCodec, PinDirection.Input, 0); //audio
    pinIn1 = DsFindPin.ByDirection(VideoCodec, PinDirection.Input, 0); //video
    if (pinIn0 == null || pinIn1 == null)
    {
      Log.Error("TSReader2MP4: FAILED: unable to get pins of video/audio codecs");
      Cleanup();
      return false;
    }
    hr = graphBuilder.Connect(pinOut0, pinIn0);
    if (hr != 0)
    {
      Log.Error("TSReader2MP4: FAILED: unable to connect audio pins :0x{0:X}", hr);
      Cleanup();
      return false;
    }
    hr = graphBuilder.Connect(pinOut1, pinIn1);
    if (hr != 0)
    {
      Log.Error("TSReader2MP4: FAILED: unable to connect video pins :0x{0:X}", hr);
      Cleanup();
      return false;
    }
    //add encoders, muxer & filewriter
    if (!AddCodecs(graphBuilder, info))
    {
      return false;
    }
    //setup graph controls
    mediaControl = graphBuilder as IMediaControl;
    mediaSeeking = tsreaderSource as IMediaSeeking;
    mediaEvt = graphBuilder as IMediaEventEx;
    mediaPos = graphBuilder as IMediaPosition;
    //get file duration
    Log.Info("TSReader2MP4: Get duration of recording");
    // Seek far past the end (5 hours in 100ns units); TsReader clamps to the real end,
    // so the resulting position is the recording's duration.
    long lTime = 5 * 60 * 60;
    lTime *= 10000000;
    long pStop = 0;
    hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning,
                                   new DsLong(pStop), AMSeekingSeekingFlags.NoPositioning);
    if (hr == 0)
    {
      long lStreamPos;
      mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
      m_dDuration = lStreamPos;
      lTime = 0; // rewind to the start before the real run
      mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning,
                                new DsLong(pStop), AMSeekingSeekingFlags.NoPositioning);
    }
    double duration = m_dDuration / 10000000d;
    Log.Info("TSReader2MP4: recording duration: {0}", MediaPortal.Util.Utils.SecondsToHMSString((int)duration));
    //run the graph to initialize the filters to be sure
    hr = mediaControl.Run();
    if (hr != 0)
    {
      Log.Error("TSReader2MP4: FAILED: unable to start graph :0x{0:X}", hr);
      Cleanup();
      return false;
    }
    // Pre-run for up to ~2 seconds of stream time (max 20 x 100ms polls) so all filters
    // complete their media type negotiation before the graph is torn down and rebuilt.
    int maxCount = 20;
    while (true)
    {
      long lCurrent;
      mediaSeeking.GetCurrentPosition(out lCurrent);
      double dpos = (double)lCurrent;
      dpos /= 10000000d;
      System.Threading.Thread.Sleep(100);
      if (dpos >= 2.0d)
      {
        break;
      }
      maxCount--;
      if (maxCount <= 0)
      {
        break;
      }
    }
    mediaControl.Stop();
    FilterState state;
    mediaControl.GetState(500, out state);
    GC.Collect();
    GC.Collect();
    GC.Collect();
    GC.WaitForPendingFinalizers();
    // Rebuild the encoding chain now that the decoders have negotiated their formats.
    graphBuilder.RemoveFilter(mp4Muxer);
    graphBuilder.RemoveFilter(h264Encoder);
    graphBuilder.RemoveFilter(aacEncoder);
    graphBuilder.RemoveFilter((IBaseFilter)fileWriterFilter);
    if (!AddCodecs(graphBuilder, info))
    {
      return false;
    }
    //Set Encoder quality & Muxer settings
    if (!EncoderSet(graphBuilder, info))
    {
      return false;
    }
    //start transcoding - run the graph
    Log.Info("TSReader2MP4: start transcoding");
    // NOTE(review): the previous revision attempted flow control here by calling
    // IAsyncReader.SyncReadAligned on a reference that was always null (and on an
    // IBaseFilter cast to IMediaSample, which also always yields null). That threw a
    // NullReferenceException on every call and aborted the transcode. The stub has been
    // removed; a real implementation would need CBasePin/CPullPin support so the decoder
    // output is only parsed once the encoders are ready.
    hr = mediaControl.Run();
    if (hr != 0)
    {
      Log.Error("TSReader2MP4: FAILED:unable to start graph :0x{0:X}", hr);
      Cleanup();
      return false;
    }
  }
  catch (Exception ex)
  {
    Log.Error("TSReader2MP4: Unable create graph: {0}", ex.Message);
    Cleanup();
    return false;
  }
  return true;
}
/// <summary>
/// Transcodes a recorded .ts/.mpg file to WMV by building a DirectShow graph:
/// TsReader source -> mpeg2/h264 video + mpeg2/aac audio decoders -> WM ASF writer.
/// The graph is pre-run briefly so the filters negotiate formats, then the ASF writer
/// is re-added and the real transcode started. The transcode runs asynchronously.
/// </summary>
/// <param name="info">Describes the recording to transcode (input file path in info.file).</param>
/// <param name="format">Requested output video format; checked via Supports().</param>
/// <param name="quality">Requested transcoding quality preset (passed to AddWmAsfWriter).</param>
/// <param name="standard">TV standard of the source material (passed to AddWmAsfWriter).</param>
/// <returns>true when the transcoding graph was built and started; false on any failure.</returns>
public bool Transcode(TranscodeInfo info, VideoFormat format, Quality quality, Standard standard)
{
  try
  {
    if (!Supports(format))
    {
      return false;
    }
    // Only transport streams and mpeg program streams can be fed to TsReader.
    // Lower-case once with the invariant culture instead of per comparison.
    string ext = System.IO.Path.GetExtension(info.file).ToLowerInvariant();
    if (ext != ".ts" && ext != ".mpg")
    {
      Log.Info("TSReader2WMV: wrong file format");
      return false;
    }
    Log.Info("TSReader2WMV: create graph");
    graphBuilder = (IGraphBuilder)new FilterGraph();
    // Register the graph in the running object table so it can be inspected with GraphEdit.
    _rotEntry = new DsROTEntry((IFilterGraph)graphBuilder);
    Log.Info("TSReader2WMV: add filesource");
    TsReader reader = new TsReader();
    tsreaderSource = (IBaseFilter)reader;
    //ITSReader ireader = (ITSReader)reader;
    //ireader.SetTsReaderCallback(this);
    //ireader.SetRequestAudioChangeCallback(this);
    IBaseFilter filter = (IBaseFilter)tsreaderSource;
    graphBuilder.AddFilter(filter, "TSReader Source");
    IFileSourceFilter fileSource = (IFileSourceFilter)tsreaderSource;
    Log.Info("TSReader2WMV: load file:{0}", info.file);
    int hr = fileSource.Load(info.file, null);

    // Read the user's preferred decoder filter names from the MediaPortal configuration.
    string strVideoCodec = "";
    string strH264VideoCodec = "";
    string strAudioCodec = "";
    string strAACAudioCodec = "";
    using (MediaPortal.Profile.Settings xmlreader = new MediaPortal.Profile.MPSettings())
    {
      strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "");
      strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "");
      strAACAudioCodec = xmlreader.GetValueAsString("mytv", "aacaudiocodec", "");
      strH264VideoCodec = xmlreader.GetValueAsString("mytv", "h264videocodec", "");
    }

    // Find the type of decoder required for the output video & audio pins on TSReader.
    Log.Info("TSReader2WMV: find tsreader compatible audio/video decoders");
    IPin pinOut0, pinOut1;
    IPin pinIn0, pinIn1;
    pinOut0 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 0); //audio
    pinOut1 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 1); //video
    if (pinOut0 == null || pinOut1 == null)
    {
      Log.Error("TSReader2WMV: FAILED: unable to get output pins of tsreader");
      Cleanup();
      return false;
    }
    // Probe the audio pin's media types: LATM AAC means we need the AAC decoder.
    bool usingAAC = false;
    IEnumMediaTypes enumMediaTypes;
    hr = pinOut0.EnumMediaTypes(out enumMediaTypes);
    while (true)
    {
      AMMediaType[] mediaTypes = new AMMediaType[1];
      int typesFetched;
      hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
      if (hr != 0 || typesFetched == 0)
      {
        break;
      }
      if (mediaTypes[0].majorType == MediaType.Audio && mediaTypes[0].subType == MediaSubType.LATMAAC)
      {
        Log.Info("TSReader2WMV: found LATM AAC audio out pin on tsreader");
        usingAAC = true;
      }
    }
    // Probe the video pin's media types: AVC1 means we need the H.264 decoder.
    bool usingH264 = false;
    hr = pinOut1.EnumMediaTypes(out enumMediaTypes);
    while (true)
    {
      AMMediaType[] mediaTypes = new AMMediaType[1];
      int typesFetched;
      hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
      if (hr != 0 || typesFetched == 0)
      {
        break;
      }
      if (mediaTypes[0].majorType == MediaType.Video && mediaTypes[0].subType == AVC1)
      {
        Log.Info("TSReader2WMV: found H.264 video out pin on tsreader");
        usingH264 = true;
      }
    }

    // Add the type of decoder required for the output video & audio pins on TSReader.
    Log.Info("TSReader2WMV: add audio/video decoders to graph");
    if (usingH264 == false)
    {
      Log.Info("TSReader2WMV: add mpeg2 video decoder:{0}", strVideoCodec);
      VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
      if (VideoCodec == null)
      {
        Log.Error("TSReader2WMV: unable to add mpeg2 video decoder");
        Cleanup();
        return false;
      }
    }
    else
    {
      Log.Info("TSReader2WMV: add h264 video codec:{0}", strH264VideoCodec);
      VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strH264VideoCodec);
      if (VideoCodec == null)
      {
        Log.Error("TSReader2WMV: FAILED:unable to add h264 video codec");
        Cleanup();
        return false;
      }
    }
    if (usingAAC == false)
    {
      Log.Info("TSReader2WMV: add mpeg2 audio codec:{0}", strAudioCodec);
      AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
      if (AudioCodec == null)
      {
        Log.Error("TSReader2WMV: FAILED:unable to add mpeg2 audio codec");
        Cleanup();
        return false;
      }
    }
    else
    {
      Log.Info("TSReader2WMV: add aac audio codec:{0}", strAACAudioCodec);
      AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAACAudioCodec);
      if (AudioCodec == null)
      {
        Log.Error("TSReader2WMV: FAILED:unable to add aac audio codec");
        Cleanup();
        return false;
      }
    }
    Log.Info("TSReader2WMV: connect tsreader->audio/video decoders");
    //connect output #0 (audio) of tsreader->audio decoder input pin 0
    //connect output #1 (video) of tsreader->video decoder input pin 0
    pinIn0 = DsFindPin.ByDirection(AudioCodec, PinDirection.Input, 0); //audio
    pinIn1 = DsFindPin.ByDirection(VideoCodec, PinDirection.Input, 0); //video
    if (pinIn0 == null || pinIn1 == null)
    {
      Log.Error("TSReader2WMV: FAILED: unable to get pins of video/audio codecs");
      Cleanup();
      return false;
    }
    hr = graphBuilder.Connect(pinOut0, pinIn0);
    if (hr != 0)
    {
      Log.Error("TSReader2WMV: FAILED: unable to connect audio pins :0x{0:X}", hr);
      Cleanup();
      return false;
    }
    hr = graphBuilder.Connect(pinOut1, pinIn1);
    if (hr != 0)
    {
      Log.Error("TSReader2WMV: FAILED: unable to connect video pins :0x{0:X}", hr);
      Cleanup();
      return false;
    }
    // The output file sits next to the input, with a .wmv extension.
    string outputFilename = System.IO.Path.ChangeExtension(info.file, ".wmv");
    if (!AddWmAsfWriter(outputFilename, quality, standard))
    {
      return false;
    }
    Log.Info("TSReader2WMV: start pre-run");
    mediaControl = graphBuilder as IMediaControl;
    mediaSeeking = tsreaderSource as IMediaSeeking;
    mediaEvt = graphBuilder as IMediaEventEx;
    mediaPos = graphBuilder as IMediaPosition;
    //get file duration
    // Seek far past the end (5 hours in 100ns units); TsReader clamps to the real end,
    // so the resulting position is the recording's duration.
    long lTime = 5 * 60 * 60;
    lTime *= 10000000;
    long pStop = 0;
    hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning,
                                   new DsLong(pStop), AMSeekingSeekingFlags.NoPositioning);
    if (hr == 0)
    {
      long lStreamPos;
      mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
      m_dDuration = lStreamPos;
      lTime = 0; // rewind to the start before the real run
      mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning,
                                new DsLong(pStop), AMSeekingSeekingFlags.NoPositioning);
    }
    double duration = m_dDuration / 10000000d;
    Log.Info("TSReader2WMV: movie duration:{0}", Util.Utils.SecondsToHMSString((int)duration));
    hr = mediaControl.Run();
    if (hr != 0)
    {
      Log.Error("TSReader2WMV: FAILED: unable to start graph :0x{0:X}", hr);
      Cleanup();
      return false;
    }
    // Pre-run for up to ~2 seconds of stream time (max 20 x 100ms polls) so all filters
    // complete their media type negotiation before the ASF writer is re-added.
    int maxCount = 20;
    while (true)
    {
      long lCurrent;
      mediaSeeking.GetCurrentPosition(out lCurrent);
      double dpos = (double)lCurrent;
      dpos /= 10000000d;
      System.Threading.Thread.Sleep(100);
      if (dpos >= 2.0d)
      {
        break;
      }
      maxCount--;
      if (maxCount <= 0)
      {
        break;
      }
    }
    Log.Info("TSReader2WMV: pre-run done");
    Log.Info("TSReader2WMV: Get duration of movie");
    mediaControl.Stop();
    FilterState state;
    mediaControl.GetState(500, out state);
    GC.Collect();
    GC.Collect();
    GC.Collect();
    GC.WaitForPendingFinalizers();
    Log.Info("TSReader2WMV: reconnect mpeg2 video codec->ASF WM Writer");
    graphBuilder.RemoveFilter(fileWriterbase);
    if (!AddWmAsfWriter(outputFilename, quality, standard))
    {
      return false;
    }
    Log.Info("TSReader2WMV: Start transcoding");
    hr = mediaControl.Run();
    if (hr != 0)
    {
      Log.Error("TSReader2WMV:FAILED:unable to start graph :0x{0:X}", hr);
      Cleanup();
      return false;
    }
  }
  catch (Exception e)
  {
    Log.Error("unable to transcode file:{0} message:{1}", info.file, e.Message);
    // FIX: previously the graph, ROT entry and COM filters leaked when an exception
    // escaped — every other failure path releases them via Cleanup().
    Cleanup();
    return false;
  }
  return true;
}
/// <summary>
/// create the used COM components and get the interfaces.
/// Builds the playback graph: optional audio renderer and AudioSwitcher, TsReader source,
/// preferred codecs, VMR9 (video), subtitle/teletext plumbing, then renders the TsReader
/// output pins and caches all control interfaces in fields.
/// NOTE(review): the order of graph construction below appears deliberate (renderer before
/// switcher, switcher before TsReader callbacks, VMR9 before the video codec) — do not
/// reorder without verifying against the TsReader/AudioSwitcher filters.
/// </summary>
/// <param name="filename">Full path of the .ts recording (or radio recording) to open.</param>
/// <returns>true when the graph was built and all interfaces obtained; false on failure.</returns>
protected override bool GetInterfaces(string filename)
{
  Log.Info("TSReaderPlayer: GetInterfaces()");
  try
  {
    string strAudioRenderer = "";
    int intFilters = 0; // FlipGer: count custom filters
    string strFilters = ""; // FlipGer: collect custom filters
    // Pull all codec/renderer/subtitle preferences from configuration into the ref fields.
    LoadMyTvFilterSettings(ref intFilters, ref strFilters, ref strVideoCodec, ref strAudioCodec,
                           ref strAACAudioCodec, ref strDDPLUSAudioCodec, ref strH264VideoCodec,
                           ref strAudioRenderer, ref enableDVBBitmapSubtitles,
                           ref enableDVBTtxtSubtitles, ref relaxTsReader);
    _graphBuilder = (IGraphBuilder)new FilterGraph();
    // Register the graph in the running object table so it can be inspected with GraphEdit.
    _rotEntry = new DsROTEntry((IFilterGraph)_graphBuilder);
    if (strAudioRenderer.Length > 0) //audio renderer must be in graph before audio switcher
    {
      _audioRendererFilter = DirectShowUtil.AddAudioRendererToGraph(_graphBuilder, strAudioRenderer, true);
    }

    #region add AudioSwitcher

    if (enableMPAudioSwitcher) //audio switcher must be in graph before tsreader audiochangecallback
    {
      _audioSwitcherFilter = DirectShowUtil.AddFilterToGraph(_graphBuilder, "MediaPortal AudioSwitcher");
      if (_audioSwitcherFilter == null)
      {
        // Non-fatal: playback continues without the switcher.
        Log.Error("TSReaderPlayer: Failed to add AudioSwitcher to graph");
      }
    }

    #endregion

    #region add TsReader

    TsReader reader = new TsReader();
    _fileSource = (IBaseFilter)reader;
    _ireader = (ITSReader)reader;
    _interfaceTSReader = _fileSource;
    _ireader.SetRelaxedMode(relaxTsReader); // enable/disable continuity filtering
    // Hook this player up as the callback target for media-change and audio-change events.
    _ireader.SetTsReaderCallback(this);
    _ireader.SetRequestAudioChangeCallback(this);
    Log.Info("TSReaderPlayer: Add TsReader to graph");
    int hr = _graphBuilder.AddFilter((IBaseFilter)_fileSource, "TsReader");
    DsError.ThrowExceptionForHR(hr);

    #endregion

    #region load file in TsReader

    IFileSourceFilter interfaceFile = (IFileSourceFilter)_fileSource;
    if (interfaceFile == null)
    {
      Log.Error("TSReaderPlayer: Failed to get IFileSourceFilter");
      Cleanup();
      return false;
    }
    Log.Info("TSReaderPlayer: Open file: {0}", filename);
    hr = interfaceFile.Load(filename, null);
    if (hr != 0)
    {
      Log.Error("TSReaderPlayer: Failed to open file:{0} :0x{1:x}", filename, hr);
      Cleanup();
      return false;
    }

    #endregion

    #region add codecs

    Log.Info("TSReaderPlayer: Add codecs");
    // add preferred video & audio codecs
    MatchFilters("Video");
    MatchFilters("Audio");
    // does .ts file contain video?
    // default is _isRadio=false which prevents recorded radio file playing
    if (!_videoFormat.IsValid)
      _isRadio = true;
    if (!_isRadio)
    {
      // Video path: VMR9 renderer (added before the video codec), plus optional bitmap subtitles.
      _vmr9 = new VMR9Util();
      _vmr9.AddVMR9(_graphBuilder);
      _vmr9.Enable(false);
      DirectShowUtil.AddFilterToGraph(_graphBuilder, videoFilter);
      if (enableDVBBitmapSubtitles)
      {
        try
        {
          SubtitleRenderer.GetInstance().AddSubtitleFilter(_graphBuilder);
        }
        catch (Exception e)
        {
          // Best-effort: subtitle filter failure must not abort playback.
          Log.Error(e);
        }
      }
    }
    DirectShowUtil.AddFilterToGraph(_graphBuilder, audioFilter);
    // FlipGer: add custom filters to graph
    string[] arrFilters = strFilters.Split(';');
    for (int i = 0; i < intFilters; i++)
    {
      DirectShowUtil.AddFilterToGraph(_graphBuilder, arrFilters[i]);
    }

    #endregion

    #region PostProcessingEngine Detection

    IPostProcessingEngine postengine = PostProcessingEngine.GetInstance(true);
    if (!postengine.LoadPostProcessing(_graphBuilder))
    {
      // Fall back to a no-op engine when no post-processing filter is available.
      PostProcessingEngine.engine = new PostProcessingEngine.DummyEngine();
    }

    #endregion

    #region render TsReader output pins

    Log.Info("TSReaderPlayer: Render TsReader outputs");
    if (_isRadio)
    {
      // Radio: render only the first audio-capable output pin, ignore everything else.
      IEnumPins enumPins;
      hr = _fileSource.EnumPins(out enumPins);
      DsError.ThrowExceptionForHR(hr);
      IPin[] pins = new IPin[1];
      int fetched = 0;
      while (enumPins.Next(1, pins, out fetched) == 0)
      {
        if (fetched != 1)
        {
          break;
        }
        PinDirection direction;
        pins[0].QueryDirection(out direction);
        if (direction == PinDirection.Output)
        {
          IEnumMediaTypes enumMediaTypes;
          pins[0].EnumMediaTypes(out enumMediaTypes);
          AMMediaType[] mediaTypes = new AMMediaType[20];
          int fetchedTypes;
          enumMediaTypes.Next(20, mediaTypes, out fetchedTypes);
          for (int i = 0; i < fetchedTypes; ++i)
          {
            if (mediaTypes[i].majorType == MediaType.Audio)
            {
              hr = _graphBuilder.Render(pins[0]);
              DsError.ThrowExceptionForHR(hr);
              break;
            }
          }
        }
        DirectShowUtil.ReleaseComObject(pins[0]);
      }
      DirectShowUtil.ReleaseComObject(enumPins);
    }
    else
    {
      DirectShowUtil.RenderGraphBuilderOutputPins(_graphBuilder, _fileSource);
    }
    DirectShowUtil.RemoveUnusedFiltersFromGraph(_graphBuilder);

    #endregion

    // Cache the standard graph control interfaces.
    _mediaCtrl = (IMediaControl)_graphBuilder;
    _mediaEvt = (IMediaEventEx)_graphBuilder;
    _mediaSeeking = (IMediaSeeking)_graphBuilder;
    if (_mediaSeeking == null)
    {
      Log.Error("TSReaderPlayer: Unable to get IMediaSeeking interface");
    }
    _audioStream = (IAudioStream)_fileSource;
    if (_audioStream == null)
    {
      Log.Error("TSReaderPlayer: Unable to get IAudioStream interface");
    }
    _audioSelector = new AudioSelector(_audioStream);
    if (!_isRadio)
    {
      // Subtitle / teletext wiring (video playback only).
      if (enableDVBTtxtSubtitles || enableDVBBitmapSubtitles)
      {
        try
        {
          SubtitleRenderer.GetInstance().SetPlayer(this);
          _dvbSubRenderer = SubtitleRenderer.GetInstance();
        }
        catch (Exception e)
        {
          Log.Error(e);
        }
      }
      if (enableDVBBitmapSubtitles)
      {
        _subtitleStream = (ISubtitleStream)_fileSource;
        if (_subtitleStream == null)
        {
          Log.Error("TSReaderPlayer: Unable to get ISubtitleStream interface");
        }
      }
      if (enableDVBTtxtSubtitles)
      {
        //Log.Debug("TSReaderPlayer: Obtaining TeletextSource");
        _teletextSource = (ITeletextSource)_fileSource;
        if (_teletextSource == null)
        {
          Log.Error("TSReaderPlayer: Unable to get ITeletextSource interface");
        }
        Log.Debug("TSReaderPlayer: Creating Teletext Receiver");
        try
        {
          // NOTE(review): writes a sentinel "999;999" to the teletext-subtitle setting —
          // presumably resets the default teletext subtitle page; confirm against TV service.
          using (MPSettings xmlreader = new MPSettings())
            xmlreader.SetValue("tvservice", "dvbdefttxtsubtitles", "999;999");
        }
        catch { } // best-effort settings write; ignore failures
        TeletextSubtitleDecoder ttxtDecoder = new TeletextSubtitleDecoder(_dvbSubRenderer);
        _ttxtReceiver = new TeletextReceiver(_teletextSource, ttxtDecoder);
        // regardless of whether dvb subs are enabled, the following call is okay
        // if _subtitleStream is null the subtitle will just not setup for bitmap subs
        _subSelector = new SubtitleSelector(_subtitleStream, _dvbSubRenderer, ttxtDecoder);
      }
      else if (enableDVBBitmapSubtitles)
      {
        // if only dvb subs are enabled, pass null for ttxtDecoder
        _subSelector = new SubtitleSelector(_subtitleStream, _dvbSubRenderer, null);
      }
    }
    if (_audioRendererFilter != null)
    {
      //Log.Info("TSReaderPlayer:set reference clock");
      // Make the chosen audio renderer the graph's reference clock (clear first, then set).
      IMediaFilter mp = (IMediaFilter)_graphBuilder;
      IReferenceClock clock = (IReferenceClock)_audioRendererFilter;
      hr = mp.SetSyncSource(null);
      hr = mp.SetSyncSource(clock);
      //Log.Info("TSReaderPlayer:set reference clock:{0:X}", hr);
      _basicAudio = (IBasicAudio)_graphBuilder;
    }
    if (!_isRadio)
    {
      // Locate a Line 21 closed-caption decoder (name varies by OS version) and switch it off.
      IBaseFilter basefilter;
      _graphBuilder.FindFilterByName("Line 21 Decoder", out basefilter);
      if (basefilter == null)
      {
        _graphBuilder.FindFilterByName("Line21 Decoder", out basefilter);
      }
      if (basefilter == null)
      {
        _graphBuilder.FindFilterByName("Line 21 Decoder 2", out basefilter);
      }
      if (basefilter != null)
      {
        Log.Info("TSreaderPlayer: Line21 Decoder (Closed Captions), in use"); //: {0}", showClosedCaptions);
        _line21Decoder = (IAMLine21Decoder)basefilter;
        if (_line21Decoder != null)
        {
          AMLine21CCState state = AMLine21CCState.Off;
          hr = _line21Decoder.SetServiceState(state);
          if (hr == 0)
          {
            Log.Info("TSReaderPlayer: Closed Captions state change successful");
          }
          else
          {
            Log.Info("TSReaderPlayer: Failed to change Closed Captions state");
          }
        }
      }
      // A disconnected VMR9 means video rendering failed: abort playback.
      if (!_vmr9.IsVMR9Connected)
      {
        Log.Error("TSReaderPlayer: Failed vmr9 not connected");
        Cleanup();
        return false;
      }
      DirectShowUtil.EnableDeInterlace(_graphBuilder);
      _vmr9.SetDeinterlaceMode();
    }
    // Restore the last-used subtitle stream selection.
    using (MPSettings xmlreader = new MPSettings())
    {
      int lastSubIndex = xmlreader.GetValueAsInt("tvservice", "lastsubtitleindex", 0);
      Log.Debug("TSReaderPlayer: Last subtitle index: {0}", lastSubIndex);
      CurrentSubtitleStream = lastSubIndex;
    }
    return true;
  }
  catch (Exception ex)
  {
    Log.Error("TSReaderPlayer: Exception while creating DShow graph {0}", ex.Message);
    Cleanup();
    return false;
  }
}
/// <summary>
/// Transcodes a recorded .ts/.mpg file to WMV by building a DirectShow graph:
/// TsReader source -> mpeg2/h264 video + mpeg2/aac audio decoders -> WM ASF writer.
/// A short pre-run lets the filters negotiate formats before the ASF writer is
/// re-added and the real transcode started; the transcode then runs asynchronously.
/// </summary>
/// <param name="info">Describes the recording to transcode (input file path in info.file).</param>
/// <param name="format">Requested output video format; checked via Supports().</param>
/// <param name="quality">Requested transcoding quality preset (passed to AddWmAsfWriter).</param>
/// <param name="standard">TV standard of the source material (passed to AddWmAsfWriter).</param>
/// <returns>true when the transcoding graph was built and started; false on any failure.</returns>
public bool Transcode(TranscodeInfo info, VideoFormat format, Quality quality, Standard standard)
{
  try
  {
    if (!Supports(format))
      return false;
    // Only transport streams and mpeg program streams can be fed to TsReader.
    // Invariant-culture lowering keeps the check locale-independent (e.g. Turkish 'I').
    string ext = System.IO.Path.GetExtension(info.file).ToLowerInvariant();
    if (ext != ".ts" && ext != ".mpg")
    {
      Log.Info("TSReader2WMV: wrong file format");
      return false;
    }
    Log.Info("TSReader2WMV: create graph");
    graphBuilder = (IGraphBuilder)new FilterGraph();
    // Register the graph in the running object table so it can be inspected with GraphEdit.
    _rotEntry = new DsROTEntry((IFilterGraph)graphBuilder);
    Log.Info("TSReader2WMV: add filesource");
    TsReader reader = new TsReader();
    tsreaderSource = (IBaseFilter)reader;
    //ITSReader ireader = (ITSReader)reader;
    //ireader.SetTsReaderCallback(this);
    //ireader.SetRequestAudioChangeCallback(this);
    IBaseFilter filter = (IBaseFilter)tsreaderSource;
    graphBuilder.AddFilter(filter, "TSReader Source");
    IFileSourceFilter fileSource = (IFileSourceFilter)tsreaderSource;
    Log.Info("TSReader2WMV: load file:{0}", info.file);
    int hr = fileSource.Load(info.file, null);

    // Read the user's preferred decoder filter names from the MediaPortal configuration.
    string strVideoCodec = "";
    string strH264VideoCodec = "";
    string strAudioCodec = "";
    string strAACAudioCodec = "";
    using (MediaPortal.Profile.Settings xmlreader = new MediaPortal.Profile.MPSettings())
    {
      strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "");
      strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "");
      strAACAudioCodec = xmlreader.GetValueAsString("mytv", "aacaudiocodec", "");
      strH264VideoCodec = xmlreader.GetValueAsString("mytv", "h264videocodec", "");
    }

    // Find the type of decoder required for the output video & audio pins on TSReader.
    Log.Info("TSReader2WMV: find tsreader compatible audio/video decoders");
    IPin pinOut0, pinOut1;
    IPin pinIn0, pinIn1;
    pinOut0 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 0); //audio
    pinOut1 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 1); //video
    if (pinOut0 == null || pinOut1 == null)
    {
      Log.Error("TSReader2WMV: FAILED: unable to get output pins of tsreader");
      Cleanup();
      return false;
    }
    // Probe the audio pin's media types: LATM AAC means we need the AAC decoder.
    bool usingAAC = false;
    IEnumMediaTypes enumMediaTypes;
    hr = pinOut0.EnumMediaTypes(out enumMediaTypes);
    while (true)
    {
      AMMediaType[] mediaTypes = new AMMediaType[1];
      int typesFetched;
      hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
      if (hr != 0 || typesFetched == 0)
        break;
      if (mediaTypes[0].majorType == MediaType.Audio && mediaTypes[0].subType == MediaSubType.LATMAAC)
      {
        Log.Info("TSReader2WMV: found LATM AAC audio out pin on tsreader");
        usingAAC = true;
      }
    }
    // Probe the video pin's media types: AVC1 means we need the H.264 decoder.
    bool usingH264 = false;
    hr = pinOut1.EnumMediaTypes(out enumMediaTypes);
    while (true)
    {
      AMMediaType[] mediaTypes = new AMMediaType[1];
      int typesFetched;
      hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
      if (hr != 0 || typesFetched == 0)
        break;
      if (mediaTypes[0].majorType == MediaType.Video && mediaTypes[0].subType == AVC1)
      {
        Log.Info("TSReader2WMV: found H.264 video out pin on tsreader");
        usingH264 = true;
      }
    }

    // Add the type of decoder required for the output video & audio pins on TSReader.
    Log.Info("TSReader2WMV: add audio/video decoders to graph");
    if (usingH264 == false)
    {
      Log.Info("TSReader2WMV: add mpeg2 video decoder:{0}", strVideoCodec);
      VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
      if (VideoCodec == null)
      {
        Log.Error("TSReader2WMV: unable to add mpeg2 video decoder");
        Cleanup();
        return false;
      }
    }
    else
    {
      Log.Info("TSReader2WMV: add h264 video codec:{0}", strH264VideoCodec);
      VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strH264VideoCodec);
      if (VideoCodec == null)
      {
        Log.Error("TSReader2WMV: FAILED:unable to add h264 video codec");
        Cleanup();
        return false;
      }
    }
    if (usingAAC == false)
    {
      Log.Info("TSReader2WMV: add mpeg2 audio codec:{0}", strAudioCodec);
      AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
      if (AudioCodec == null)
      {
        Log.Error("TSReader2WMV: FAILED:unable to add mpeg2 audio codec");
        Cleanup();
        return false;
      }
    }
    else
    {
      Log.Info("TSReader2WMV: add aac audio codec:{0}", strAACAudioCodec);
      AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAACAudioCodec);
      if (AudioCodec == null)
      {
        Log.Error("TSReader2WMV: FAILED:unable to add aac audio codec");
        Cleanup();
        return false;
      }
    }
    Log.Info("TSReader2WMV: connect tsreader->audio/video decoders");
    //connect output #0 (audio) of tsreader->audio decoder input pin 0
    //connect output #1 (video) of tsreader->video decoder input pin 0
    pinIn0 = DsFindPin.ByDirection(AudioCodec, PinDirection.Input, 0); //audio
    pinIn1 = DsFindPin.ByDirection(VideoCodec, PinDirection.Input, 0); //video
    if (pinIn0 == null || pinIn1 == null)
    {
      Log.Error("TSReader2WMV: FAILED: unable to get pins of video/audio codecs");
      Cleanup();
      return false;
    }
    hr = graphBuilder.Connect(pinOut0, pinIn0);
    if (hr != 0)
    {
      Log.Error("TSReader2WMV: FAILED: unable to connect audio pins :0x{0:X}", hr);
      Cleanup();
      return false;
    }
    hr = graphBuilder.Connect(pinOut1, pinIn1);
    if (hr != 0)
    {
      Log.Error("TSReader2WMV: FAILED: unable to connect video pins :0x{0:X}", hr);
      Cleanup();
      return false;
    }
    // The output file sits next to the input, with a .wmv extension.
    string outputFilename = System.IO.Path.ChangeExtension(info.file, ".wmv");
    if (!AddWmAsfWriter(outputFilename, quality, standard))
      return false;
    Log.Info("TSReader2WMV: start pre-run");
    mediaControl = graphBuilder as IMediaControl;
    mediaSeeking = tsreaderSource as IMediaSeeking;
    mediaEvt = graphBuilder as IMediaEventEx;
    mediaPos = graphBuilder as IMediaPosition;
    //get file duration
    // Seek far past the end (5 hours in 100ns units); TsReader clamps to the real end,
    // so the resulting position is the recording's duration.
    long lTime = 5 * 60 * 60;
    lTime *= 10000000;
    long pStop = 0;
    hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning,
                                   new DsLong(pStop), AMSeekingSeekingFlags.NoPositioning);
    if (hr == 0)
    {
      long lStreamPos;
      mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
      m_dDuration = lStreamPos;
      lTime = 0; // rewind to the start before the real run
      mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning,
                                new DsLong(pStop), AMSeekingSeekingFlags.NoPositioning);
    }
    double duration = m_dDuration / 10000000d;
    Log.Info("TSReader2WMV: movie duration:{0}", Util.Utils.SecondsToHMSString((int)duration));
    hr = mediaControl.Run();
    if (hr != 0)
    {
      Log.Error("TSReader2WMV: FAILED: unable to start graph :0x{0:X}", hr);
      Cleanup();
      return false;
    }
    // Pre-run for up to ~2 seconds of stream time (max 20 x 100ms polls) so all filters
    // complete their media type negotiation before the ASF writer is re-added.
    int maxCount = 20;
    while (true)
    {
      long lCurrent;
      mediaSeeking.GetCurrentPosition(out lCurrent);
      double dpos = (double)lCurrent;
      dpos /= 10000000d;
      System.Threading.Thread.Sleep(100);
      if (dpos >= 2.0d)
        break;
      maxCount--;
      if (maxCount <= 0)
        break;
    }
    Log.Info("TSReader2WMV: pre-run done");
    Log.Info("TSReader2WMV: Get duration of movie");
    mediaControl.Stop();
    FilterState state;
    mediaControl.GetState(500, out state);
    GC.Collect();
    GC.Collect();
    GC.Collect();
    GC.WaitForPendingFinalizers();
    Log.Info("TSReader2WMV: reconnect mpeg2 video codec->ASF WM Writer");
    graphBuilder.RemoveFilter(fileWriterbase);
    if (!AddWmAsfWriter(outputFilename, quality, standard))
      return false;
    Log.Info("TSReader2WMV: Start transcoding");
    hr = mediaControl.Run();
    if (hr != 0)
    {
      Log.Error("TSReader2WMV:FAILED:unable to start graph :0x{0:X}", hr);
      Cleanup();
      return false;
    }
  }
  catch (Exception e)
  {
    Log.Error("unable to transcode file:{0} message:{1}", info.file, e.Message);
    // FIX: previously the graph, ROT entry and COM filters leaked when an exception
    // escaped — every other failure path releases them via Cleanup().
    Cleanup();
    return false;
  }
  return true;
}
/// <summary>
/// Transcodes a recorded .ts/.mpg file to MP4 by building a DirectShow graph:
/// TsReader source -> (mpeg2|h264) video decoder + (mpeg2|aac) audio decoder ->
/// encoders/muxer/file writer (added via AddCodecs), then runs the graph.
/// </summary>
/// <param name="info">Describes the recording; <c>info.file</c> is the input path.</param>
/// <param name="format">Requested output format; rejected early when Supports() returns false.</param>
/// <param name="quality">Requested quality; consumed by EncoderSet via the graph helpers.</param>
/// <param name="standard">TV standard; part of the shared transcoder interface.</param>
/// <returns>true when the transcode graph was built and started; false on any failure.</returns>
public bool Transcode(TranscodeInfo info, MediaPortal.Core.Transcoding.VideoFormat format,
                      MediaPortal.Core.Transcoding.Quality quality, Standard standard)
{
  if (!Supports(format)) return false;
  string ext = System.IO.Path.GetExtension(info.file);
  if (ext.ToLowerInvariant() != ".ts" && ext.ToLowerInvariant() != ".mpg")
  {
    Log.Info("TSReader2MP4: wrong file format");
    return false;
  }
  try
  {
    graphBuilder = (IGraphBuilder)new FilterGraph();
    // Register the graph in the Running Object Table so it can be inspected with GraphEdit.
    _rotEntry = new DsROTEntry((IFilterGraph)graphBuilder);
    Log.Info("TSReader2MP4: add filesource");
    TsReader reader = new TsReader();
    tsreaderSource = (IBaseFilter)reader;
    IBaseFilter filter = (IBaseFilter)tsreaderSource;
    graphBuilder.AddFilter(filter, "TSReader Source");
    IFileSourceFilter fileSource = (IFileSourceFilter)tsreaderSource;
    Log.Info("TSReader2MP4: load file:{0}", info.file);
    int hr = fileSource.Load(info.file, null);

    // Read the user's preferred decoder filter names from MediaPortal configuration.
    string strVideoCodec = "";
    string strH264VideoCodec = "";
    string strAudioCodec = "";
    string strAACAudioCodec = "";
    using (MediaPortal.Profile.Settings xmlreader = new MediaPortal.Profile.MPSettings())
    {
      strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "");
      strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "");
      strAACAudioCodec = xmlreader.GetValueAsString("mytv", "aacaudiocodec", "");
      strH264VideoCodec = xmlreader.GetValueAsString("mytv", "h264videocodec", "");
    }

    // Find the type of decoder required for the output video & audio pins on TSReader.
    Log.Info("TSReader2MP4: find tsreader compatible audio/video decoders");
    IPin pinOut0, pinOut1;
    IPin pinIn0, pinIn1;
    pinOut0 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 0); //audio
    pinOut1 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 1); //video
    if (pinOut0 == null || pinOut1 == null)
    {
      Log.Error("TSReader2MP4: FAILED: unable to get output pins of tsreader");
      Cleanup();
      return false;
    }

    // Probe the audio pin's media types for LATM AAC so we pick the matching decoder.
    bool usingAAC = false;
    IEnumMediaTypes enumMediaTypes;
    hr = pinOut0.EnumMediaTypes(out enumMediaTypes);
    while (true)
    {
      AMMediaType[] mediaTypes = new AMMediaType[1];
      int typesFetched;
      hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
      if (hr != 0 || typesFetched == 0) break;
      if (mediaTypes[0].majorType == MediaType.Audio && mediaTypes[0].subType == MediaSubType.LATMAAC)
      {
        Log.Info("TSReader2MP4: found LATM AAC audio out pin on tsreader");
        usingAAC = true;
      }
    }

    // Probe the video pin's media types for H.264 (AVC1) the same way.
    bool usingH264 = false;
    hr = pinOut1.EnumMediaTypes(out enumMediaTypes);
    while (true)
    {
      AMMediaType[] mediaTypes = new AMMediaType[1];
      int typesFetched;
      hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
      if (hr != 0 || typesFetched == 0) break;
      if (mediaTypes[0].majorType == MediaType.Video && mediaTypes[0].subType == AVC1)
      {
        Log.Info("TSReader2MP4: found H.264 video out pin on tsreader");
        usingH264 = true;
      }
    }

    // Add the type of decoder required for the output video & audio pins on TSReader.
    Log.Info("TSReader2MP4: add audio/video decoders to graph");
    if (usingH264 == false)
    {
      Log.Info("TSReader2MP4: add mpeg2 video decoder:{0}", strVideoCodec);
      VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
      if (VideoCodec == null)
      {
        Log.Error("TSReader2MP4: unable to add mpeg2 video decoder");
        Cleanup();
        return false;
      }
    }
    else
    {
      Log.Info("TSReader2MP4: add h264 video codec:{0}", strH264VideoCodec);
      VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strH264VideoCodec);
      if (VideoCodec == null)
      {
        Log.Error("TSReader2MP4: FAILED:unable to add h264 video codec");
        Cleanup();
        return false;
      }
    }
    if (usingAAC == false)
    {
      Log.Info("TSReader2MP4: add mpeg2 audio codec:{0}", strAudioCodec);
      AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
      if (AudioCodec == null)
      {
        Log.Error("TSReader2MP4: FAILED:unable to add mpeg2 audio codec");
        Cleanup();
        return false;
      }
    }
    else
    {
      Log.Info("TSReader2MP4: add aac audio codec:{0}", strAACAudioCodec);
      AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAACAudioCodec);
      if (AudioCodec == null)
      {
        Log.Error("TSReader2MP4: FAILED:unable to add aac audio codec");
        Cleanup();
        return false;
      }
    }

    Log.Info("TSReader2MP4: connect tsreader->audio/video decoders");
    //connect output #0 (audio) of tsreader->audio decoder input pin 0
    //connect output #1 (video) of tsreader->video decoder input pin 0
    pinIn0 = DsFindPin.ByDirection(AudioCodec, PinDirection.Input, 0); //audio
    pinIn1 = DsFindPin.ByDirection(VideoCodec, PinDirection.Input, 0); //video
    if (pinIn0 == null || pinIn1 == null)
    {
      Log.Error("TSReader2MP4: FAILED: unable to get pins of video/audio codecs");
      Cleanup();
      return false;
    }
    hr = graphBuilder.Connect(pinOut0, pinIn0);
    if (hr != 0)
    {
      Log.Error("TSReader2MP4: FAILED: unable to connect audio pins :0x{0:X}", hr);
      Cleanup();
      return false;
    }
    hr = graphBuilder.Connect(pinOut1, pinIn1);
    if (hr != 0)
    {
      Log.Error("TSReader2MP4: FAILED: unable to connect video pins :0x{0:X}", hr);
      Cleanup();
      return false;
    }

    //add encoders, muxer & filewriter
    if (!AddCodecs(graphBuilder, info)) return false;

    //setup graph controls
    mediaControl = graphBuilder as IMediaControl;
    mediaSeeking = tsreaderSource as IMediaSeeking;
    mediaEvt = graphBuilder as IMediaEventEx;
    mediaPos = graphBuilder as IMediaPosition;

    // Determine the recording duration: seek far past the end (5 hours), read back the
    // clipped position, then rewind to the start.
    Log.Info("TSReader2MP4: Get duration of recording");
    long lTime = 5 * 60 * 60;
    lTime *= 10000000; // seconds -> 100ns units used by IMediaSeeking
    long pStop = 0;
    hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning,
                                   new DsLong(pStop), AMSeekingSeekingFlags.NoPositioning);
    if (hr == 0)
    {
      long lStreamPos;
      mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
      m_dDuration = lStreamPos;
      lTime = 0;
      mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning,
                                new DsLong(pStop), AMSeekingSeekingFlags.NoPositioning);
    }
    double duration = m_dDuration / 10000000d;
    Log.Info("TSReader2MP4: recording duration: {0}", MediaPortal.Util.Utils.SecondsToHMSString((int)duration));

    //run the graph to initialize the filters to be sure
    hr = mediaControl.Run();
    if (hr != 0)
    {
      Log.Error("TSReader2MP4: FAILED: unable to start graph :0x{0:X}", hr);
      Cleanup();
      return false;
    }
    // Pre-run: let the graph play ~2 seconds (or up to 20 * 100ms) so the filters negotiate.
    int maxCount = 20;
    while (true)
    {
      long lCurrent;
      mediaSeeking.GetCurrentPosition(out lCurrent);
      double dpos = (double)lCurrent;
      dpos /= 10000000d;
      System.Threading.Thread.Sleep(100);
      if (dpos >= 2.0d) break;
      maxCount--;
      if (maxCount <= 0) break;
    }
    mediaControl.Stop();
    FilterState state;
    mediaControl.GetState(500, out state);
    GC.Collect();
    GC.Collect();
    GC.Collect();
    GC.WaitForPendingFinalizers();

    // Rebuild the encoder/muxer/writer chain now that the pre-run fixed the media types.
    graphBuilder.RemoveFilter(mp4Muxer);
    graphBuilder.RemoveFilter(h264Encoder);
    graphBuilder.RemoveFilter(aacEncoder);
    graphBuilder.RemoveFilter((IBaseFilter)fileWriterFilter);
    if (!AddCodecs(graphBuilder, info)) return false;
    //Set Encoder quality & Muxer settings
    if (!EncoderSet(graphBuilder, info)) return false;

    //start transcoding - run the graph
    Log.Info("TSReader2MP4: start transcoding");
    // BUGFIX: the original code attempted an unfinished "flow control" setup here:
    //   IAsyncReader synchVideo = null;
    //   mediaSample = VideoCodec as IMediaSample;   // a filter is not a media sample -> null
    //   hr = synchVideo.SyncReadAligned(mediaSample);
    // synchVideo was always null, so this threw a NullReferenceException on every call,
    // which the catch block below turned into an unconditional 'return false' - the
    // transcode could never start. The intended throttling (via CBasePin/CPullPin/
    // IAsyncReader) was never implemented, so the graph is now simply run directly.
    hr = mediaControl.Run();
    if (hr != 0)
    {
      Log.Error("TSReader2MP4: FAILED:unable to start graph :0x{0:X}", hr);
      Cleanup();
      return false;
    }
  }
  catch (Exception ex)
  {
    Log.Error("TSReader2MP4: Unable create graph: {0}", ex.Message);
    Cleanup();
    return false;
  }
  return true;
}
/// <summary>
/// Builds and runs a simple playback graph for <paramref name="fileName"/>:
/// adds a TsReader source, renders all of its output pins, and hosts any
/// resulting video window inside <paramref name="form"/>.
/// </summary>
/// <param name="fileName">Path of the .ts file to play.</param>
/// <param name="form">WinForms window that owns the video display.</param>
/// <returns>false when the file cannot be loaded; true otherwise (Run() result is only logged).</returns>
public bool Play(string fileName, Form form)
{
  _form = form;
  Log.WriteFile("play:{0}", fileName);
  // Create the graph and register it in the ROT so GraphEdit can attach for debugging.
  _graphBuilder = (IFilterGraph2)new FilterGraph();
  _rotEntry = new DsROTEntry(_graphBuilder);
  TsReader reader = new TsReader();
  _tsReader = (IBaseFilter)reader;
  Log.Info("TSReaderPlayer:add TsReader to graph");
  _graphBuilder.AddFilter(_tsReader, "TsReader");
  #region load file in TsReader
  Log.WriteFile("load file in Ts");
  IFileSourceFilter interfaceFile = (IFileSourceFilter)_tsReader;
  // NOTE(review): a direct cast either succeeds or throws, so this null check can
  // never trigger; kept as-is for behavior parity.
  if (interfaceFile == null)
  {
    Log.WriteFile("TSReaderPlayer:Failed to get IFileSourceFilter");
    return false;
  }
  int hr = interfaceFile.Load(fileName, null);
  if (hr != 0)
  {
    Log.WriteFile("TSReaderPlayer:Failed to load file");
    return false;
  }
  #endregion
  #region render pin
  // Render every output pin of TsReader; input pins are skipped. Each pin (and the
  // enumerator) is released after use to avoid leaking COM references.
  Log.Info("TSReaderPlayer:render TsReader outputs");
  IEnumPins enumPins;
  _tsReader.EnumPins(out enumPins);
  IPin[] pins = new IPin[2];
  int fetched;
  while (enumPins.Next(1, pins, out fetched) == 0)
  {
    if (fetched != 1)
    {
      break;
    }
    PinDirection direction;
    pins[0].QueryDirection(out direction);
    if (direction == PinDirection.Input)
    {
      Release.ComObject(pins[0]);
      continue;
    }
    _graphBuilder.Render(pins[0]);
    Release.ComObject(pins[0]);
  }
  Release.ComObject(enumPins);
  #endregion
  // Attach the video window (if the rendered graph exposes one) to the caller's form.
  _videoWin = _graphBuilder as IVideoWindow;
  if (_videoWin != null)
  {
    _videoWin.put_Visible(OABool.True);
    _videoWin.put_Owner(form.Handle);
    _videoWin.put_WindowStyle(
      (WindowStyle)((int)WindowStyle.Child + (int)WindowStyle.ClipSiblings + (int)WindowStyle.ClipChildren));
    _videoWin.put_MessageDrain(form.Handle);
    _videoWin.SetWindowPosition(form.ClientRectangle.X, form.ClientRectangle.Y,
                                form.ClientRectangle.Width, form.ClientRectangle.Height);
  }
  Log.WriteFile("run graph");
  _mediaCtrl = (IMediaControl)_graphBuilder;
  hr = _mediaCtrl.Run();
  // The Run() HRESULT is logged but deliberately not treated as fatal here.
  Log.WriteFile("TSReaderPlayer:running:{0:X}", hr);
  return true;
}
/// <summary>
/// Creates the COM components for TV/recording playback and acquires all needed
/// interfaces: builds the graph (audio renderer, audio switcher, TsReader), loads the
/// file, adds the configured codec/subtitle filters, renders TsReader's outputs and
/// wires up media control/seeking, audio selection, subtitles and closed captions.
/// </summary>
/// <param name="filename">Path of the .ts recording/timeshift buffer to open.</param>
/// <returns>true when the graph was built successfully; false (after Cleanup) otherwise.</returns>
protected override bool GetInterfaces(string filename)
{
  Log.Info("TSReaderPlayer: GetInterfaces()");
  try
  {
    _graphBuilder = (IGraphBuilder)new FilterGraphNoThread();
    _rotEntry = new DsROTEntry((IFilterGraph)_graphBuilder);
    //Get filterCodecName and filterConfig
    filterConfig = GetFilterConfiguration();
    filterCodec = GetFilterCodec();
    #region add AudioRenderer
    //Add Audio Renderer
    AudioRendererAdd();
    #endregion
    #region add AudioSwitcher
    MPAudioSwitcherAdd();
    #endregion
    #region add TsReader
    TsReader reader = new TsReader();
    _fileSource = (IBaseFilter)reader;
    _ireader = (ITSReader)reader;
    _interfaceTSReader = _fileSource;
    if (filterConfig != null) _ireader.SetRelaxedMode(filterConfig.relaxTsReader); // enable/disable continuity filtering
    _ireader.SetTsReaderCallback(this);
    _ireader.SetRequestAudioChangeCallback(this);
    Log.Info("TSReaderPlayer: Add TsReader to graph");
    int hr = _graphBuilder.AddFilter((IBaseFilter)_fileSource, "TsReader");
    DsError.ThrowExceptionForHR(hr);
    #endregion
    #region load file in TsReader
    IFileSourceFilter interfaceFile = (IFileSourceFilter)_fileSource;
    if (interfaceFile == null)
    {
      Log.Error("TSReaderPlayer: Failed to get IFileSourceFilter");
      Cleanup();
      return false;
    }
    Log.Info("TSReaderPlayer: Open file: {0}", filename);
    hr = interfaceFile.Load(filename, null);
    if (hr != 0)
    {
      Log.Error("TSReaderPlayer: Failed to open file:{0} :0x{1:x}", filename, hr);
      Cleanup();
      return false;
    }
    #endregion
    #region add codecs
    Log.Info("TSReaderPlayer: Add codecs");
    // does .ts file contain video?
    // default is _isRadio=false which prevents recorded radio file playing
    if (!_videoFormat.IsValid && g_Player.AudioStreams == 1)
    {
      Log.Debug("TSReaderPlayer: Stream is Radio");
      _isRadio = true;
    }
    if (!_isRadio)
    {
      // TV/video: add VMR9 renderer first, then the preferred video decoders.
      if (_videoFormat.IsValid)
      {
        _vmr9 = VMR9Util.g_vmr9 = new VMR9Util();
        bool AddVMR9 = VMR9Util.g_vmr9.AddVMR9(_graphBuilder);
        if (!AddVMR9)
        {
          Log.Error("TSReaderPlayer:Failed to add VMR9 to graph");
          return false;
        }
        VMR9Util.g_vmr9.Enable(false);
      }
      // Add preferred video filters
      UpdateFilters("Video");
      Log.Debug("TSReaderPlayer: UpdateFilters Video done");
      if (filterConfig != null && filterConfig.enableDVBBitmapSubtitles)
      {
        try
        {
          SubtitleRenderer.GetInstance().AddSubtitleFilter(_graphBuilder);
          Log.Debug("TSReaderPlayer: SubtitleRenderer AddSubtitleFilter");
        }
        catch (Exception e)
        {
          Log.Error(e);
        }
      }
    }
    // Add preferred audio filters
    UpdateFilters("Audio");
    Log.Debug("TSReaderPlayer: UpdateFilters Audio done");
    #endregion
    #region PostProcessingEngine Detection
    IPostProcessingEngine postengine = PostProcessingEngine.GetInstance(true);
    if (!postengine.LoadPostProcessing(_graphBuilder))
    {
      PostProcessingEngine.engine = new PostProcessingEngine.DummyEngine();
      Log.Debug("TSReaderPlayer: PostProcessingEngine to DummyEngine");
    }
    #endregion
    #region render TsReader output pins
    Log.Info("TSReaderPlayer: Render TsReader outputs");
    if (_isRadio && g_Player.AudioStreams == 1)
    {
      // Radio: render only the first output pin that offers an Audio media type.
      IEnumPins enumPins;
      hr = _fileSource.EnumPins(out enumPins);
      DsError.ThrowExceptionForHR(hr);
      IPin[] pins = new IPin[1];
      int fetched = 0;
      while (enumPins.Next(1, pins, out fetched) == 0)
      {
        if (fetched != 1)
        {
          break;
        }
        PinDirection direction;
        pins[0].QueryDirection(out direction);
        if (direction == PinDirection.Output)
        {
          IEnumMediaTypes enumMediaTypes;
          pins[0].EnumMediaTypes(out enumMediaTypes);
          AMMediaType[] mediaTypes = new AMMediaType[20];
          int fetchedTypes;
          enumMediaTypes.Next(20, mediaTypes, out fetchedTypes);
          for (int i = 0; i < fetchedTypes; ++i)
          {
            if (mediaTypes[i].majorType == MediaType.Audio)
            {
              hr = _graphBuilder.Render(pins[0]);
              DsError.ThrowExceptionForHR(hr);
              break;
            }
          }
        }
        DirectShowUtil.ReleaseComObject(pins[0]);
      }
      DirectShowUtil.ReleaseComObject(enumPins);
    }
    else
    {
      // TV: let the helper render every TsReader output pin.
      DirectShowUtil.RenderGraphBuilderOutputPins(_graphBuilder, _fileSource);
      if (filterConfig != null && !filterConfig.enableCCSubtitles)
      {
        CleanupCC();
        Log.Debug("TSReaderPlayer: CleanupCC filter (Tv/Recorded Stream Detected)");
      }
    }
    DirectShowUtil.RemoveUnusedFiltersFromGraph(_graphBuilder);
    #endregion
    // Graph control / seeking / audio interfaces.
    _mediaCtrl = (IMediaControl)_graphBuilder;
    _mediaEvt = (IMediaEventEx)_graphBuilder;
    _mediaSeeking = (IMediaSeeking)_graphBuilder;
    if (_mediaSeeking == null)
    {
      Log.Error("TSReaderPlayer: Unable to get IMediaSeeking interface");
    }
    _audioStream = (IAudioStream)_fileSource;
    if (_audioStream == null)
    {
      Log.Error("TSReaderPlayer: Unable to get IAudioStream interface");
    }
    _audioSelector = new AudioSelector(_audioStream);
    if (!_isRadio)
    {
      // Subtitle plumbing (DVB bitmap subs and/or teletext subs).
      // NOTE(review): the inner 'filterConfig != null &&' is redundant - the outer
      // condition already guarantees it; kept byte-identical here.
      if (filterConfig != null && (filterConfig != null && filterConfig.enableDVBTtxtSubtitles || filterConfig.enableDVBBitmapSubtitles))
      {
        try
        {
          SubtitleRenderer.GetInstance().SetPlayer(this);
          _dvbSubRenderer = SubtitleRenderer.GetInstance();
        }
        catch (Exception e)
        {
          Log.Error(e);
        }
      }
      if (filterConfig != null && filterConfig.enableDVBBitmapSubtitles)
      {
        _subtitleStream = (ISubtitleStream)_fileSource;
        if (_subtitleStream == null)
        {
          Log.Error("TSReaderPlayer: Unable to get ISubtitleStream interface");
        }
      }
      if (filterConfig != null && filterConfig.enableDVBTtxtSubtitles)
      {
        //Log.Debug("TSReaderPlayer: Obtaining TeletextSource");
        _teletextSource = (ITeletextSource)_fileSource;
        if (_teletextSource == null)
        {
          Log.Error("TSReaderPlayer: Unable to get ITeletextSource interface");
        }
        Log.Debug("TSReaderPlayer: Creating Teletext Receiver");
        // Best-effort settings write; failures are deliberately ignored.
        try
        {
          using (MPSettings xmlreader = new MPSettings())
            xmlreader.SetValue("tvservice", "dvbdefttxtsubtitles", "999;999");
        }
        catch { }
        TeletextSubtitleDecoder ttxtDecoder = new TeletextSubtitleDecoder(_dvbSubRenderer);
        _ttxtReceiver = new TeletextReceiver(_teletextSource, ttxtDecoder);
        // regardless of whether dvb subs are enabled, the following call is okay
        // if _subtitleStream is null the subtitle will just not setup for bitmap subs
        _subSelector = new SubtitleSelector(_subtitleStream, _dvbSubRenderer, ttxtDecoder);
      }
      else if (filterConfig != null && filterConfig.enableDVBBitmapSubtitles)
      {
        // if only dvb subs are enabled, pass null for ttxtDecoder
        _subSelector = new SubtitleSelector(_subtitleStream, _dvbSubRenderer, null);
      }
    }
    if (filterCodec._audioRendererFilter != null)
    {
      //Log.Info("TSReaderPlayer:set reference clock");
      /*IMediaFilter mp = (IMediaFilter)_graphBuilder;
      IReferenceClock clock = (IReferenceClock)filterCodec._audioRendererFilter;
      hr = mp.SetSyncSource(null);
      hr = mp.SetSyncSource(clock);*/
      //Log.Info("TSReaderPlayer:set reference clock:{0:X}", hr);
      SyncAudioRenderer();
      _basicAudio = (IBasicAudio)_graphBuilder;
    }
    if (!_isRadio)
    {
      // Closed-caption (Line21/CoreCC) handling when enabled in the filter config.
      if (VMR9Util.g_vmr9 != null && filterConfig != null && filterConfig.enableCCSubtitles)
      {
        CleanupCC();
        ReleaseCC();
        ReleaseCC2();
        CoreCCParserCheck();
        DirectShowUtil.RenderUnconnectedOutputPins(_graphBuilder, filterCodec.VideoCodec);
        Log.Debug("TSReaderPlayer: Render VideoCodec filter (Tv/Recorded Stream Detected)");
        EnableCC();
        Log.Debug("TSReaderPlayer: EnableCC");
        if (CoreCCPresent)
        {
          DirectShowUtil.RenderUnconnectedOutputPins(_graphBuilder, filterCodec.CoreCCParser);
          Log.Debug("TSReaderPlayer: Render CoreCCParser filter (Tv/Recorded Stream Detected)");
          EnableCC2();
          Log.Debug("TSReaderPlayer: EnableCC2");
        }
      }
      if (VMR9Util.g_vmr9 != null && !VMR9Util.g_vmr9.IsVMR9Connected)
      {
        Log.Error("TSReaderPlayer: Failed vmr9 not connected");
        Cleanup();
        return false;
      }
      DirectShowUtil.EnableDeInterlace(_graphBuilder);
      if (VMR9Util.g_vmr9 != null)
      {
        VMR9Util.g_vmr9.SetDeinterlaceMode();
      }
    }
    // Restore the last selected subtitle stream (or force auto-show behavior).
    // NOTE(review): filterConfig is dereferenced here WITHOUT a null guard, unlike
    // everywhere else in this method - potential NullReferenceException when
    // GetFilterConfiguration() returned null; confirm and guard upstream.
    using (MPSettings xmlreader = new MPSettings())
    {
      if (filterConfig.autoShowSubWhenTvStarts && SupportsCC && CurrentSubtitleStream == 0)
      {
        CurrentSubtitleStream = -1;
      }
      else
      {
        int lastSubIndex = xmlreader.GetValueAsInt("tvservice", "lastsubtitleindex", 0);
        Log.Debug("TSReaderPlayer: Last subtitle index: {0}", lastSubIndex);
        CurrentSubtitleStream = lastSubIndex;
      }
    }
    if (filterConfig != null && !filterConfig.autoShowSubWhenTvStarts)
    {
      Log.Debug("TSReaderPlayer: Automatically show subtitles when TV starts is set to {0}", filterConfig.autoShowSubWhenTvStarts);
      EnableSubtitle = filterConfig.autoShowSubWhenTvStarts;
    }
    return true;
  }
  catch (Exception ex)
  {
    Log.Error("TSReaderPlayer: Exception while creating DShow graph {0}", ex.Message);
    Cleanup();
    return false;
  }
}
/// <summary>
/// Builds and runs a minimal playback graph for the given file: a TsReader source
/// whose output pins are rendered, with any video window hosted in the supplied form.
/// </summary>
/// <param name="fileName">Path of the .ts file to play.</param>
/// <param name="form">WinForms window that owns the video display.</param>
/// <returns>false when the source file cannot be loaded; true otherwise.</returns>
public bool Play(string fileName, Form form)
{
  _form = form;
  Log.WriteFile("play:{0}", fileName);

  // Build the empty graph and expose it via the Running Object Table for debugging.
  _graphBuilder = (IFilterGraph2)new FilterGraph();
  _rotEntry = new DsROTEntry(_graphBuilder);

  // Insert the TsReader source filter.
  TsReader tsReaderFilter = new TsReader();
  _tsReader = (IBaseFilter)tsReaderFilter;
  Log.Info("TSReaderPlayer:add TsReader to graph");
  _graphBuilder.AddFilter(_tsReader, "TsReader");

  #region load file in TsReader
  Log.WriteFile("load file in Ts");
  IFileSourceFilter sourceInterface = (IFileSourceFilter)_tsReader;
  if (sourceInterface == null)
  {
    Log.WriteFile("TSReaderPlayer:Failed to get IFileSourceFilter");
    return (false);
  }
  int hr = sourceInterface.Load(fileName, null);
  if (hr != 0)
  {
    Log.WriteFile("TSReaderPlayer:Failed to load file");
    return (false);
  }
  #endregion

  #region render pin
  // Walk TsReader's pins: render every output pin, release each pin afterwards.
  Log.Info("TSReaderPlayer:render TsReader outputs");
  IEnumPins pinEnum;
  _tsReader.EnumPins(out pinEnum);
  IPin[] pinArray = new IPin[2];
  int pinsFetched;
  while (pinEnum.Next(1, pinArray, out pinsFetched) == 0)
  {
    if (pinsFetched != 1)
    {
      break;
    }
    PinDirection pinDir;
    pinArray[0].QueryDirection(out pinDir);
    if (pinDir != PinDirection.Input)
    {
      _graphBuilder.Render(pinArray[0]);
    }
    Release.ComObject(pinArray[0]);
  }
  Release.ComObject(pinEnum);
  #endregion

  // Hook the video window (if one was created by rendering) up to the caller's form.
  _videoWin = _graphBuilder as IVideoWindow;
  if (_videoWin != null)
  {
    _videoWin.put_Visible(OABool.True);
    _videoWin.put_Owner(form.Handle);
    _videoWin.put_WindowStyle(
      (WindowStyle)((int)WindowStyle.Child + (int)WindowStyle.ClipSiblings + (int)WindowStyle.ClipChildren));
    _videoWin.put_MessageDrain(form.Handle);
    _videoWin.SetWindowPosition(form.ClientRectangle.X, form.ClientRectangle.Y,
                                form.ClientRectangle.Width, form.ClientRectangle.Height);
  }

  Log.WriteFile("run graph");
  _mediaCtrl = (IMediaControl)_graphBuilder;
  hr = _mediaCtrl.Run();
  Log.WriteFile("TSReaderPlayer:running:{0:X}", hr);
  return (true);
}
/// <summary>
/// Creates the COM components for basic TS playback and acquires the interfaces:
/// builds a FilterGraph with TsReader, loads the file, adds the decoders / audio
/// renderer / custom filters configured in "mytv" settings, renders TsReader's
/// outputs and caches the control/seeking/video/audio interfaces.
/// </summary>
/// <param name="filename">Path of the .ts file to open.</param>
/// <returns>true when the graph was assembled; false when any exception occurred.</returns>
protected virtual bool GetInterfaces(string filename)
{
  int hr;
  Log.Info("TSReaderPlayer:GetInterfaces()");
  //Type comtype = null;
  // comobj is only used by the finally block; it is never assigned in the try body,
  // so the release there is effectively a no-op kept for historical reasons.
  object comobj = null;
  try
  {
    _graphBuilder = (IGraphBuilder)new FilterGraph();
    TsReader reader = new TsReader();
    _fileSource = (IBaseFilter)reader;
    ((ITSReader)reader).SetTsReaderCallback(this);
    ((ITSReader)reader).SetRequestAudioChangeCallback(this);
    IBaseFilter filter = (IBaseFilter)_fileSource;
    _graphBuilder.AddFilter(filter, "TsReader");
    IFileSourceFilter interFaceFile = (IFileSourceFilter)_fileSource;
    // NOTE(review): Load()'s HRESULT is not checked here, unlike the other players.
    interFaceFile.Load(filename, null);
    // add preferred video & audio codecs
    string strVideoCodec = "";
    string strH264VideoCodec = "";
    string strAudioCodec = "";
    string strAACAudioCodec = "";
    string strDDPLUSAudioCodec = "";
    string strAudiorenderer = "";
    int intFilters = 0; // FlipGer: count custom filters
    string strFilters = ""; // FlipGer: collect custom filters
    using (Settings xmlreader = new MPSettings())
    {
      _CodecSupportsFastSeeking = xmlreader.GetValueAsBool("debug", "CodecSupportsFastSeeking", true);
      Log.Debug("BaseTSReaderPlayer: Codec supports fast seeking = {0}", _CodecSupportsFastSeeking);
      // FlipGer: load infos for custom filters
      // Settings keys are filter0/usefilter0, filter1/usefilter1, ... until the
      // first undefined index; enabled ones are collected semicolon-separated.
      int intCount = 0;
      while (xmlreader.GetValueAsString("mytv", "filter" + intCount.ToString(), "undefined") != "undefined")
      {
        if (xmlreader.GetValueAsBool("mytv", "usefilter" + intCount.ToString(), false))
        {
          strFilters += xmlreader.GetValueAsString("mytv", "filter" + intCount.ToString(), "undefined") + ";";
          intFilters++;
        }
        intCount++;
      }
      strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "");
      strH264VideoCodec = xmlreader.GetValueAsString("mytv", "h264videocodec", "");
      strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "");
      strAACAudioCodec = xmlreader.GetValueAsString("mytv", "aacaudiocodec", "");
      strDDPLUSAudioCodec = xmlreader.GetValueAsString("mytv", "ddplusaudiocodec", "");
      strAudiorenderer = xmlreader.GetValueAsString("mytv", "audiorenderer", "Default DirectSound Device");
      string strValue = xmlreader.GetValueAsString("mytv", "defaultar", "Normal");
      GUIGraphicsContext.ARType = Util.Utils.GetAspectRatio(strValue);
    }
    // Add each configured codec/renderer only when a name was actually set.
    if (strVideoCodec.Length > 0)
    {
      DirectShowUtil.AddFilterToGraph(_graphBuilder, strVideoCodec);
    }
    if (strH264VideoCodec.Length > 0)
    {
      DirectShowUtil.AddFilterToGraph(_graphBuilder, strH264VideoCodec);
    }
    if (strAudioCodec.Length > 0)
    {
      DirectShowUtil.AddFilterToGraph(_graphBuilder, strAudioCodec);
    }
    if (strAACAudioCodec.Length > 0)
    {
      DirectShowUtil.AddFilterToGraph(_graphBuilder, strAACAudioCodec);
    }
    if (strDDPLUSAudioCodec.Length > 0)
    {
      DirectShowUtil.AddFilterToGraph(_graphBuilder, strDDPLUSAudioCodec);
    }
    if (strAudiorenderer.Length > 0)
    {
      _audioRendererFilter = DirectShowUtil.AddAudioRendererToGraph(_graphBuilder, strAudiorenderer, false);
    }
    // FlipGer: add custom filters to graph
    string[] arrFilters = strFilters.Split(';');
    for (int i = 0; i < intFilters; i++)
    {
      DirectShowUtil.AddFilterToGraph(_graphBuilder, arrFilters[i]);
    }
    DirectShowUtil.RenderOutputPins(_graphBuilder, (IBaseFilter)_fileSource);
    // Cache the graph control/event/seeking/window interfaces.
    _mediaCtrl = (IMediaControl)_graphBuilder;
    _videoWin = _graphBuilder as IVideoWindow;
    _mediaEvt = (IMediaEventEx)_graphBuilder;
    _mediaSeeking = _graphBuilder as IMediaSeeking;
    if (_mediaSeeking == null)
    {
      Log.Error("Unable to get IMediaSeeking interface#1");
    }
    if (_audioRendererFilter != null)
    {
      // Use the audio renderer as the graph's reference clock when available.
      IMediaFilter mp = _graphBuilder as IMediaFilter;
      IReferenceClock clock = _audioRendererFilter as IReferenceClock;
      hr = mp.SetSyncSource(clock);
    }
    _basicVideo = _graphBuilder as IBasicVideo2;
    _basicAudio = _graphBuilder as IBasicAudio;
    //Log.Info("TSReaderPlayer:SetARMode");
    DirectShowUtil.SetARMode(_graphBuilder, AspectRatioMode.Stretched);
    _graphBuilder.SetDefaultSyncSource();
    //Log.Info("TSReaderPlayer: set Deinterlace");
    //Log.Info("TSReaderPlayer: done");
    return true;
  }
  catch (Exception ex)
  {
    Log.Error("TSReaderPlayer:exception while creating DShow graph {0} {1}", ex.Message, ex.StackTrace);
    return false;
  }
  finally
  {
    if (comobj != null)
    {
      DirectShowUtil.ReleaseComObject(comobj);
    }
    comobj = null;
  }
}
/// <summary>
/// Creates the COM components for TV playback and acquires the interfaces: builds the
/// graph (audio renderer, optional audio switcher, TsReader), loads the file, adds the
/// configured codecs and custom filters, renders TsReader's outputs and sets up media
/// control/seeking, audio selection, subtitles and Line21 closed-caption state.
/// </summary>
/// <param name="filename">Path of the .ts recording/timeshift buffer to open.</param>
/// <returns>true when the graph was built successfully; false (after Cleanup) otherwise.</returns>
protected override bool GetInterfaces(string filename)
{
  Log.Info("TSReaderPlayer: GetInterfaces()");
  try
  {
    string strAudioRenderer = "";
    int intFilters = 0; // FlipGer: count custom filters
    string strFilters = ""; // FlipGer: collect custom filters
    // Pull all codec/subtitle/renderer preferences from the "mytv" settings section.
    LoadMyTvFilterSettings(ref intFilters, ref strFilters, ref strVideoCodec, ref strAudioCodec,
                           ref strAACAudioCodec, ref strDDPLUSAudioCodec, ref strH264VideoCodec,
                           ref strAudioRenderer, ref enableDVBBitmapSubtitles, ref enableDVBTtxtSubtitles,
                           ref relaxTsReader);
    _graphBuilder = (IGraphBuilder)new FilterGraph();
    _rotEntry = new DsROTEntry((IFilterGraph)_graphBuilder);
    if (strAudioRenderer.Length > 0) //audio renderer must be in graph before audio switcher
    {
      _audioRendererFilter = DirectShowUtil.AddAudioRendererToGraph(_graphBuilder, strAudioRenderer, true);
    }
    #region add AudioSwitcher
    if (enableMPAudioSwitcher) //audio switcher must be in graph before tsreader audiochangecallback
    {
      _audioSwitcherFilter = DirectShowUtil.AddFilterToGraph(_graphBuilder, "MediaPortal AudioSwitcher");
      if (_audioSwitcherFilter == null)
      {
        Log.Error("TSReaderPlayer: Failed to add AudioSwitcher to graph");
      }
    }
    #endregion
    #region add TsReader
    TsReader reader = new TsReader();
    _fileSource = (IBaseFilter)reader;
    _ireader = (ITSReader)reader;
    _interfaceTSReader = _fileSource;
    _ireader.SetRelaxedMode(relaxTsReader); // enable/disable continuity filtering
    _ireader.SetTsReaderCallback(this);
    _ireader.SetRequestAudioChangeCallback(this);
    Log.Info("TSReaderPlayer: Add TsReader to graph");
    int hr = _graphBuilder.AddFilter((IBaseFilter)_fileSource, "TsReader");
    DsError.ThrowExceptionForHR(hr);
    #endregion
    #region load file in TsReader
    IFileSourceFilter interfaceFile = (IFileSourceFilter)_fileSource;
    if (interfaceFile == null)
    {
      Log.Error("TSReaderPlayer: Failed to get IFileSourceFilter");
      Cleanup();
      return (false);
    }
    Log.Info("TSReaderPlayer: Open file: {0}", filename);
    hr = interfaceFile.Load(filename, null);
    if (hr != 0)
    {
      Log.Error("TSReaderPlayer: Failed to open file:{0} :0x{1:x}", filename, hr);
      Cleanup();
      return (false);
    }
    #endregion
    #region add codecs
    Log.Info("TSReaderPlayer: Add codecs");
    // add preferred video & audio codecs
    MatchFilters("Video");
    MatchFilters("Audio");
    // does .ts file contain video?
    // default is _isRadio=false which prevents recorded radio file playing
    if (!_videoFormat.IsValid)
    {
      _isRadio = true;
    }
    if (!_isRadio)
    {
      // TV/video: VMR9 renderer plus the matched video decoder and optional DVB subs filter.
      _vmr9 = new VMR9Util();
      _vmr9.AddVMR9(_graphBuilder);
      _vmr9.Enable(false);
      DirectShowUtil.AddFilterToGraph(_graphBuilder, videoFilter);
      if (enableDVBBitmapSubtitles)
      {
        try
        {
          SubtitleRenderer.GetInstance().AddSubtitleFilter(_graphBuilder);
        }
        catch (Exception e)
        {
          Log.Error(e);
        }
      }
    }
    DirectShowUtil.AddFilterToGraph(_graphBuilder, audioFilter);
    // FlipGer: add custom filters to graph
    string[] arrFilters = strFilters.Split(';');
    for (int i = 0; i < intFilters; i++)
    {
      DirectShowUtil.AddFilterToGraph(_graphBuilder, arrFilters[i]);
    }
    #endregion
    #region PostProcessingEngine Detection
    IPostProcessingEngine postengine = PostProcessingEngine.GetInstance(true);
    if (!postengine.LoadPostProcessing(_graphBuilder))
    {
      PostProcessingEngine.engine = new PostProcessingEngine.DummyEngine();
    }
    #endregion
    #region render TsReader output pins
    Log.Info("TSReaderPlayer: Render TsReader outputs");
    if (_isRadio)
    {
      // Radio: render only the first output pin that offers an Audio media type.
      IEnumPins enumPins;
      hr = _fileSource.EnumPins(out enumPins);
      DsError.ThrowExceptionForHR(hr);
      IPin[] pins = new IPin[1];
      int fetched = 0;
      while (enumPins.Next(1, pins, out fetched) == 0)
      {
        if (fetched != 1)
        {
          break;
        }
        PinDirection direction;
        pins[0].QueryDirection(out direction);
        if (direction == PinDirection.Output)
        {
          IEnumMediaTypes enumMediaTypes;
          pins[0].EnumMediaTypes(out enumMediaTypes);
          AMMediaType[] mediaTypes = new AMMediaType[20];
          int fetchedTypes;
          enumMediaTypes.Next(20, mediaTypes, out fetchedTypes);
          for (int i = 0; i < fetchedTypes; ++i)
          {
            if (mediaTypes[i].majorType == MediaType.Audio)
            {
              hr = _graphBuilder.Render(pins[0]);
              DsError.ThrowExceptionForHR(hr);
              break;
            }
          }
        }
        DirectShowUtil.ReleaseComObject(pins[0]);
      }
      DirectShowUtil.ReleaseComObject(enumPins);
    }
    else
    {
      DirectShowUtil.RenderGraphBuilderOutputPins(_graphBuilder, _fileSource);
    }
    DirectShowUtil.RemoveUnusedFiltersFromGraph(_graphBuilder);
    #endregion
    // Graph control / seeking / audio interfaces.
    _mediaCtrl = (IMediaControl)_graphBuilder;
    _mediaEvt = (IMediaEventEx)_graphBuilder;
    _mediaSeeking = (IMediaSeeking)_graphBuilder;
    if (_mediaSeeking == null)
    {
      Log.Error("TSReaderPlayer: Unable to get IMediaSeeking interface");
    }
    _audioStream = (IAudioStream)_fileSource;
    if (_audioStream == null)
    {
      Log.Error("TSReaderPlayer: Unable to get IAudioStream interface");
    }
    _audioSelector = new AudioSelector(_audioStream);
    if (!_isRadio)
    {
      // Subtitle plumbing (DVB bitmap subs and/or teletext subs).
      if (enableDVBTtxtSubtitles || enableDVBBitmapSubtitles)
      {
        try
        {
          SubtitleRenderer.GetInstance().SetPlayer(this);
          _dvbSubRenderer = SubtitleRenderer.GetInstance();
        }
        catch (Exception e)
        {
          Log.Error(e);
        }
      }
      if (enableDVBBitmapSubtitles)
      {
        _subtitleStream = (ISubtitleStream)_fileSource;
        if (_subtitleStream == null)
        {
          Log.Error("TSReaderPlayer: Unable to get ISubtitleStream interface");
        }
      }
      if (enableDVBTtxtSubtitles)
      {
        //Log.Debug("TSReaderPlayer: Obtaining TeletextSource");
        _teletextSource = (ITeletextSource)_fileSource;
        if (_teletextSource == null)
        {
          Log.Error("TSReaderPlayer: Unable to get ITeletextSource interface");
        }
        Log.Debug("TSReaderPlayer: Creating Teletext Receiver");
        TeletextSubtitleDecoder ttxtDecoder = new TeletextSubtitleDecoder(_dvbSubRenderer);
        _ttxtReceiver = new TeletextReceiver(_teletextSource, ttxtDecoder);
        // regardless of whether dvb subs are enabled, the following call is okay
        // if _subtitleStream is null the subtitle will just not setup for bitmap subs
        _subSelector = new SubtitleSelector(_subtitleStream, _dvbSubRenderer, ttxtDecoder);
      }
      else if (enableDVBBitmapSubtitles)
      {
        // if only dvb subs are enabled, pass null for ttxtDecoder
        _subSelector = new SubtitleSelector(_subtitleStream, _dvbSubRenderer, null);
      }
    }
    if (_audioRendererFilter != null)
    {
      //Log.Info("TSReaderPlayer:set reference clock");
      // Make the audio renderer the graph's reference clock (reset first).
      IMediaFilter mp = (IMediaFilter)_graphBuilder;
      IReferenceClock clock = (IReferenceClock)_audioRendererFilter;
      hr = mp.SetSyncSource(null);
      hr = mp.SetSyncSource(clock);
      //Log.Info("TSReaderPlayer:set reference clock:{0:X}", hr);
      _basicAudio = (IBasicAudio)_graphBuilder;
    }
    if (!_isRadio)
    {
      // Locate whichever Line21 (closed captions) decoder variant ended up in the
      // graph and switch captions off by default.
      IBaseFilter basefilter;
      _graphBuilder.FindFilterByName("Line 21 Decoder", out basefilter);
      if (basefilter == null)
      {
        _graphBuilder.FindFilterByName("Line21 Decoder", out basefilter);
      }
      if (basefilter == null)
      {
        _graphBuilder.FindFilterByName("Line 21 Decoder 2", out basefilter);
      }
      if (basefilter != null)
      {
        Log.Info("TSreaderPlayer: Line21 Decoder (Closed Captions), in use"); //: {0}", showClosedCaptions);
        _line21Decoder = (IAMLine21Decoder)basefilter;
        if (_line21Decoder != null)
        {
          AMLine21CCState state = AMLine21CCState.Off;
          hr = _line21Decoder.SetServiceState(state);
          if (hr == 0)
          {
            Log.Info("TSReaderPlayer: Closed Captions state change successful");
          }
          else
          {
            Log.Info("TSReaderPlayer: Failed to change Closed Captions state");
          }
        }
      }
      if (!_vmr9.IsVMR9Connected)
      {
        Log.Error("TSReaderPlayer: Failed vmr9 not connected");
        Cleanup();
        return (false);
      }
      DirectShowUtil.EnableDeInterlace(_graphBuilder);
      _vmr9.SetDeinterlaceMode();
    }
    // Restore the last selected subtitle stream from settings.
    using (MPSettings xmlreader = new MPSettings())
    {
      int lastSubIndex = xmlreader.GetValueAsInt("tvservice", "lastsubtitleindex", 0);
      Log.Debug("TSReaderPlayer: Last subtitle index: {0}", lastSubIndex);
      CurrentSubtitleStream = lastSubIndex;
    }
    return (true);
  }
  catch (Exception ex)
  {
    Log.Error("TSReaderPlayer: Exception while creating DShow graph {0}", ex.Message);
    Cleanup();
    return (false);
  }
}