/// <summary>
/// Transcodes a .ts/.mpg recording to MP4 by building a DirectShow graph:
/// TsReader source -> MPEG-2/H.264 video + MPEG-2/AAC audio decoders ->
/// (via AddCodecs) encoders, MP4 muxer and file writer.
/// The graph is run briefly once so all filters negotiate formats, the encoder
/// chain is then rebuilt and configured (EncoderSet) and the real transcode
/// run is started. Returns false on any failure; Cleanup() tears down the
/// partially built graph.
/// </summary>
/// <param name="info">Recording to transcode; info.file must be a .ts or .mpg file.</param>
/// <param name="format">Requested output format; must be accepted by Supports().</param>
/// <param name="quality">Requested encoding quality (not read directly in this method).</param>
/// <param name="standard">Target TV standard (not read directly in this method).</param>
/// <returns>true when the transcoding graph was built and started, false otherwise.</returns>
public bool Transcode(TranscodeInfo info, MediaPortal.Core.Transcoding.VideoFormat format,
                      MediaPortal.Core.Transcoding.Quality quality, Standard standard)
{
  if (!Supports(format))
  {
    return (false);
  }
  // Only transport streams / mpeg program streams are accepted as input.
  // Ordinal comparison instead of ToLower(): immune to culture-specific casing.
  string ext = System.IO.Path.GetExtension(info.file);
  if (!".ts".Equals(ext, StringComparison.OrdinalIgnoreCase) &&
      !".mpg".Equals(ext, StringComparison.OrdinalIgnoreCase))
  {
    Log.Info("TSReader2MP4: wrong file format");
    return (false);
  }
  try
  {
    graphBuilder = (IGraphBuilder)new FilterGraph();
    // Register the graph in the Running Object Table so it can be inspected with GraphEdit.
    _rotEntry = new DsROTEntry((IFilterGraph)graphBuilder);

    Log.Info("TSReader2MP4: add filesource");
    TsReader reader = new TsReader();
    tsreaderSource = (IBaseFilter)reader;
    IBaseFilter filter = (IBaseFilter)tsreaderSource;
    graphBuilder.AddFilter(filter, "TSReader Source");
    IFileSourceFilter fileSource = (IFileSourceFilter)tsreaderSource;
    Log.Info("TSReader2MP4: load file:{0}", info.file);
    int hr = fileSource.Load(info.file, null);
    if (hr != 0)
    {
      // BUGFIX: the Load() result used to be ignored; a missing or locked file
      // only surfaced later as an obscure pin-connection failure.
      Log.Error("TSReader2MP4: FAILED: unable to load file :0x{0:X}", hr);
      Cleanup();
      return (false);
    }

    // Read the user's configured decoder filter names.
    string strVideoCodec = "";
    string strH264VideoCodec = "";
    string strAudioCodec = "";
    string strAACAudioCodec = "";
    using (MediaPortal.Profile.Settings xmlreader = new MediaPortal.Profile.MPSettings())
    {
      strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "");
      strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "");
      strAACAudioCodec = xmlreader.GetValueAsString("mytv", "aacaudiocodec", "");
      strH264VideoCodec = xmlreader.GetValueAsString("mytv", "h264videocodec", "");
    }

    // Probe the media types offered by TSReader's output pins to decide which
    // decoder family (MPEG-2 vs H.264, MPEG audio vs LATM AAC) is required.
    Log.Info("TSReader2MP4: find tsreader compatible audio/video decoders");
    IPin pinOut0, pinOut1;
    IPin pinIn0, pinIn1;
    pinOut0 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 0); //audio
    pinOut1 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 1); //video
    if (pinOut0 == null || pinOut1 == null)
    {
      Log.Error("TSReader2MP4: FAILED: unable to get output pins of tsreader");
      Cleanup();
      return (false);
    }
    bool usingAAC = false;
    IEnumMediaTypes enumMediaTypes;
    hr = pinOut0.EnumMediaTypes(out enumMediaTypes);
    while (true)
    {
      AMMediaType[] mediaTypes = new AMMediaType[1];
      int typesFetched;
      hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
      if (hr != 0 || typesFetched == 0)
      {
        break;
      }
      if (mediaTypes[0].majorType == MediaType.Audio && mediaTypes[0].subType == MediaSubType.LATMAAC)
      {
        Log.Info("TSReader2MP4: found LATM AAC audio out pin on tsreader");
        usingAAC = true;
      }
      // BUGFIX: each enumerated AMMediaType owns unmanaged memory and was leaked.
      DsUtils.FreeAMMediaType(mediaTypes[0]);
    }
    bool usingH264 = false;
    hr = pinOut1.EnumMediaTypes(out enumMediaTypes);
    while (true)
    {
      AMMediaType[] mediaTypes = new AMMediaType[1];
      int typesFetched;
      hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
      if (hr != 0 || typesFetched == 0)
      {
        break;
      }
      if (mediaTypes[0].majorType == MediaType.Video && mediaTypes[0].subType == AVC1)
      {
        Log.Info("TSReader2MP4: found H.264 video out pin on tsreader");
        usingH264 = true;
      }
      // BUGFIX: free the enumerated media type here as well (was leaked).
      DsUtils.FreeAMMediaType(mediaTypes[0]);
    }

    // Add the decoder filters matching the detected stream types.
    Log.Info("TSReader2MP4: add audio/video decoders to graph");
    if (usingH264 == false)
    {
      Log.Info("TSReader2MP4: add mpeg2 video decoder:{0}", strVideoCodec);
      VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
      if (VideoCodec == null)
      {
        Log.Error("TSReader2MP4: unable to add mpeg2 video decoder");
        Cleanup();
        return (false);
      }
    }
    else
    {
      Log.Info("TSReader2MP4: add h264 video codec:{0}", strH264VideoCodec);
      VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strH264VideoCodec);
      if (VideoCodec == null)
      {
        Log.Error("TSReader2MP4: FAILED:unable to add h264 video codec");
        Cleanup();
        return (false);
      }
    }
    if (usingAAC == false)
    {
      Log.Info("TSReader2MP4: add mpeg2 audio codec:{0}", strAudioCodec);
      AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
      if (AudioCodec == null)
      {
        Log.Error("TSReader2MP4: FAILED:unable to add mpeg2 audio codec");
        Cleanup();
        return (false);
      }
    }
    else
    {
      Log.Info("TSReader2MP4: add aac audio codec:{0}", strAACAudioCodec);
      AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAACAudioCodec);
      if (AudioCodec == null)
      {
        Log.Error("TSReader2MP4: FAILED:unable to add aac audio codec");
        Cleanup();
        return (false);
      }
    }

    Log.Info("TSReader2MP4: connect tsreader->audio/video decoders");
    //connect output #0 (audio) of tsreader -> audio decoder input pin 0
    //connect output #1 (video) of tsreader -> video decoder input pin 0
    pinIn0 = DsFindPin.ByDirection(AudioCodec, PinDirection.Input, 0); //audio
    pinIn1 = DsFindPin.ByDirection(VideoCodec, PinDirection.Input, 0); //video
    if (pinIn0 == null || pinIn1 == null)
    {
      Log.Error("TSReader2MP4: FAILED: unable to get pins of video/audio codecs");
      Cleanup();
      return (false);
    }
    hr = graphBuilder.Connect(pinOut0, pinIn0);
    if (hr != 0)
    {
      Log.Error("TSReader2MP4: FAILED: unable to connect audio pins :0x{0:X}", hr);
      Cleanup();
      return (false);
    }
    hr = graphBuilder.Connect(pinOut1, pinIn1);
    if (hr != 0)
    {
      Log.Error("TSReader2MP4: FAILED: unable to connect video pins :0x{0:X}", hr);
      Cleanup();
      return (false);
    }

    //add encoders, muxer & filewriter
    if (!AddCodecs(graphBuilder, info))
    {
      return (false);
    }

    //setup graph controls
    mediaControl = graphBuilder as IMediaControl;
    mediaSeeking = tsreaderSource as IMediaSeeking;
    mediaEvt = graphBuilder as IMediaEventEx;
    mediaPos = graphBuilder as IMediaPosition;

    // Determine the recording duration: seek far past any plausible end
    // (5 hours, in 100ns units); the source clamps to the real end, so the
    // position read back IS the duration. Then seek back to the start.
    Log.Info("TSReader2MP4: Get duration of recording");
    long lTime = 5 * 60 * 60;
    lTime *= 10000000;
    long pStop = 0;
    hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning,
                                   new DsLong(pStop), AMSeekingSeekingFlags.NoPositioning);
    if (hr == 0)
    {
      long lStreamPos;
      mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
      m_dDuration = lStreamPos;
      lTime = 0;
      mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning,
                                new DsLong(pStop), AMSeekingSeekingFlags.NoPositioning);
    }
    double duration = m_dDuration / 10000000d;
    Log.Info("TSReader2MP4: recording duration: {0}", MediaPortal.Util.Utils.SecondsToHMSString((int)duration));

    // Run the graph briefly so all filters fully negotiate their connections,
    // waiting (max ~2s) until at least 2 seconds of stream have been processed.
    hr = mediaControl.Run();
    if (hr != 0)
    {
      Log.Error("TSReader2MP4: FAILED: unable to start graph :0x{0:X}", hr);
      Cleanup();
      return (false);
    }
    int maxCount = 20;
    while (true)
    {
      long lCurrent;
      mediaSeeking.GetCurrentPosition(out lCurrent);
      double dpos = (double)lCurrent;
      dpos /= 10000000d;
      System.Threading.Thread.Sleep(100);
      if (dpos >= 2.0d)
      {
        break;
      }
      maxCount--;
      if (maxCount <= 0)
      {
        break;
      }
    }
    mediaControl.Stop();
    FilterState state;
    mediaControl.GetState(500, out state);

    // Legacy pattern: force finalization of released COM wrappers before the
    // encoder chain is removed and rebuilt.
    GC.Collect();
    GC.Collect();
    GC.Collect();
    GC.WaitForPendingFinalizers();

    // Rebuild the encoder chain from scratch, then apply quality/muxer settings.
    graphBuilder.RemoveFilter(mp4Muxer);
    graphBuilder.RemoveFilter(h264Encoder);
    graphBuilder.RemoveFilter(aacEncoder);
    graphBuilder.RemoveFilter((IBaseFilter)fileWriterFilter);
    if (!AddCodecs(graphBuilder, info))
    {
      return (false);
    }
    //Set Encoder quality & Muxer settings
    if (!EncoderSet(graphBuilder, info))
    {
      return (false);
    }

    //start transcoding - run the graph
    Log.Info("TSReader2MP4: start transcoding");
    // BUGFIX: the previous "flow control" code dereferenced a null IAsyncReader
    //   IAsyncReader synchVideo = null;
    //   mediaSample = VideoCodec as IMediaSample;   // always null: a filter is not a sample
    //   hr = synchVideo.SyncReadAligned(mediaSample);
    // which threw NullReferenceException into the catch block below, so this
    // method could never return true. Real pull-pin throttling (CBasePin /
    // CPullPin / IAsyncReader) still needs a proper implementation; until then
    // the graph is simply run.
    hr = mediaControl.Run();
    if (hr != 0)
    {
      Log.Error("TSReader2MP4: FAILED:unable to start graph :0x{0:X}", hr);
      Cleanup();
      return (false);
    }
  }
  catch (Exception ex)
  {
    Log.Error("TSReader2MP4: Unable create graph: {0}", ex.Message);
    Cleanup();
    return (false);
  }
  return (true);
}
/// <summary>
/// Transcodes a .dvr-ms/.sbe stream-buffer recording to DivX/AVI by building a
/// DirectShow graph: StreamBufferSource -> MPEG-2 video/audio decoders ->
/// (via AddCodecs) DivX/MP3 encoders, AVI muxer and file writer.
/// The graph is run briefly once so the filters negotiate formats, the encoder
/// chain is rebuilt, and then the real transcode run is started.
/// Returns false on any failure; Cleanup() tears down the partially built graph.
/// </summary>
/// <param name="info">Recording to transcode; info.file must be a .dvr-ms or .sbe file.</param>
/// <param name="format">Requested output format; must be accepted by Supports().</param>
/// <param name="quality">Requested encoding quality (not read directly in this method).</param>
/// <param name="standard">Target TV standard (not read directly in this method).</param>
/// <returns>true when the transcoding graph was built and started, false otherwise.</returns>
public bool Transcode(TranscodeInfo info, MediaPortal.Core.Transcoding.VideoFormat format,
                      MediaPortal.Core.Transcoding.Quality quality, Standard standard)
{
  if (!Supports(format))
  {
    return(false);
  }
  // Only dvr-ms / sbe stream-buffer recordings are accepted as input.
  string ext = System.IO.Path.GetExtension(info.file);
  if (ext.ToLower() != ".dvr-ms" && ext.ToLower() != ".sbe")
  {
    Log.Info("DVRMS2DIVX: wrong file format");
    return(false);
  }
  //disable xvid status window while encoding
  /* try
   * {
   *   using (RegistryKey subkey = Registry.CurrentUser.OpenSubKey(@"Software\GNU\XviD", true))
   *   {
   *     if (subkey != null)
   *     {
   *       Int32 uivalue = 0;
   *       subkey.SetValue("display_status", (Int32)uivalue);
   *       subkey.SetValue("debug", (Int32)uivalue);
   *       subkey.SetValue("bitrate", (Int32)bitrate);
   *
   *       uivalue = 1;
   *       subkey.SetValue("interlacing", (Int32)uivalue);
   *     }
   *   }
   * }
   * catch (Exception)
   * {
   * }*/
  //Type comtype = null;
  //object comobj = null;
  try
  {
    graphBuilder = (IGraphBuilder) new FilterGraph();
    // Register the graph in the Running Object Table so it can be inspected with GraphEdit.
    _rotEntry = new DsROTEntry((IFilterGraph)graphBuilder);
    Log.Info("DVRMS2DIVX: add filesource");
    bufferSource = (IStreamBufferSource) new StreamBufferSource();
    IBaseFilter filter = (IBaseFilter)bufferSource;
    graphBuilder.AddFilter(filter, "SBE SOURCE");
    IFileSourceFilter fileSource = (IFileSourceFilter)bufferSource;
    Log.Info("DVRMS2DIVX: load file:{0}", info.file);
    int hr = fileSource.Load(info.file, null);
    /*string strDemuxerMoniker = @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{AFB6C280-2C41-11D3-8A60-0000F81E0E4A}";
     *
     * mpegDemuxer = Marshal.BindToMoniker(strDemuxerMoniker) as IBaseFilter;
     * if (mpegDemuxer == null)
     * {
     *   Log.Error("DVRMS2DIVX:FAILED:unable to add mpeg2 demuxer");
     *   Cleanup();
     *   return false;
     * }
     * hr = graphBuilder.AddFilter(mpegDemuxer, "MPEG-2 Demultiplexer");
     * if (hr != 0)
     * {
     *   Log.Error("DVRMS2DIVX:FAILED:Add mpeg2 demuxer to filtergraph :0x{0:X}", hr);
     *   Cleanup();
     *   return false;
     * }*/
    //add mpeg2 audio/video codecs
    // Hard-coded device moniker for the MPC MPEG-2 video decoder (Gabest);
    // the audio decoder is added by friendly filter name instead.
    string strVideoCodecMoniker =
      @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{39F498AF-1A09-4275-B193-673B0BA3D478}";
    string strAudioCodec = "MPC - MPA Decoder Filter";
    Log.Info("DVRMS2DIVX: add MPV mpeg2 video decoder");
    Mpeg2VideoCodec = Marshal.BindToMoniker(strVideoCodecMoniker) as IBaseFilter;
    if (Mpeg2VideoCodec == null)
    {
      Log.Error("DVRMS2DIVX:FAILED:unable to add MPV mpeg2 video decoder");
      Cleanup();
      return(false);
    }
    hr = graphBuilder.AddFilter(Mpeg2VideoCodec, "MPC - MPEG-2 Video Decoder (Gabest)");
    if (hr != 0)
    {
      Log.Error("DVRMS2DIVX:FAILED:Add MPV mpeg2 video to filtergraph :0x{0:X}", hr);
      Cleanup();
      return(false);
    }
    Log.Info("DVRMS2DIVX: add MPA mpeg2 audio codec:{0}", strAudioCodec);
    Mpeg2AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
    if (Mpeg2AudioCodec == null)
    {
      Log.Error("DVRMS2DIVX:FAILED:unable to add MPV mpeg2 audio codec");
      Cleanup();
      return(false);
    }
    // Connect source output #0 (audio) -> audio decoder input pin 0 (pinIn1)
    // and source output #1 (video) -> video decoder input pin 0 (pinIn0).
    // NOTE: pinIn0 is the VIDEO decoder input and pinIn1 the AUDIO decoder
    // input, so the Connect calls below pair pinOut0/pinIn1 and pinOut1/pinIn0.
    Log.Info("DVRMS2DIVX: connect streambufer source->mpeg audio/video decoders");
    IPin pinOut0, pinOut1;
    IPin pinIn0, pinIn1;
    pinOut0 = DsFindPin.ByDirection((IBaseFilter)bufferSource, PinDirection.Output, 0); //audio
    pinOut1 = DsFindPin.ByDirection((IBaseFilter)bufferSource, PinDirection.Output, 1); //video
    if (pinOut0 == null || pinOut1 == null)
    {
      Log.Error("DVRMS2DIVX:FAILED:unable to get pins of source");
      Cleanup();
      return(false);
    }
    pinIn0 = DsFindPin.ByDirection(Mpeg2VideoCodec, PinDirection.Input, 0); //video
    pinIn1 = DsFindPin.ByDirection(Mpeg2AudioCodec, PinDirection.Input, 0); //audio
    if (pinIn0 == null || pinIn1 == null)
    {
      Log.Error("DVRMS2DIVX:FAILED:unable to get pins of mpeg2 video/audio codec");
      Cleanup();
      return(false);
    }
    hr = graphBuilder.Connect(pinOut0, pinIn1); // audio out -> audio decoder in
    if (hr != 0)
    {
      Log.Error("DVRMS2DIVX:FAILED:unable to connect audio pins :0x{0:X}", hr);
      Cleanup();
      return(false);
    }
    hr = graphBuilder.Connect(pinOut1, pinIn0); // video out -> video decoder in
    if (hr != 0)
    {
      Log.Error("DVRMS2DIVX:FAILED:unable to connect video pins :0x{0:X}", hr);
      Cleanup();
      return(false);
    }
    // Add encoders, muxer & file writer downstream of the decoders.
    if (!AddCodecs(graphBuilder, info))
    {
      return(false);
    }
    // hr=(graphBuilder as IMediaFilter).SetSyncSource(null);
    // if (hr!=0)
    //   Log.Error("DVRMS2DIVX:FAILED:to SetSyncSource :0x{0:X}",hr);
    //setup graph controls
    mediaControl = graphBuilder as IMediaControl;
    mediaSeeking = bufferSource as IStreamBufferMediaSeeking;
    mediaEvt = graphBuilder as IMediaEventEx;
    mediaPos = graphBuilder as IMediaPosition;
    // Determine the recording duration: seek far past any plausible end
    // (5 hours, in 100ns units); the source clamps to the real end, so the
    // position read back IS the duration. Then seek back to the start.
    Log.Info("DVRMS2DIVX: Get duration of movie");
    long lTime = 5 * 60 * 60;
    lTime *= 10000000;
    long pStop = 0;
    hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning,
                                   new DsLong(pStop), AMSeekingSeekingFlags.NoPositioning);
    if (hr == 0)
    {
      long lStreamPos;
      mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
      m_dDuration = lStreamPos;
      lTime = 0;
      mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning,
                                new DsLong(pStop), AMSeekingSeekingFlags.NoPositioning);
    }
    double duration = m_dDuration / 10000000d;
    Log.Info("DVRMS2DIVX: movie duration:{0}", MediaPortal.Util.Utils.SecondsToHMSString((int)duration));
    // hr=(graphBuilder as IMediaFilter).SetSyncSource(null);
    // if (hr!=0)
    //   Log.Error("DVRMS2DIVX:FAILED:to SetSyncSource :0x{0:X}",hr);
    // Run the graph briefly so all filters fully negotiate their connections,
    // waiting (max ~2s) until at least 2 seconds of stream have been processed.
    hr = mediaControl.Run();
    if (hr != 0)
    {
      Log.Error("DVRMS2DIVX:FAILED:unable to start graph :0x{0:X}", hr);
      Cleanup();
      return(false);
    }
    int maxCount = 20;
    while (true)
    {
      long lCurrent;
      mediaSeeking.GetCurrentPosition(out lCurrent);
      double dpos = (double)lCurrent;
      dpos /= 10000000d;
      System.Threading.Thread.Sleep(100);
      if (dpos >= 2.0d)
      {
        break;
      }
      maxCount--;
      if (maxCount <= 0)
      {
        break;
      }
    }
    mediaControl.Stop();
    FilterState state;
    mediaControl.GetState(500, out state);
    // Legacy pattern: force finalization of released COM wrappers before the
    // encoder chain is removed and rebuilt.
    GC.Collect();
    GC.Collect();
    GC.Collect();
    GC.WaitForPendingFinalizers();
    // Rebuild the encoder chain from scratch for the real transcode run.
    graphBuilder.RemoveFilter(aviMuxer);
    graphBuilder.RemoveFilter(divxCodec);
    graphBuilder.RemoveFilter(mp3Codec);
    graphBuilder.RemoveFilter((IBaseFilter)fileWriterFilter);
    if (!AddCodecs(graphBuilder, info))
    {
      return(false);
    }
    // hr=(graphBuilder as IMediaFilter).SetSyncSource(null);
    // if (hr!=0)
    //   Log.Error("DVRMS2DIVX:FAILED:to SetSyncSource :0x{0:X}",hr);
    Log.Info("DVRMS2DIVX: start transcoding");
    hr = mediaControl.Run();
    if (hr != 0)
    {
      Log.Error("DVRMS2DIVX:FAILED:unable to start graph :0x{0:X}", hr);
      Cleanup();
      return(false);
    }
  }
  catch (Exception ex)
  {
    Log.Error("DVRMS2DIVX:Unable create graph: {0}", ex.Message);
    Cleanup();
    return(false);
  }
  return(true);
}