/// <summary>
/// Stops playback, tears the camera and VMR9 filters out of the graph and
/// releases the graph builder. Safe to call more than once.
/// </summary>
/// <returns>COMHelper.NOERROR on success; COMHelper.E_FAIL on any failure.</returns>
public HRESULT Dispose()
{
    try
    {
        // BUG FIX: guard against double-dispose — these fields are nulled
        // below, so a second call used to throw NullReferenceException
        // (swallowed into E_FAIL by the catch).
        m_MediaControl?.Stop();
        m_VideoWindow?.put_Visible(0);
        m_MediaControl = null;
        m_VideoWindow = null;

        if (_graphBuilder != null)
        {
            _graphBuilder.RemoveFilter(_filterCamera.Value);
            _graphBuilder.RemoveFilter(_filterVMR9Renderer.Value);
            _filterCamera.Dispose();
            _filterVMR9Renderer.Dispose();
            _filterVMR9Renderer = null;
            _filterCamera = null;
            Marshal.ReleaseComObject(_graphBuilder);
            _graphBuilder = null;
        }

        // NOTE: the forced GC.Collect() was removed — explicit collection in
        // production code is an anti-pattern; releasing the COM references
        // above is what actually frees the unmanaged resources.
        return COMHelper.NOERROR;
    }
    catch
    {
        // Best effort: report failure via HRESULT rather than throwing.
        return COMHelper.E_FAIL;
    }
}
/// <summary>
/// Releases the capture device: stops the worker, disposes the stop event,
/// detaches the source/grabber filters from the graph and drops all references.
/// </summary>
private void Release()
{
    // Drop the worker reference so the capture thread winds down.
    _worker = null;

    // Dispose the stop event, if one exists.
    _stopSignal?.Close();
    _stopSignal = null;

    // Tear the source and grabber filters out of the graph.
    if (_graph != null)
    {
        _graph.Abort();
        _graph.Disconnect(_sourceObject.GetPin(PinDirection.Output, 0));
        _graph.Disconnect(_grabberObject.GetPin(PinDirection.Input, 0));
        _graph.RemoveFilter(_sourceObject);
        _graph.RemoveFilter(_grabberObject);
    }

    // Release the remaining references.
    _graph = null;
    _sourceObject = null;
    _grabberObject = null;
    _grabber = null;
    _capGrabber = null;
    _control = null;
}
/// <summary>
/// Removes all filters downstream from a filter from the graph.
/// This is called only by DerenderGraph() to remove everything
/// from the graph except the devices and compressors.
/// </summary>
/// <param name="filter">Filter whose downstream chain is removed.</param>
void RemoveDownstream(IBaseFilter filter)
{
    // Get a pin enumerator off the filter
    var hr = filter.EnumPins(out IEnumPins pinEnum);
    if (pinEnum == null)
    {
        return;
    }
    pinEnum.Reset();
    if (hr == 0)
    {
        // Loop through each pin
        var pins = new IPin[1];
        do
        {
            // Get the next pin
            hr = pinEnum.Next(1, pins, out int _);
            if (hr == 0 && pins[0] != null)
            {
                // Get the pin it is connected to
                pins[0].ConnectedTo(out IPin pinTo);
                if (pinTo != null)
                {
                    // Is this an input pin?
                    hr = pinTo.QueryPinInfo(out PinInfo info);
                    if (hr == 0 && info.dir == PinDirection.Input)
                    {
                        // Recurse down this branch
                        RemoveDownstream(info.filter);

                        // Disconnect both ends of the connection.
                        _graphBuilder.Disconnect(pinTo);
                        _graphBuilder.Disconnect(pins[0]);

                        // Remove this filter, but keep the video compressor.
                        if (info.filter != _videoCompressorFilter)
                        {
                            _graphBuilder.RemoveFilter(info.filter);
                        }
                    }
                    // BUG FIX: QueryPinInfo may fail and leave info.filter null;
                    // Marshal.ReleaseComObject(null) throws ArgumentNullException.
                    if (info.filter != null)
                    {
                        Marshal.ReleaseComObject(info.filter);
                    }
                    Marshal.ReleaseComObject(pinTo);
                }
                Marshal.ReleaseComObject(pins[0]);
            }
        } while (hr == 0);
        Marshal.ReleaseComObject(pinEnum);
    }
}
/// <summary>
/// Inserts a SampleGrabber (configured for RGB24 video) behind the source
/// filter, terminated by a NullRenderer, and hooks up the per-sample callback.
/// If the grabber cannot be connected to the source it is removed again and
/// <c>videoGrabber</c> stays null.
/// </summary>
/// <param name="sourceF">Source filter to tap.</param>
/// <exception cref="COMException">A required DirectShow object could not be created/obtained.</exception>
private void InitVideoGrabber(IBaseFilter sourceF)
{
    videoGrabberFilter = new SampleGrabber() as IBaseFilter;
    if (videoGrabberFilter == null)
    {
        throw new COMException("Cannot create SampleGrabber");
    }
    int hr = graph.AddFilter(videoGrabberFilter, "Video Sample Grabber");
    DsError.ThrowExceptionForHR(hr);

    videoGrabber = videoGrabberFilter as ISampleGrabber;
    if (videoGrabber == null)
    {
        throw new COMException("Cannot obtain ISampleGrabber");
    }

    // Ask the grabber for RGB24 video samples.
    AMMediaType mt = new AMMediaType();
    try
    {
        mt.majorType = DirectShowLib.MediaType.Video;
        mt.subType = DirectShowLib.MediaSubType.RGB24;
        hr = videoGrabber.SetMediaType(mt);
        DsError.ThrowExceptionForHR(hr);
    }
    finally
    {
        // BUG FIX: the media type was leaked when SetMediaType failed,
        // because ThrowExceptionForHR threw before FreeAMMediaType ran.
        DsUtils.FreeAMMediaType(mt);
    }

    hr = ConnectSampleGrabber(graph, sourceF, videoGrabberFilter);
    if (0 != hr)
    {
        // Cannot connect the video grabber. Remove the filter from the graph.
        hr = graph.RemoveFilter(videoGrabberFilter);
        DsError.ThrowExceptionForHR(hr);
        Util.ReleaseComObject(ref videoGrabberFilter);
        videoGrabber = null;
        return;
    }

    // Terminate the grabber with a NullRenderer so samples are consumed.
    videoNullFilter = new NullRenderer() as IBaseFilter;
    if (videoNullFilter == null)
    {
        throw new COMException("Cannot create NullRenderer");
    }
    hr = graph.AddFilter(videoNullFilter, "Null Filter");
    DsError.ThrowExceptionForHR(hr);
    hr = Util.ConnectFilters(graph, videoGrabberFilter, videoNullFilter);
    DsError.ThrowExceptionForHR(hr);

    // Receive a callback for every sample that flows through the grabber.
    videoGrabberCB = new SampleGrabberCB();
    hr = videoGrabber.SetCallback(videoGrabberCB, (int)CBMethod.Sample);
    DsError.ThrowExceptionForHR(hr);
}
// TODO : add audio playback
/// <summary>
/// Starts playing the given capture source (and optional audio source).
/// Any previously playing source filters are removed from the graph first.
/// Passing a null <paramref name="source"/> stops showing capture.
/// </summary>
/// <param name="source">Video capture filter, or null to clear the display.</param>
/// <param name="audioSource">Optional audio capture filter; may be null.</param>
public void Play(IBaseFilter source, IBaseFilter audioSource)
{
    int hr = 0;

    if (currentState == PlayState.Running)
    {
        // Remove the filters of the currently running session.
        ShowCapture = false;
        if (this.source != null)
        {
            hr = graphBuilder.RemoveFilter(this.source);
            DsError.ThrowExceptionForHR(hr);
        }
        if (this.audioSource != null)
        {
            hr = graphBuilder.RemoveFilter(this.audioSource);
            DsError.ThrowExceptionForHR(hr);
        }
    }

    this.source = source;
    this.audioSource = audioSource;

    if (source == null)
    {
        ShowCapture = false;
        SourceWidth = 0;
        SourceHeight = 0;
    }
    else
    {
        // FIX: removed the redundant nested "if (source != null)" — this
        // branch is only reachable when source is non-null.
        hr = graphBuilder.AddFilter(source, "Video Capture");
        DsError.ThrowExceptionForHR(hr);
        hr = captureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, source, null, outputRenderer);
        DsError.ThrowExceptionForHR(hr);

        if (audioSource != null)
        {
            hr = graphBuilder.AddFilter(audioSource, "Audio Capture");
            DsError.ThrowExceptionForHR(hr);
            hr = captureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Audio, audioSource, null, null);
            DsError.ThrowExceptionForHR(hr);
        }

        InitVideoWidnow(Handle);

        hr = mediaControl.Run();
        DsError.ThrowExceptionForHR(hr);
        currentState = PlayState.Running;
    }
}
/// <summary>
/// Stops playback, strips the active source and grabber filters from the
/// graph and resets the play button caption.
/// </summary>
private void Stop()
{
    media_control.Stop();

    // Detach the active source configuration, if any.
    var config = active_config;
    active_config = null;
    if (config != null)
    {
        graph_builder.RemoveFilter(config.Filter);
    }

    graph_builder.RemoveFilter((IBaseFilter)grabber);

    bPlay.Text = "Play";
}
/// <summary>
/// Renders each unconnected audio output pin of the splitter through its own
/// DirectSound renderer (capped at 8 renderers), then mutes every stream
/// except the first.
/// </summary>
/// <param name="pGraphBuilder">Graph that receives the sound renderers.</param>
/// <param name="splitter">Splitter whose audio output pins are rendered.</param>
public void RenderAudio(IGraphBuilder pGraphBuilder, IBaseFilter splitter)
{
    IPin pPin;
    var nSkip = 0;
    while ((pPin = DsUtils.GetPin(splitter, PinDirection.Output, false, nSkip)) != null)
    {
        if (DsUtils.IsMediaTypeSupported(pPin, MediaType.Audio) == 0)
        {
            // this unconnected pin supports audio type!
            // let's render it!
            if (BuildSoundRenderer(pGraphBuilder))
            {
                // BuildSoundRenderer appended a renderer to _directSoundBaseFilters;
                // try to connect the splitter pin to that renderer's input.
                var pInputPin = DsUtils.GetPin(_directSoundBaseFilters.Last(), PinDirection.Input);
                var hr = pGraphBuilder.Connect(pPin, pInputPin);
                Marshal.ReleaseComObject(pInputPin);
                if (hr == DsHlp.S_OK || hr == DsHlp.VFW_S_PARTIAL_RENDER)
                {
                    // Cap the number of simultaneously rendered audio streams at 8.
                    if (_directSoundBaseFilters.Count == 8)
                    {
                        Marshal.ReleaseComObject(pPin);
                        break; // out of while cycle
                    }
                }
                else
                {
                    // Connection failed: undo the renderer we just added and
                    // skip this pin on the next GetPin call.
                    var pBaseFilter = _directSoundBaseFilters.Last();
                    pGraphBuilder.RemoveFilter(pBaseFilter);
                    Marshal.ReleaseComObject(pBaseFilter);
                    _basicAudioInterfaces.RemoveAt(_basicAudioInterfaces.Count - 1);
                    _directSoundBaseFilters.RemoveAt(_directSoundBaseFilters.Count - 1);
                    nSkip++;
                }
            }
            else
            {
                // could not create/add DirectSound filter
                Marshal.ReleaseComObject(pPin);
                break; // out of while cycle
            }
        }
        else
        {
            nSkip++;
        }
        Marshal.ReleaseComObject(pPin);
    } // end of while

    // The first stream stays audible; all others are muted
    // (-10000 is DirectShow's minimum volume, i.e. silence).
    _currentAudioStream = 0;
    _audioStreamsCount = _basicAudioInterfaces.Count;
    const int lVolume = -10000;
    for (var i = 1; i < _audioStreamsCount; i++)
    {
        _basicAudioInterfaces[i].put_Volume(lVolume);
    }
}
/// <summary>
/// Removes and releases every filter in the given graph.
/// </summary>
/// <param name="graphBuilder">Graph to empty; must not be null.</param>
/// <exception cref="ArgumentNullException"><paramref name="graphBuilder"/> is null.</exception>
public static void RemoveAllFilters(IGraphBuilder graphBuilder)
{
    // Validate before doing any work (the original allocated first).
    if (graphBuilder == null)
    {
        throw new ArgumentNullException("graphBuilder");
    }

    IEnumFilters enumFilters;
    // Typed list instead of the legacy non-generic ArrayList.
    var filtersArray = new List<IBaseFilter>();

    int hr = graphBuilder.EnumFilters(out enumFilters);
    DsError.ThrowExceptionForHR(hr);
    try
    {
        // Snapshot every filter first; removing while enumerating would
        // invalidate the enumerator.
        var filters = new IBaseFilter[1];
        IntPtr fetched = IntPtr.Zero;
        while (enumFilters.Next(filters.Length, filters, fetched) == 0)
        {
            filtersArray.Add(filters[0]);
        }
    }
    finally
    {
        // The enumerator is a COM object, so release it.
        Marshal.ReleaseComObject(enumFilters);
    }

    foreach (IBaseFilter filter in filtersArray)
    {
        hr = graphBuilder.RemoveFilter(filter);
        Marshal.ReleaseComObject(filter);
    }
}
/// <summary>
/// Removes (and fully releases) every filter in the graph except
/// <paramref name="source"/>.
/// </summary>
/// <param name="source">Filter to keep in the graph.</param>
private void ResetGraph(IBaseFilter source)
{
    IEnumFilters enumFilters;
    var hr = graphBuilder.EnumFilters(out enumFilters);
    DsError.ThrowExceptionForHR(hr);

    var filters = new List<IBaseFilter>();
    try
    {
        // Snapshot the filters first; removing while enumerating would
        // invalidate the enumerator.
        var array = new IBaseFilter[1];
        while (enumFilters.Next(array.Length, array, IntPtr.Zero) == 0)
        {
            filters.Add(array[0]);
        }
    }
    finally
    {
        // BUG FIX: the enumerator leaked if enumeration threw; release it
        // unconditionally.
        Marshal.ReleaseComObject(enumFilters);
    }

    foreach (var filter in filters.Where(filter => filter != source))
    {
        hr = graphBuilder.RemoveFilter(filter);
        DsError.ThrowExceptionForHR(hr);
        // FinalReleaseComObject drops the RCW's reference count to zero in
        // one call (equivalent to the removed ReleaseComObject loop).
        Marshal.FinalReleaseComObject(filter);
    }
}
/// <summary>
/// Creates the COM filter identified by <paramref name="guid"/> and adds it to
/// the graph under the given name. On any failure the partially created filter
/// is removed, released and null is returned.
/// </summary>
/// <param name="graph">Graph to add the filter to.</param>
/// <param name="guid">CLSID of the filter to instantiate.</param>
/// <param name="name">Display name for the filter in the graph.</param>
/// <returns>The added filter, or null if creation or insertion failed.</returns>
public static IBaseFilter AddFilterById(IGraphBuilder graph, Guid guid, string name)
{
    Ensure.IsNotNull(Log, graph, "graph is null");

    IBaseFilter filter = null;
    try
    {
        filter = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(guid));
        DsError.ThrowExceptionForHR(graph.AddFilter(filter, name));
    }
    catch (Exception ex)
    {
        // Roll back: take the half-added filter out of the graph and release it.
        if (filter != null)
        {
            graph.RemoveFilter(filter);
            Marshal.ReleaseComObject(filter);
            filter = null;
        }
        Log.Fatal(string.Format("Filter {0} is not added to the graph", name) + ex);
    }
    return filter;
}
/// <summary>
/// Loads subtitles into the MPC subtitle engine for the given graph, first
/// removing competing subtitle renderers (DirectVobSub, InternalScriptRenderer)
/// and disabling FFDShow subtitles.
/// </summary>
/// <param name="graphBuilder">Playback graph.</param>
/// <param name="filename">Media/subtitle file name passed to the engine.</param>
/// <returns>Result of MpcSubtitles.LoadSubtitles.</returns>
public bool LoadSubtitles(IGraphBuilder graphBuilder, string filename)
{
    LoadSettings();
    MpcSubtitles.SetDefaultStyle(ref this.defStyle, this.overrideASSStyle);
    if (selectionOff)
    {
        MpcSubtitles.SetShowForcedOnly(false);
    }
    else
    {
        // When auto-show is on, show all subtitles; otherwise only forced ones.
        MpcSubtitles.SetShowForcedOnly(!this.autoShow);
    }

    //remove DirectVobSub
    DirectVobSubUtil.RemoveFromGraph(graphBuilder);

    {
        //remove InternalScriptRenderer as it takes subtitle pin
        IBaseFilter isr = null;
        DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.InternalScriptRenderer, out isr);
        if (isr != null)
        {
            graphBuilder.RemoveFilter(isr);
            DirectShowUtil.ReleaseComObject(isr);
        }
    }

    FFDShowEngine.DisableFFDShowSubtitles(graphBuilder);

    // Render subtitles at the current GUI resolution.
    Size size = new Size(GUIGraphicsContext.Width, GUIGraphicsContext.Height);

    return (MpcSubtitles.LoadSubtitles(
        DirectShowUtil.GetUnmanagedDevice(GUIGraphicsContext.DX9Device),
        size, filename, graphBuilder, subPaths));
}
/// <summary>
/// Aborts the MPUrlSourceFilter download and removes the source filter from
/// the graph so a later teardown cannot dead-lock on it. Best effort: any
/// failure is swallowed, but the filter reference is always released.
/// </summary>
protected void CloseSource()
{
    if (m_graph == null)
    {
        return;
    }

    IBaseFilter sourceFilter = null;
    try
    {
        int result = m_graph.FindFilterByName(OnlineVideos.MPUrlSourceFilter.Downloader.FilterName, out sourceFilter);
        if (result == 0 && sourceFilter != null)
        {
            ((IAMOpenProgress)sourceFilter).AbortOperation();
            System.Threading.Thread.Sleep(100); // give it some time
            m_graph.RemoveFilter(sourceFilter); // remove the filter from the graph to prevent lockup later
        }
    }
    catch (Exception)
    {
        // FIX: the original declared an unused "ex" variable (CS0168).
        // Deliberate best-effort swallow: failure to abort/remove must not
        // prevent shutdown.
    }
    finally
    {
        if (sourceFilter != null)
        {
            Marshal.FinalReleaseComObject(sourceFilter);
        }
    }
}
/// <summary>
/// Renders the splitter's audio output pin through a single DirectSound
/// renderer and collects the selectable audio streams via IAMStreamSelect.
/// On connection failure the renderer is rolled back out of the graph.
/// </summary>
/// <param name="pGraphBuilder">Graph that receives the sound renderer.</param>
/// <param name="splitter">Splitter providing the audio output pin.</param>
public void RenderAudio(IGraphBuilder pGraphBuilder, IBaseFilter splitter)
{
    var pPin = DsUtils.GetPin(splitter, PinDirection.Output, new[] { MediaType.Audio });
    if (pPin != null)
    {
        _streamSelect = splitter as IAMStreamSelect;
        if (_streamSelect != null && BuildSoundRenderer(pGraphBuilder))
        {
            // BuildSoundRenderer populated _directSoundBaseFilter; try to
            // connect the splitter's audio pin to the renderer's input.
            var pInputPin = DsUtils.GetPin(_directSoundBaseFilter, PinDirection.Input);
            var hr = pGraphBuilder.Connect(pPin, pInputPin);
            Marshal.ReleaseComObject(pInputPin);
            if (hr == DsHlp.S_OK || hr == DsHlp.VFW_S_PARTIAL_RENDER)
            {
                // Connected: remember every selectable audio stream.
                _audioStreams.AddRange(_streamSelect.GetSelectableStreams().Where(s => s.MajorType == MediaType.Audio));
            }
            else
            {
                // Connection failed: undo the renderer we just added.
                pGraphBuilder.RemoveFilter(_directSoundBaseFilter);
                Marshal.FinalReleaseComObject(_directSoundBaseFilter);
                _directSoundBaseFilter = null;
                _basicAudio = null;
            }
        }
        Marshal.ReleaseComObject(pPin);
    }
}
/// <summary>
/// Removes every filter from the graph and releases each one completely
/// (looping ReleaseComObject until the RCW reference count hits zero).
/// </summary>
public void RemoveAllFilters()
{
    IEnumFilters enumFilters;
    int hr = graph.EnumFilters(out enumFilters);
    DsError.ThrowExceptionForHR(hr);

    // Typed list instead of the legacy non-generic ArrayList.
    var filtersArray = new List<IBaseFilter>();
    try
    {
        // Snapshot the filters first; removing while enumerating would
        // invalidate the enumerator.
        IBaseFilter[] filters = new IBaseFilter[1];
        while (enumFilters.Next(filters.Length, filters, IntPtr.Zero) == 0)
        {
            filtersArray.Add(filters[0]);
        }
    }
    finally
    {
        // BUG FIX: the enumerator COM object was never released (leak).
        Marshal.ReleaseComObject(enumFilters);
    }

    foreach (IBaseFilter filter in filtersArray)
    {
        hr = graph.RemoveFilter(filter);
        while (Marshal.ReleaseComObject(filter) > 0)
        {
        }
    }
}
/// <summary>
/// Creates a DirectSound renderer, adds it to the graph and returns it paired
/// with its IBasicAudio interface. Returns null (with a trace warning) when
/// any step fails; partially added filters are rolled back and released.
/// </summary>
/// <param name="pGraphBuilder">Graph the renderer is added to.</param>
/// <returns>Filter plus IBasicAudio, or null on failure.</returns>
public static Tuple <IBaseFilter, IBasicAudio> AddSoundRenderer(this IGraphBuilder pGraphBuilder)
{
    var baseFilter = DsUtils.GetFilter(Clsid.DSoundRender, false);
    if (baseFilter == null)
    {
        TraceSink.GetTraceSink().TraceWarning("Could not instantiate DirectSound Filter.");
        return null;
    }

    // add the DirectSound filter to the graph
    if (DsHlp.FAILED(pGraphBuilder.AddFilter(baseFilter, "DirectSound Filter")))
    {
        Marshal.FinalReleaseComObject(baseFilter);
        TraceSink.GetTraceSink().TraceWarning("Could not add DirectSound Filter to the filter graph.");
        return null;
    }

    // QueryInterface for the volume/balance control interface.
    if (baseFilter is IBasicAudio basicAudio)
    {
        return new Tuple <IBaseFilter, IBasicAudio>(baseFilter, basicAudio);
    }

    // No IBasicAudio: roll the filter back out of the graph.
    pGraphBuilder.RemoveFilter(baseFilter);
    Marshal.FinalReleaseComObject(baseFilter);
    TraceSink.GetTraceSink().TraceWarning("Could not get IBasicAudio interface.");
    return null;
}
/// <summary>
/// Creates the COM filter identified by <paramref name="guid"/> and adds it to
/// the graph under the given name. On any failure the partially created filter
/// is removed, released and null is returned.
/// </summary>
/// <param name="graph">Graph to add the filter to.</param>
/// <param name="guid">CLSID of the filter to instantiate.</param>
/// <param name="name">Display name for the filter in the graph.</param>
/// <returns>The added filter, or null if creation or insertion failed.</returns>
public static IBaseFilter AddFilterById(IGraphBuilder graph, Guid guid, string name)
{
    Ensure.IsNotNull(Log, graph, "graph is null");

    IBaseFilter filter = null;
    try
    {
        filter = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(guid));
        DsError.ThrowExceptionForHR(graph.AddFilter(filter, name));
    }
    catch (Exception ex)
    {
        // Roll back: take the half-added filter out of the graph and release it.
        if (filter != null)
        {
            graph.RemoveFilter(filter);
            Marshal.ReleaseComObject(filter);
            filter = null;
        }
        Log.Fatal(string.Format("Filter {0} is not added to the graph", name) + ex);
    }
    return (filter);
}
/// <summary>
/// Tears down the audio sample grabber: signals the analyzer thread to stop,
/// removes the grabber filter from the graph, releases COM references, and
/// clears all cached analysis buffers under their locks.
/// </summary>
protected void ReleaseAudioSampleGrabber()
{
    try
    {
        if (sampleAnalyzerMustStop != null)
        {
            sampleAnalyzerMustStop.Set(); // This will cause the thread to stop
        }
        if (sampleAnalyzerThread != null)
        {
            // Give the analyzer thread a short grace period to exit.
            sampleAnalyzerThread.Join(200);
        }

        IBaseFilter filter = sampleGrabber as IBaseFilter;
        if (filter != null)
        {
            // mediaControl doubles as the graph builder here.
            IGraphBuilder graphBuilder = (mediaControl as IGraphBuilder);
            if (graphBuilder != null)
            {
                int hr = graphBuilder.RemoveFilter(filter);
                DsError.ThrowExceptionForHR(hr);
            }
            Marshal.ReleaseComObject(filter);
            sampleGrabber = null;
        }
    }
    catch (Exception ex)
    {
        Logger.LogException(ex);
    }

    if (rotEntry != null)
    {
        rotEntry.Dispose();
        rotEntry = null;
    }

    // Clear the analysis buffers under their respective locks so readers
    // never observe a half-cleared state.
    lock (_vuLock)
    {
        _vuMeterData = null;
    }
    lock (_waveformLock)
    {
        _waveformData = null;
    }
    lock (_spectrogramLock)
    {
        _spectrogramData = null;
    }

    _actualAudioFormat = null;
    sampleGrabberConfigured.Reset();
}
/// <summary>
/// Frees the audio/video codecs.
/// </summary>
protected virtual void FreeCodecs()
{
    if (_streamFilter != null)
    {
        // Detach the stream filter from the graph before disposing it.
        _graphBuilder?.RemoveFilter(_streamFilter);
        FilterGraphTools.TryDispose(ref _streamFilter);
    }

    // If we opened an own Stream, dispose it here
    FilterGraphTools.TryDispose(ref _resourceStream);
}
/// <summary>
/// Removes all filters from a DirectShow graph
/// </summary>
/// <param name="graphBuilder">The DirectShow graph to remove all the filters from</param>
protected static void RemoveAllFilters(IGraphBuilder graphBuilder)
{
    // FIX: a second null check that threw ArgumentNullException followed this
    // early return in the original — it was unreachable dead code.
    if (graphBuilder == null)
    {
        return;
    }

    IEnumFilters enumFilters;

    /* The list of filters from the DirectShow graph */
    var filtersArray = new List <IBaseFilter>();

    /* Gets the filter enumerator from the graph */
    int hr = graphBuilder.EnumFilters(out enumFilters);
    DsError.ThrowExceptionForHR(hr);
    try
    {
        /* This array is filled with reference to a filter */
        var filters = new IBaseFilter[1];
        IntPtr fetched = IntPtr.Zero;

        /* Get reference to all the filters */
        while (enumFilters.Next(filters.Length, filters, fetched) == 0)
        {
            /* Add the filter to our array */
            filtersArray.Add(filters[0]);
        }
    }
    finally
    {
        /* Enum filters is a COM, so release that */
        Marshal.ReleaseComObject(enumFilters);
    }

    /* Loop over, remove from the graph and fully release each COM object */
    for (int i = 0; i < filtersArray.Count; i++)
    {
        graphBuilder.RemoveFilter(filtersArray[i]);
        while (Marshal.ReleaseComObject(filtersArray[i]) > 0)
        {
        }
    }
}
/// <summary>
/// Loads the provided file into a Windows Media ASF Filter for reading.
/// If the filter already exists, remove it from the graph, because...
///
/// The WM ASF Reader only allows you to Load 1 file per instance. So re-create each time
/// through. See IFileSourceFilter documentation.
/// </summary>
/// <param name="fileName">Path of the ASF/WMV file to load.</param>
private void CreateWMASFReader(string fileName)
{
    if (wmASFReader != null)
    {
        iGB.RemoveFilter(wmASFReader);
        // BUG FIX: the old reader instance was leaked — release the COM
        // reference after removing it from the graph.
        Marshal.ReleaseComObject(wmASFReader);
        wmASFReader = null;
    }
    wmASFReader = Filter.CreateBaseFilterByName("WM ASF Reader");
    iGB.AddFilter(wmASFReader, "WM ASF Reader");
    _AMMediaType wmvmt = new _AMMediaType();
    ((IFileSourceFilter)wmASFReader).Load(fileName, ref wmvmt);
}
/// <summary>
/// Recursively removes all filters downstream of <paramref name="filter"/>,
/// disconnecting pins as it goes, but keeps the video and audio compressors
/// in the graph.
/// </summary>
/// <param name="filter">Filter whose downstream chain is removed.</param>
/// <param name="removeFirstFilter">NOTE(review): not referenced in the body;
/// apparently kept for signature compatibility — confirm with callers.</param>
protected void removeDownstream(IBaseFilter filter, bool removeFirstFilter)
{
    // Get a pin enumerator off the filter
    IEnumPins pinEnum;
    int hr = filter.EnumPins(out pinEnum);
    pinEnum.Reset();
    if ((hr == 0) && (pinEnum != null))
    {
        // Loop through each pin
        IPin[] pins = new IPin[1];
        int f;
        do
        {
            // Get the next pin
            hr = pinEnum.Next(1, pins, out f);
            if ((hr == 0) && (pins[0] != null))
            {
                // Get the pin it is connected to
                IPin pinTo = null;
                pins[0].ConnectedTo(out pinTo);
                if (pinTo != null)
                {
                    // Is this an input pin?
                    PinInfo info = new PinInfo();
                    hr = pinTo.QueryPinInfo(out info);
                    if ((hr == 0) && (info.dir == (PinDirection.Input)))
                    {
                        // Recurse down this branch
                        removeDownstream(info.filter, true);

                        // Disconnect
                        graphBuilder.Disconnect(pinTo);
                        graphBuilder.Disconnect(pins[0]);

                        // Remove this filter
                        // but don't remove the video or audio compressors
                        if ((info.filter != videoCompressorFilter) && (info.filter != audioCompressorFilter))
                        {
                            graphBuilder.RemoveFilter(info.filter);
                        }
                    }
                    Marshal.ReleaseComObject(info.filter);
                    Marshal.ReleaseComObject(pinTo);
                }
                Marshal.ReleaseComObject(pins[0]);
            }
        } while (hr == 0);
        Marshal.ReleaseComObject(pinEnum);
        pinEnum = null;
    }
}
/// <summary>
/// Removes the video renderer filter from the graph: finds the renderer and
/// its input pin, disconnects both ends of the connection, then removes the
/// filter. All COM references are released in the finally block.
/// (Method name kept as-is for caller compatibility.)
/// </summary>
/// <param name="graphBuilder">Graph to remove the video renderer from.</param>
public static void tビデオレンダラをグラフから除去する(IGraphBuilder graphBuilder)
{
    int hr = 0;

    IBaseFilter videoRenderer = null;
    IPin renderInputPin = null;
    IPin connectedOutputPin = null;
    try
    {
        // Find the video renderer and its input pin.
        CDirectShow.tビデオレンダラとその入力ピンを探して返す(graphBuilder, out videoRenderer, out renderInputPin);
        if (videoRenderer == null || renderInputPin == null)
        {
            return; // not found
        }

        #region [ Find the upstream output pin connected to renderInputPin. ]
        //-----------------
        renderInputPin.ConnectedTo(out connectedOutputPin);
        //-----------------
        #endregion
        if (connectedOutputPin == null)
        {
            return; // not connected
        }

        // Disconnect the upstream output pin and the renderer's input pin.
        // Note: both sides must be disconnected, otherwise the filter is not
        // detached from the graph.
        renderInputPin.Disconnect();
        connectedOutputPin.Disconnect();

        // Remove the video renderer from the graph.
        graphBuilder.RemoveFilter(videoRenderer);
    }
    finally
    {
        CCommon.tReleaseComObject(ref connectedOutputPin);
        CCommon.tReleaseComObject(ref renderInputPin);
        CCommon.tReleaseComObject(ref videoRenderer);
    }
}
/// <summary>
/// Removes and releases every filter in the graph. The enumerator is Reset
/// after each removal because removing a filter invalidates it.
/// </summary>
/// <param name="graphBuilder">Graph to empty.</param>
private void CleanUpGraph(IGraphBuilder graphBuilder)
{
    IEnumFilters enumFilters = null;
    IBaseFilter[] filters = new IBaseFilter[1];

    int hr = graphBuilder.EnumFilters(out enumFilters);
    Marshal.ThrowExceptionForHR(hr);
    try
    {
        while (enumFilters.Next(1, filters, IntPtr.Zero) == 0)
        {
            hr = graphBuilder.RemoveFilter(filters[0]);
            Marshal.ThrowExceptionForHR(hr);
            Marshal.ReleaseComObject(filters[0]);
            // Removing a filter invalidates the enumerator; start over.
            enumFilters.Reset();
        }
    }
    finally
    {
        // BUG FIX: the enumerator leaked whenever RemoveFilter failed and
        // ThrowExceptionForHR threw; release it unconditionally.
        Marshal.ReleaseComObject(enumFilters);
    }
}
/// <summary>
/// Removes the VMR7 filter from the graph and frees up all unmanaged resources.
/// No-op when VMR7 was never initialized.
/// </summary>
public void RemoveVMR7()
{
    if (vmr7intialized)
    {
        int result;
        Log.Info("VMR7Helper:RemoveVMR7");

        //if (m_mixerBitmap != null)
        // while ((result=DirectShowUtil.ReleaseComObject(m_mixerBitmap))>0);
        m_mixerBitmap = null;

        // if (quality != null)
        // while ((result=DirectShowUtil.ReleaseComObject(quality))>0);
        quality = null;

        if (VMR7Filter != null)
        {
            //while ((result=DirectShowUtil.ReleaseComObject(VMR7Filter))>0);
            try
            {
                // Best effort: log a non-zero HRESULT rather than throwing.
                result = m_graphBuilder.RemoveFilter(VMR7Filter);
                if (result != 0)
                {
                    Log.Info("VMR7Helper:RemoveFilter():{0}", result);
                }
            }
            catch (Exception) {}

            // Fully release the VMR7 COM object (loop until the RCW
            // reference count reaches zero).
            while ((result = DirectShowUtil.ReleaseComObject(VMR7Filter)) > 0)
            {
                ;
            }
            if (result != 0)
            {
                Log.Info("VMR7Helper:ReleaseComObject():{0}", result);
            }
            m_graphBuilder = null;
        }
        vmr7intialized = false;
        g_vmr7 = null;
    }
}
/// <summary>
/// Removes a filter from the graph and from the editor's bookkeeping
/// (history, placement, filter list), optionally recalculating connection
/// paths afterwards. COM and generic exceptions abort the removal with a
/// message to the user.
/// </summary>
/// <param name="f">Filter wrapper to remove.</param>
/// <param name="recalcPaths">True to recalculate connection paths afterwards.</param>
public void RemoveFilter(Filter f, bool recalcPaths)
{
    try
    {
        if (f.sampleGrabberForm != null)
        {
            f.sampleGrabberForm.Close(); //deletes callback and links to the SG form
        }
        if (!disconnecting)
        {
            // Stop the filter before pulling it out of a live graph.
            f.BaseFilter.Stop();
            int hr = graphBuilder.RemoveFilter(f.BaseFilter);
            DsError.ThrowExceptionForHR(hr);
        }
        f.ReleaseBaseFilter();
    }
    catch (COMException e)
    {
        ShowCOMException(e, "Error removing filter " + f.Name);
        return;
    }
    catch (Exception e)
    {
        MessageBox.Show(e.Message, "Exception caught while removing filter " + f.Name);
        return;
    }

    // Stop UI animations for the filter and each of its pins.
    Form.StopAnimation(f);
    foreach (Pin p in f.Pins)
    {
        Form.StopAnimation(p);
    }

    history.RemoveFilter(f.Name);
    PlaceFilter(f, false);
    f.JoinGraph(null, disconnecting);
    filters.Remove(f);
    if (recalcPaths)
    {
        RecalcPaths();
    }
}
/// <summary>
/// Disconnects and removes whatever filter is currently attached to the
/// source's "Audio" output pin, then inserts the named audio decoder into
/// the graph.
/// </summary>
/// <param name="sourceFilter">Source/splitter filter exposing an "Audio" pin.</param>
/// <param name="audioDecoder">Decoder filter name; method is a no-op when null/empty.</param>
protected virtual void InsertAudioFilter(IBaseFilter sourceFilter, string audioDecoder)
{
    if (string.IsNullOrEmpty(audioDecoder))
    {
        return;
    }

    // Set Audio Codec
    // Remove Pin
    var audioPinFrom = DirectShowLib.DsFindPin.ByName(sourceFilter, "Audio");
    IPin audioPinTo;
    if (audioPinFrom != null)
    {
        int hr = audioPinFrom.ConnectedTo(out audioPinTo);
        if (hr >= 0 && audioPinTo != null)
        {
            // Identify the downstream filter so it can be removed.
            PinInfo pInfo;
            audioPinTo.QueryPinInfo(out pInfo);
            FilterInfo fInfo;
            pInfo.filter.QueryFilterInfo(out fInfo);

            // Detach the downstream filter completely before removing it.
            DirectShowUtil.DisconnectAllPins(m_graph, pInfo.filter);
            m_graph.RemoveFilter(pInfo.filter);

            // FreePinInfo releases pInfo.filter; fInfo.pGraph is released
            // explicitly.
            DsUtils.FreePinInfo(pInfo);
            Marshal.ReleaseComObject(fInfo.pGraph);
            Marshal.ReleaseComObject(audioPinTo);
            audioPinTo = null;
        }
        Marshal.ReleaseComObject(audioPinFrom);
        audioPinFrom = null;
    }
    DirectShowUtil.AddFilterToGraph(m_graph, audioDecoder, Guid.Empty);
}
/// <summary>
/// Removes filters from the graph. When <paramref name="filterName"/> is
/// non-empty only filters with exactly that name are removed; otherwise every
/// filter is removed. Failures on individual filters are traced and skipped.
/// </summary>
/// <param name="graphBuilder">Graph to operate on; no-op when null.</param>
/// <param name="filterName">Exact name to match, or null/empty for all filters.</param>
public static void RemoveFilters(IGraphBuilder graphBuilder, string filterName)
{
    if (graphBuilder == null)
    {
        return;
    }

    int hr = 0;
    IEnumFilters enumFilters = null;
    var filtersArray = new List<IBaseFilter>();

    try
    {
        hr = graphBuilder.EnumFilters(out enumFilters);
        DsError.ThrowExceptionForHR(hr);

        // Snapshot the filters first; removing while enumerating would
        // invalidate the enumerator.
        IBaseFilter[] filters = new IBaseFilter[1];
        IntPtr fetched = IntPtr.Zero;
        while (enumFilters.Next(filters.Length, filters, fetched) == 0)
        {
            filtersArray.Add(filters[0]);
        }

        foreach (IBaseFilter filter in filtersArray)
        {
            FilterInfo info;
            filter.QueryFilterInfo(out info);
            Marshal.ReleaseComObject(info.pGraph);
            try
            {
                if (!String.IsNullOrEmpty(filterName))
                {
                    if (String.Equals(info.achName, filterName))
                    {
                        DisconnectAllPins(graphBuilder, filter);
                        hr = graphBuilder.RemoveFilter(filter);
                        DsError.ThrowExceptionForHR(hr);
                        Marshal.ReleaseComObject(filter);
                        // BUG FIX: Trace.WriteLine(string, string) treats the
                        // second argument as a trace CATEGORY, not a format
                        // argument — the name was never substituted into the
                        // message. Format explicitly instead.
                        System.Diagnostics.Trace.WriteLine(string.Format("Remove filter from graph: {0}", info.achName));
                    }
                }
                else
                {
                    DisconnectAllPins(graphBuilder, filter);
                    hr = graphBuilder.RemoveFilter(filter);
                    DsError.ThrowExceptionForHR(hr);
                    int i = Marshal.ReleaseComObject(filter);
                    System.Diagnostics.Trace.WriteLine(string.Format("Remove filter from graph: {0} {1}", info.achName, i));
                }
            }
            catch (Exception error)
            {
                System.Diagnostics.Trace.TraceError("Remove of filter: {0}, failed with code (HR): {1}, explanation: {2}", info.achName, hr.ToString(), error.Message);
            }
        }
    }
    catch (Exception)
    {
        return;
    }
    finally
    {
        if (enumFilters != null)
        {
            Marshal.ReleaseComObject(enumFilters);
        }
    }
}
/// <summary>
/// Loads subtitles via DirectVobSub: removes the competing
/// InternalScriptRenderer, inserts DirectVobSub into the graph, applies the
/// configured text style, then enumerates embedded subtitle streams from the
/// splitter (Haali / LAV) and counts external subtitle languages.
/// </summary>
/// <param name="graphBuilder">Playback graph.</param>
/// <param name="filename">Subtitle/media file name handed to DirectVobSub.</param>
/// <returns>False when DirectVobSub could not be added; true otherwise.</returns>
public bool LoadSubtitles(IGraphBuilder graphBuilder, string filename)
{
    FreeSubtitles();
    LoadSettings();

    {
        //remove InternalScriptRenderer as it takes subtitle pin
        IBaseFilter isr = null;
        DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.InternalScriptRenderer, out isr);
        if (isr != null)
        {
            graphBuilder.RemoveFilter(isr);
            DirectShowUtil.ReleaseComObject(isr);
        }
    }

    vobSub = (IDirectVobSub)DirectVobSubUtil.AddToGraph(graphBuilder);
    if (vobSub == null)
        return false;

    {
        //set style
        Log.Debug("VideoPlayerVMR9: Setting DirectVobsub parameters");

        LOGFONT logFont = new LOGFONT();
        int txtcolor;
        bool fShadow, fOutLine, fAdvancedRenderer = false;
        int size = Marshal.SizeOf(typeof(LOGFONT));
        vobSub.get_TextSettings(logFont, size, out txtcolor, out fShadow, out fOutLine, out fAdvancedRenderer);

        // NOTE(review): bold flag appears inverted (fontIsBold -> Regular);
        // confirm whether this is intentional.
        FontStyle fontStyle = defStyle.fontIsBold ? FontStyle.Regular : FontStyle.Bold;
        Font Subfont = new Font(defStyle.fontName, defStyle.fontSize, fontStyle, GraphicsUnit.Point, (byte)defStyle.fontCharset);
        Subfont.ToLogFont(logFont);

        fShadow = defStyle.shadow > 0;
        fOutLine = defStyle.isBorderOutline;
        vobSub.put_TextSettings(logFont, size, defStyle.fontColor, fShadow, fOutLine, fAdvancedRenderer);

        vobSub.put_FileName(filename);

        bool fBuffer, fOnlyForced, fPolygonize;
        vobSub.get_VobSubSettings(out fBuffer, out fOnlyForced, out fPolygonize);
        vobSub.put_VobSubSettings(fBuffer, !this.autoShow, fPolygonize);
    }

    {
        //load sub streams
        // Locate the splitter (Haali, then LAV source, then LAV) to query
        // its embedded subtitle streams.
        IBaseFilter hms = null;
        DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.HaaliGuid, out hms);
        if (hms == null)
            DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.LAVFilterSource, out hms);
        if (hms == null)
            DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.LAVFilter, out hms);

        embeddedSelector = hms as IAMStreamSelect;
        if (embeddedSelector != null)
        {
            AddStreams(embeddedSelector);
        }

        vobSub.get_LanguageCount(out extCount);
        if (intSubs.Count > 0)
        {
            //if there are embedded subtitles,
            //last stream of directvobsub is currently selected embedded subtitle
            extCount--;
        }
    }

    FFDShowEngine.DisableFFDShowSubtitles(graphBuilder);

    Current = 0;
    if (selectionOff)
    {
        Enable = false;
    }
    else
    {
        Enable = autoShow;
    }
    return true;
}
/// <summary>
/// Opens the media by initializing the DirectShow graph: creates the graph,
/// adds the LAV Splitter source, replaces the auto-connected video/audio
/// decoders with the configured ones, inserts the preferred audio/video
/// renderers, then renders every source pin. Failure frees resources and
/// fires MediaFailed.
/// </summary>
protected virtual void OpenSource()
{
    /* Make sure we clean up any remaining mess */
    FreeResources();

    if (m_sourceUri == null)
    {
        return;
    }

    string fileSource = m_sourceUri.OriginalString;

    if (string.IsNullOrEmpty(fileSource))
    {
        return;
    }

    try
    {
        /* Creates the GraphBuilder COM object */
        m_graph = new FilterGraphNoThread() as IGraphBuilder;

        if (m_graph == null)
        {
            throw new Exception("Could not create a graph");
        }

        var filterGraph = m_graph as IFilterGraph2;

        if (filterGraph == null)
        {
            throw new Exception("Could not QueryInterface for the IFilterGraph2");
        }

        IBaseFilter sourceFilter;
        int hr;

        //var file = System.IO.File.CreateText(@"M:\DirectShowLog.txt");
        //filterGraph.SetLogFile((file.BaseStream as System.IO.FileStream).SafeFileHandle.DangerousGetHandle());

        // Set LAV Splitter as the source and give it our site for callbacks.
        LAVSplitterSource reader = new LAVSplitterSource();
        sourceFilter = reader as IBaseFilter;
        var objectWithSite = reader as IObjectWithSite;
        if (objectWithSite != null)
        {
            objectWithSite.SetSite(this);
        }

        hr = m_graph.AddFilter(sourceFilter, SplitterSource);
        DsError.ThrowExceptionForHR(hr);

        IFileSourceFilter interfaceFile = (IFileSourceFilter)sourceFilter;
        hr = interfaceFile.Load(fileSource, null);
        DsError.ThrowExceptionForHR(hr);

        // Set Video Codec:
        // remove whatever filter auto-connected to the Video pin, then add
        // the configured video decoder.
        var videoPinFrom = DirectShowLib.DsFindPin.ByName(sourceFilter, "Video");
        IPin videoPinTo;
        if (videoPinFrom != null)
        {
            hr = videoPinFrom.ConnectedTo(out videoPinTo);
            if (hr >= 0 && videoPinTo != null)
            {
                PinInfo pInfo;
                videoPinTo.QueryPinInfo(out pInfo);
                FilterInfo fInfo;
                pInfo.filter.QueryFilterInfo(out fInfo);

                DirectShowUtil.DisconnectAllPins(m_graph, pInfo.filter);
                m_graph.RemoveFilter(pInfo.filter);

                DsUtils.FreePinInfo(pInfo);
                Marshal.ReleaseComObject(fInfo.pGraph);
                Marshal.ReleaseComObject(videoPinTo);
                videoPinTo = null;
            }
            Marshal.ReleaseComObject(videoPinFrom);
            videoPinFrom = null;
        }
        DirectShowUtil.AddFilterToGraph(m_graph, VideoDecoder, Guid.Empty);

        // Set Audio Codec:
        // same replacement procedure for the Audio pin.
        var audioPinFrom = DirectShowLib.DsFindPin.ByName(sourceFilter, "Audio");
        IPin audioPinTo;
        if (audioPinFrom != null)
        {
            hr = audioPinFrom.ConnectedTo(out audioPinTo);
            if (hr >= 0 && audioPinTo != null)
            {
                PinInfo pInfo;
                audioPinTo.QueryPinInfo(out pInfo);
                FilterInfo fInfo;
                pInfo.filter.QueryFilterInfo(out fInfo);

                DirectShowUtil.DisconnectAllPins(m_graph, pInfo.filter);
                m_graph.RemoveFilter(pInfo.filter);

                DsUtils.FreePinInfo(pInfo);
                Marshal.ReleaseComObject(fInfo.pGraph);
                Marshal.ReleaseComObject(audioPinTo);
                audioPinTo = null;
            }
            Marshal.ReleaseComObject(audioPinFrom);
            audioPinFrom = null;
        }
        DirectShowUtil.AddFilterToGraph(m_graph, AudioDecoder, Guid.Empty);

        /* Add our prefered audio renderer */
        InsertAudioRenderer(AudioRenderer);

        IBaseFilter renderer = CreateVideoRenderer(VideoRenderer, m_graph, 2);

        /* We will want to enum all the pins on the source filter */
        IEnumPins pinEnum;
        hr = sourceFilter.EnumPins(out pinEnum);
        DsError.ThrowExceptionForHR(hr);

        IntPtr fetched = IntPtr.Zero;
        IPin[] pins = { null };

        /* Counter for how many pins successfully rendered */
        int pinsRendered = 0;

        if (VideoRenderer == VideoRendererType.VideoMixingRenderer9)
        {
            var mixer = renderer as IVMRMixerControl9;
            if (mixer != null)
            {
                // Force the VMR9 mixer to an RGB render target.
                // NOTE(review): SetMixingPrefs is commented out, so this
                // computation currently has no effect — confirm intent.
                VMR9MixerPrefs dwPrefs;
                mixer.GetMixingPrefs(out dwPrefs);
                dwPrefs &= ~VMR9MixerPrefs.RenderTargetMask;
                dwPrefs |= VMR9MixerPrefs.RenderTargetRGB;
                //mixer.SetMixingPrefs(dwPrefs);
            }
        }

        /* Test using FFDShow Video Decoder Filter
         * var ffdshow = new FFDShow() as IBaseFilter;
         *
         * if (ffdshow != null)
         *     m_graph.AddFilter(ffdshow, "ffdshow");
         */

        /* Loop over each pin of the source filter */
        while (pinEnum.Next(pins.Length, pins, fetched) == 0)
        {
            if (filterGraph.RenderEx(pins[0], AMRenderExFlags.RenderToExistingRenderers, IntPtr.Zero) >= 0)
            {
                pinsRendered++;
            }
            Marshal.ReleaseComObject(pins[0]);
        }

        Marshal.ReleaseComObject(pinEnum);
        Marshal.ReleaseComObject(sourceFilter);

        if (pinsRendered == 0)
        {
            throw new Exception("Could not render any streams from the source Uri");
        }

#if DEBUG
        /* Adds the GB to the ROT so we can view
         * it in graphedit */
        m_dsRotEntry = new DsROTEntry(m_graph);
#endif

        /* Configure the graph in the base class */
        SetupFilterGraph(m_graph);
        HasVideo = true;

        /* Sets the NaturalVideoWidth/Height */
        //SetNativePixelSizes(renderer);
    }
    catch (Exception ex)
    {
        /* This exception will happen usually if the media does
         * not exist or could not open due to not having the
         * proper filters installed */
        FreeResources();

        /* Fire our failed event */
        InvokeMediaFailed(new MediaFailedEventArgs(ex.Message, ex));
    }

    // NOTE(review): this fires even when opening failed (it is outside the
    // try/catch) — confirm that is intended.
    InvokeMediaOpened();
}
/// <summary>
/// Removes all filters from a DirectShow graph
/// </summary>
/// <param name="graphBuilder">The DirectShow graph to remove all the filters from</param>
protected static void RemoveAllFilters(IGraphBuilder graphBuilder)
{
    // FIX: a second null check that threw ArgumentNullException followed this
    // early return in the original — it was unreachable dead code.
    if (graphBuilder == null)
        return;

    IEnumFilters enumFilters;

    /* The list of filters from the DirectShow graph */
    var filtersArray = new List<IBaseFilter>();

    /* Gets the filter enumerator from the graph */
    int hr = graphBuilder.EnumFilters(out enumFilters);
    DsError.ThrowExceptionForHR(hr);

    try
    {
        /* This array is filled with reference to a filter */
        var filters = new IBaseFilter[1];
        IntPtr fetched = IntPtr.Zero;

        /* Get reference to all the filters */
        while (enumFilters.Next(filters.Length, filters, fetched) == 0)
        {
            /* Add the filter to our array */
            filtersArray.Add(filters[0]);
        }
    }
    finally
    {
        /* Enum filters is a COM, so release that */
        Marshal.ReleaseComObject(enumFilters);
    }

    /* Loop over, remove from the graph and fully release each COM object */
    for (int i = 0; i < filtersArray.Count; i++)
    {
        graphBuilder.RemoveFilter(filtersArray[i]);
        while (Marshal.ReleaseComObject(filtersArray[i]) > 0)
        {
        }
    }
}
/// <summary>
/// Opens the media by initializing the DirectShow graph: builds the graph,
/// adds video/audio renderers, lets DirectShow pick a source filter for the
/// Uri, renders all of the source's output pins, and fires MediaOpened (or
/// MediaFailed on any error).
/// </summary>
protected virtual void OpenSource()
{
  string fileSource = m_sourceUri.OriginalString;

  /* Make sure we clean up any remaining mess */
  FreeResources();

  try
  {
    /* Creates the GraphBuilder COM object */
    m_graph = new FilterGraph() as IGraphBuilder;

    if (m_graph == null)
      throw new Exception("Could not create a graph");

    m_renderer = CreateVideoRenderer(VideoRenderer, m_graph);

    /* Add our prefered audio renderer */
    InsertAudioRenderer(AudioRenderer);

    var filterGraph = m_graph as IFilterGraph2; // Switched this down from IFilterGraph3

    if (filterGraph == null)
      // FIX: message said "IFilterGraph3" while the cast above is IFilterGraph2.
      throw new Exception("Could not QueryInterface for the IFilterGraph2");

    IBaseFilter sourceFilter;

    /* Have DirectShow find the correct source filter for the Uri */
    int hr = filterGraph.AddSourceFilter(fileSource, fileSource, out sourceFilter);
    DsError.ThrowExceptionForHR(hr);

    // Check for subtitles.
    IPin ip;
    sourceFilter.FindPin("Subtitle", out ip);
    if (ip != null)
    {
      /* Add DirectVobSub to the graph before rendering pins, so it's connected */
      IBaseFilter dvs;
      dvs = (IBaseFilter)Activator.CreateInstance(
        Type.GetTypeFromCLSID(new DsGuid("{93A22E7A-5091-45EF-BA61-6DA26156A5D0}")));
      m_graph.AddFilter(dvs, "DirectVobSub");
      // FIX: release our local refs; the graph keeps its own reference to dvs.
      Marshal.ReleaseComObject(dvs);
      Marshal.ReleaseComObject(ip);
    }

    /* We will want to enum all the pins on the source filter */
    IEnumPins pinEnum;
    hr = sourceFilter.EnumPins(out pinEnum);
    DsError.ThrowExceptionForHR(hr);

    // Fetched-count pointer is unused; the HRESULT tells us when a pin arrived.
    IntPtr fetched = IntPtr.Zero;
    IPin[] pins = { null };

    /* Counter for how many pins successfully rendered */
    int pinsRendered = 0;

    /* Loop over each pin of the source filter */
    while (pinEnum.Next(pins.Length, pins, fetched) == 0)
    {
      if (filterGraph.RenderEx(pins[0], AMRenderExFlags.RenderToExistingRenderers, IntPtr.Zero) == 0)
        pinsRendered++;
      // FIX: release each enumerated pin (was leaked each iteration).
      Marshal.ReleaseComObject(pins[0]);
    }

    NewAllocatorFrame += new Action(MediaUriPlayer_NewAllocatorFrame);

    Marshal.ReleaseComObject(pinEnum);
    Marshal.ReleaseComObject(sourceFilter);

    if (pinsRendered == 0)
      throw new Exception("Could not render any streams from the source Uri");

    Thread.CurrentThread.Priority = ThreadPriority.Normal;

#if DEBUG
    /* Adds the GB to the ROT so we can view
     * it in graphedit */
    m_dsRotEntry = new DsROTEntry(m_graph);
#endif

    /* Configure the graph in the base class */
    SetupFilterGraph(m_graph);

    /* Sets the NaturalVideoWidth/Height */
    SetNativePixelSizes(m_renderer);

    // Populate the filters
    Filters = new FilterControl(m_graph);

    /* Remove and dispose of renderer if we do not
     * have a video stream */
    if (!HasVideo)
    {
      m_graph.RemoveFilter(m_renderer);

      /* Tells the base class to unregister and
       * free the custom allocator */
      FreeCustomAllocator();

      Marshal.FinalReleaseComObject(m_renderer);
      m_renderer = null;
    }
  }
  catch (Exception ex)
  {
    /* This exection will happen usually if the media does
     * not exist or could not open due to not having the
     * proper filters installed */
    FreeResources();

    /* Fire our failed event */
    InvokeMediaFailed(new MediaFailedEventArgs(ex.Message, ex));
  }

  InvokeMediaOpened();
}
/// <summary>
/// Build the playback graph for the grabber: renders <paramref name="FileName"/>,
/// replaces the default video renderer with a null renderer, and wires the
/// sample grabber between the AVI splitter and the null renderer so frames can
/// be captured instead of displayed.
/// </summary>
/// <param name="FileName">Path of the media file to open</param>
private void SetupGraph(string FileName)
{
  int hr;

  // Get the graphbuilder object
  this.graphBuilder = new FilterGraph() as IGraphBuilder;
  this.mediaControl = this.graphBuilder as IMediaControl;
  this.mediaSeeking = this.graphBuilder as IMediaSeeking;
  this.mediaEvent = this.graphBuilder as IMediaEvent;

  // Get the SampleGrabber interface
  this.sampleGrabber = new SampleGrabber() as ISampleGrabber;
  this.sampleGrabberFilter = sampleGrabber as IBaseFilter;
  ConfigureSampleGrabber(sampleGrabber);

  // Add the frame grabber to the graph
  hr = graphBuilder.AddFilter(sampleGrabberFilter, "Ds.NET Sample Grabber");
  DsError.ThrowExceptionForHR(hr);

  IBaseFilter aviSplitter = new AviSplitter() as IBaseFilter;

  // Add the aviSplitter to the graph
  hr = graphBuilder.AddFilter(aviSplitter, "Splitter");
  DsError.ThrowExceptionForHR(hr);

  // Have the graph builder construct its appropriate graph automatically
  hr = this.graphBuilder.RenderFile(FileName, null);
  DsError.ThrowExceptionForHR(hr);

#if DEBUG
  m_rot = new DsROTEntry(graphBuilder);
#endif

  // Remove the video renderer filter that RenderFile inserted; we render to
  // the sample grabber + null renderer instead.
  IBaseFilter defaultVideoRenderer = null;
  graphBuilder.FindFilterByName("Video Renderer", out defaultVideoRenderer);
  // FIX: FindFilterByName can legitimately fail (audio-only file, localized
  // filter name) — only remove/release when a renderer was actually found.
  if (defaultVideoRenderer != null)
  {
    graphBuilder.RemoveFilter(defaultVideoRenderer);
    Marshal.ReleaseComObject(defaultVideoRenderer);
  }

  // Disconnect anything that is connected
  // to the output of the sample grabber
  IPin iPinSampleGrabberOut = DsFindPin.ByDirection(sampleGrabberFilter, PinDirection.Output, 0);
  IPin iPinVideoIn;
  hr = iPinSampleGrabberOut.ConnectedTo(out iPinVideoIn);

  if (hr == 0)
  {
    // Disconnect the sample grabber output from the attached filters
    hr = iPinVideoIn.Disconnect();
    DsError.ThrowExceptionForHR(hr);

    hr = iPinSampleGrabberOut.Disconnect();
    DsError.ThrowExceptionForHR(hr);

    // FIX: release the connected pin we obtained (was leaked).
    Marshal.ReleaseComObject(iPinVideoIn);
  }
  else
  {
    // Try other way round because automatic renderer could not build
    // graph including the sample grabber
    IPin iPinAVISplitterOut = DsFindPin.ByDirection(aviSplitter, PinDirection.Output, 0);
    IPin iPinAVISplitterIn;
    hr = iPinAVISplitterOut.ConnectedTo(out iPinAVISplitterIn);
    DsError.ThrowExceptionForHR(hr);

    hr = iPinAVISplitterOut.Disconnect();
    DsError.ThrowExceptionForHR(hr);

    hr = iPinAVISplitterIn.Disconnect();
    DsError.ThrowExceptionForHR(hr);

    // Connect the avi splitter output to sample grabber
    IPin iPinSampleGrabberIn = DsFindPin.ByDirection(sampleGrabberFilter, PinDirection.Input, 0);
    hr = graphBuilder.Connect(iPinAVISplitterOut, iPinSampleGrabberIn);
    DsError.ThrowExceptionForHR(hr);

    // FIX: release the pin interfaces used in this branch (were leaked).
    Marshal.ReleaseComObject(iPinAVISplitterOut);
    Marshal.ReleaseComObject(iPinAVISplitterIn);
    Marshal.ReleaseComObject(iPinSampleGrabberIn);
  }

  // Add the null renderer to the graph
  nullrenderer = new NullRenderer() as IBaseFilter;
  hr = graphBuilder.AddFilter(nullrenderer, "Null renderer");
  DsError.ThrowExceptionForHR(hr);

  // Get the input pin of the null renderer
  IPin iPinNullRendererIn = DsFindPin.ByDirection(nullrenderer, PinDirection.Input, 0);

  // Connect the sample grabber to the null renderer
  hr = graphBuilder.Connect(iPinSampleGrabberOut, iPinNullRendererIn);
  DsError.ThrowExceptionForHR(hr);

  // FIX: release the pin references now that the connection holds its own.
  Marshal.ReleaseComObject(iPinSampleGrabberOut);
  Marshal.ReleaseComObject(iPinNullRendererIn);

  // Read and cache the image sizes
  SaveSizeInfo(sampleGrabber);

  this.GetFrameStepInterface();
  // NOTE: the original wrapped the body in `try { ... } finally { }` with an
  // empty finally block — removed as a no-op.
}
/// <summary>
/// Adds the named audio renderer to the graph, optionally making it the
/// graph's reference clock. Two-phase strategy: first a direct add; if that
/// throws, fall back to enumerating the graph, removing any other audio
/// renderer already present, then adding the requested one.
/// </summary>
/// <param name="graphBuilder">Graph to add the renderer to</param>
/// <param name="strFilterName">Friendly name of the audio renderer (case-insensitive match)</param>
/// <param name="setAsReferenceClock">When true, sets the new renderer as the graph's sync source</param>
/// <returns>The added renderer filter (caller owns the reference), or null on failure
/// or when the renderer was already in the graph</returns>
public static IBaseFilter AddAudioRendererToGraph(IGraphBuilder graphBuilder, string strFilterName,
                                                  bool setAsReferenceClock)
{
  // ---- Phase 1: optimistic direct insert -------------------------------
  try
  {
    IPin pinOut = null; // never assigned in this phase, so the Render branch below is dead here
    IBaseFilter NewFilter = null;
    IEnumFilters enumFilters;
    // NOTE(review): enumFilters is obtained but never used or released in this
    // phase — looks like leftover code; confirm before relying on ref counts.
    HResult hr = new HResult(graphBuilder.EnumFilters(out enumFilters));
    Log.Info("DirectShowUtils: First try to insert new audio renderer {0} ", strFilterName);
    // next add the new one...
    foreach (Filter filter in Filters.AudioRenderers)
    {
      // Case-insensitive friendly-name match against the registered renderers.
      if (String.Compare(filter.Name, strFilterName, true) == 0)
      {
        Log.Info("DirectShowUtils: Found audio renderer");
        // Instantiate the filter from its moniker and add it to the graph.
        NewFilter = (IBaseFilter)Marshal.BindToMoniker(filter.MonikerString);
        hr.Set(graphBuilder.AddFilter(NewFilter, strFilterName));
        if (hr < 0)
        {
          Log.Error("DirectShowUtils: unable to add filter:{0} to graph", strFilterName);
          NewFilter = null;
        }
        else
        {
          Log.Debug("DirectShowUtils: added filter:{0} to graph", strFilterName);
          if (pinOut != null)
          {
            hr.Set(graphBuilder.Render(pinOut));
            if (hr == 0)
            {
              Log.Info(" pinout rendererd");
            }
            else
            {
              Log.Error(" failed: pinout render");
            }
          }
          if (setAsReferenceClock)
          {
            // Make the renderer the master clock for the whole graph.
            hr.Set((graphBuilder as IMediaFilter).SetSyncSource(NewFilter as IReferenceClock));
            if (hr != 0)
            {
              Log.Warn("setAsReferenceClock sync source " + hr.ToDXString());
            }
          }
          return NewFilter;
        }
      } //if (String.Compare(filter.Name,strFilterName,true) ==0)
    } //foreach (Filter filter in filters.AudioRenderers)
    if (NewFilter == null)
    {
      Log.Error("DirectShowUtils: failed filter {0} not found", strFilterName);
    }
  }
  catch {} // deliberately swallowed: phase 2 below is the recovery path

  Log.Info("DirectShowUtils: First try to insert new audio renderer {0} failed ", strFilterName);

  // ---- Phase 2: remove competing renderers, then add -------------------
  try
  {
    IPin pinOut = null;
    IBaseFilter NewFilter = null;
    Log.Info("add filter:{0} to graph clock:{1}", strFilterName, setAsReferenceClock);
    //check first if audio renderer exists!
    bool bRendererExists = false;
    foreach (Filter filter in Filters.AudioRenderers)
    {
      if (String.Compare(filter.Name, strFilterName, true) == 0)
      {
        bRendererExists = true;
        Log.Info("DirectShowUtils: found renderer - {0}", filter.Name);
      }
    }
    if (!bRendererExists)
    {
      Log.Error("FAILED: audio renderer:{0} doesnt exists", strFilterName);
      return null;
    }
    // first remove all audio renderers
    bool bAllRemoved = false;   // set once a foreign renderer has been removed
    bool bNeedAdd = true;       // cleared when the requested renderer is already in the graph
    IEnumFilters enumFilters;
    HResult hr = new HResult(graphBuilder.EnumFilters(out enumFilters));
    if (hr >= 0 && enumFilters != null)
    {
      int iFetched;
      enumFilters.Reset();
      while (!bAllRemoved)
      {
        IBaseFilter[] pBasefilter = new IBaseFilter[2];
        hr.Set(enumFilters.Next(1, pBasefilter, out iFetched));
        if (hr < 0 || iFetched != 1 || pBasefilter[0] == null)
        {
          break;
        }
        // Compare this graph filter's CLSID against every registered audio
        // renderer to decide whether it is a renderer we should remove.
        foreach (Filter filter in Filters.AudioRenderers)
        {
          Guid classId1;
          Guid classId2;
          pBasefilter[0].GetClassID(out classId1);
          //Log.Info("Filter Moniker string - " + filter.Name);
          if (filter.Name == "ReClock Audio Renderer")
          {
            Log.Warn(
              "Reclock is installed - if this method fails, reinstall and regsvr32 /u reclock and then uninstall");
            // return null;
          }
          try
          {
            // Temporarily instantiate the registered renderer only to read its CLSID.
            NewFilter = (IBaseFilter)Marshal.BindToMoniker(filter.MonikerString);
            if (NewFilter == null)
            {
              Log.Info("NewFilter = null");
              continue;
            }
          }
          catch (Exception e)
          {
            Log.Info("Exception in BindToMoniker({0}): {1}", filter.MonikerString, e.Message);
            continue;
          }
          NewFilter.GetClassID(out classId2);
          ReleaseComObject(NewFilter);
          NewFilter = null;
          if (classId1.Equals(classId2))
          {
            if (filter.Name == strFilterName)
            {
              // Requested renderer is already in the graph: keep it, maybe re-clock.
              Log.Info("filter already in graph");
              if (setAsReferenceClock)
              {
                hr.Set((graphBuilder as IMediaFilter).SetSyncSource(pBasefilter[0] as IReferenceClock));
                if (hr != 0)
                {
                  Log.Warn("setAsReferenceClock sync source " + hr.ToDXString());
                }
              }
              ReleaseComObject(pBasefilter[0]);
              pBasefilter[0] = null;
              bNeedAdd = false;
              break;
            }
            else
            {
              // A different audio renderer occupies the graph: remember its
              // upstream pin (pinOut) and remove it.
              Log.Info("remove " + filter.Name + " from graph");
              pinOut = FindSourcePinOf(pBasefilter[0]);
              graphBuilder.RemoveFilter(pBasefilter[0]);
              // NOTE(review): setting bAllRemoved here stops the outer while
              // after the FIRST removal — assumes at most one audio renderer
              // is present; confirm this matches callers' expectations.
              bAllRemoved = true;
              break;
            }
          } //if (classId1.Equals(classId2))
        } //foreach (Filter filter in filters.AudioRenderers)
        if (pBasefilter[0] != null)
        {
          ReleaseComObject(pBasefilter[0]);
        }
      } //while(!bAllRemoved)
      ReleaseComObject(enumFilters);
    } //if (hr>=0 && enumFilters!=null)
    Log.Info("DirectShowUtils: Passed removing audio renderer");
    if (!bNeedAdd)
    {
      // Renderer was already present; nothing to return to the caller.
      return null;
    }
    // next add the new one...
    foreach (Filter filter in Filters.AudioRenderers)
    {
      if (String.Compare(filter.Name, strFilterName, true) == 0)
      {
        Log.Info("DirectShowUtils: Passed finding Audio Renderer");
        NewFilter = (IBaseFilter)Marshal.BindToMoniker(filter.MonikerString);
        hr.Set(graphBuilder.AddFilter(NewFilter, strFilterName));
        if (hr < 0)
        {
          Log.Error("failed:unable to add filter:{0} to graph", strFilterName);
          NewFilter = null;
        }
        else
        {
          Log.Debug("added filter:{0} to graph", strFilterName);
          if (pinOut != null)
          {
            // Re-render the pin that used to feed the removed renderer so the
            // stream now reaches the new one.
            hr.Set(graphBuilder.Render(pinOut));
            if (hr == 0)
            {
              Log.Info(" pinout rendererd");
            }
            else
            {
              Log.Error(" failed: pinout render");
            }
          }
          if (setAsReferenceClock)
          {
            hr.Set((graphBuilder as IMediaFilter).SetSyncSource(NewFilter as IReferenceClock));
            if (hr != 0)
            {
              Log.Warn("setAsReferenceClock sync source " + hr.ToDXString());
            }
          }
          return NewFilter;
        }
      } //if (String.Compare(filter.Name,strFilterName,true) ==0)
    } //foreach (Filter filter in filters.AudioRenderers)
    if (NewFilter == null)
    {
      Log.Error("failed filter:{0} not found", strFilterName);
    }
  }
  catch (Exception ex)
  {
    Log.Error("DirectshowUtil. Failed to add filter:{0} to graph :{1} {2} {3}", strFilterName, ex.Message,
              ex.Source, ex.StackTrace);
  }
  return null;
}
/// <summary>
/// Tries to connect <paramref name="outputPin"/> to some downstream filter:
/// first to every filter already in the graph, then (optionally) to new
/// filters looked up in the registry by the pin's media types.
/// </summary>
/// <param name="graphBuilder">Graph owning the pin</param>
/// <param name="filtername">Name passed through to the per-filter TryConnect overload</param>
/// <param name="outputPin">Output pin to connect</param>
/// <param name="TryNewFilters">When true, also probe registry filters matching the pin's media types</param>
/// <returns>true if a connection was made; otherwise falls through to a
/// name-based heuristic (see note at the bottom)</returns>
public static bool TryConnect(IGraphBuilder graphBuilder, string filtername, IPin outputPin, bool TryNewFilters)
{
  int hr;
  Log.Info("----------------TryConnect-------------");
  PinInfo outputInfo;
  outputPin.QueryPinInfo(out outputInfo);
  // NOTE(review): FreePinInfo releases outputInfo.filter, yet outputInfo.name
  // is still read in the final return statement below. The marshaled name
  // string appears to survive the release, but confirm this is intentional.
  DsUtils.FreePinInfo(outputInfo);
  //ListMediaTypes(outputPin);

  // Pass 1: try every filter already loaded in the graph.
  ArrayList currentfilters = GetFilters(graphBuilder);
  foreach (IBaseFilter filter in currentfilters)
  {
    if (TryConnect(graphBuilder, filtername, outputPin, filter))
    {
      ReleaseFilters(currentfilters);
      return true;
    }
  }
  ReleaseFilters(currentfilters);

  //not found, try new filter from registry
  if (TryNewFilters)
  {
    Log.Info("No preloaded filter could be connected. Trying to load new one from registry");
    IEnumMediaTypes enumTypes;
    hr = outputPin.EnumMediaTypes(out enumTypes);
    if (hr != 0)
    {
      Log.Debug("Failed: {0:x}", hr);
      return false;
    }
    Log.Debug("Got enum");
    // Collect the pin's major/sub media types; they drive the registry lookup.
    ArrayList major = new ArrayList();
    ArrayList sub = new ArrayList();
    Log.Debug("Getting corresponding filters");
    for (;;)
    {
      AMMediaType[] mediaTypes = new AMMediaType[1];
      int typesFetched;
      hr = enumTypes.Next(1, mediaTypes, out typesFetched);
      if (hr != 0 || typesFetched == 0)
      {
        break;
      }
      major.Add(mediaTypes[0].majorType);
      sub.Add(mediaTypes[0].subType);
    }
    ReleaseComObject(enumTypes);
    Log.Debug("Found {0} media types", major.Count);
    Guid[] majorTypes = (Guid[])major.ToArray(typeof (Guid));
    Guid[] subTypes = (Guid[])sub.ToArray(typeof (Guid));
    Log.Debug("Loading filters");
    // Only filters with at least this merit (MERIT_NORMAL = 0x00400000) are candidates.
    ArrayList filters = FilterHelper.GetFilters(majorTypes, subTypes, (Merit)0x00400000);
    Log.Debug("Loaded {0} filters", filters.Count);
    // Pass 2: add each candidate to the graph, keep it on success, remove it otherwise.
    foreach (string name in filters)
    {
      if (!CheckFilterIsLoaded(graphBuilder, name))
      {
        Log.Debug("Loading filter: {0}", name);
        IBaseFilter f = AddFilterToGraph(graphBuilder, name);
        if (f != null)
        {
          if (TryConnect(graphBuilder, filtername, outputPin, f))
          {
            // The graph holds its own reference; drop ours and report success.
            ReleaseComObject(f);
            return true;
          }
          else
          {
            graphBuilder.RemoveFilter(f);
            ReleaseComObject(f);
          }
        }
      }
      else
      {
        Log.Debug("Ignoring filter {0}. Already in graph.", name);
      }
    }
  }
  Log.Debug("TryConnect failed.");
  // Heuristic fallback: pins whose name starts with '~' are optional/unrendered
  // streams, treated as a non-fatal "success".
  return outputInfo.name.StartsWith("~");
}
/// <summary>
/// Adds the filter named <paramref name="targetFilter"/> to the graph and tries
/// to connect <paramref name="source"/> to it for the given media type.
/// The filter is removed from the graph again if the connection fails; the
/// local COM reference is always released.
/// </summary>
/// <returns>true when the connection succeeded; false otherwise</returns>
public static bool TryConnect(IGraphBuilder graphbuilder, IBaseFilter source, Guid mediaType, string targetFilter)
{
  // Nothing to do without a target filter name.
  if (string.IsNullOrEmpty(targetFilter))
  {
    return false;
  }

  IBaseFilter candidate = AddFilterToGraph(graphbuilder, targetFilter);
  if (candidate == null)
  {
    return false;
  }

  bool linked = TryConnect(graphbuilder, source, mediaType, candidate);
  if (!linked)
  {
    Log.Info(" - not compatible, removed");
    graphbuilder.RemoveFilter(candidate);
  }

  // On success the graph keeps its own reference to the filter.
  ReleaseComObject(candidate);
  return linked;
}
/// <summary>
/// Recursively removes from the graph every filter connected downstream of
/// <paramref name="fromFilter"/>'s output pins.
/// </summary>
/// <param name="graphBuilder">Graph the filters belong to</param>
/// <param name="fromFilter">Filter whose downstream chain is removed</param>
/// <param name="remove">When true, <paramref name="fromFilter"/> itself is
/// removed after its downstream filters (false for the initial caller)</param>
public static void RemoveDownStreamFilters(IGraphBuilder graphBuilder, IBaseFilter fromFilter, bool remove)
{
  IEnumPins enumPins;
  fromFilter.EnumPins(out enumPins);
  if (enumPins == null)
  {
    return;
  }
  IPin[] pins = new IPin[2];
  int fetched;
  while (enumPins.Next(1, pins, out fetched) == 0)
  {
    if (fetched != 1)
    {
      break;
    }
    // Only output pins lead downstream.
    PinDirection dir;
    pins[0].QueryDirection(out dir);
    if (dir != PinDirection.Output)
    {
      ReleaseComObject(pins[0]);
      continue;
    }
    IPin pinConnected;
    pins[0].ConnectedTo(out pinConnected);
    if (pinConnected == null)
    {
      ReleaseComObject(pins[0]);
      continue;
    }
    // The connected pin's owning filter is the next filter downstream;
    // recurse into it (and remove it) before removing ourselves.
    PinInfo info;
    pinConnected.QueryPinInfo(out info);
    if (info.filter != null)
    {
      RemoveDownStreamFilters(graphBuilder, info.filter, true);
    }
    DsUtils.FreePinInfo(info);
    // FIX: release the connected pin obtained from ConnectedTo (was leaked
    // for every connected output pin).
    ReleaseComObject(pinConnected);
    ReleaseComObject(pins[0]);
  }
  if (remove)
  {
    graphBuilder.RemoveFilter(fromFilter);
  }
  ReleaseComObject(enumPins);
}
/// <summary>
/// Removes every filter that has both input and output pins but no connection
/// on any pin (i.e. filters that were loaded but ended up unused). Pure source
/// and pure renderer filters (only-out / only-in) are left alone.
/// </summary>
/// <param name="graphBuilder">Graph to scan; null is a no-op</param>
public static void RemoveUnusedFiltersFromGraph(IGraphBuilder graphBuilder)
{
  if (graphBuilder == null)
    return;
  int hr = 0;
  IEnumFilters enumFilters = null;
  ArrayList filtersArray = new ArrayList();
  try
  {
    hr = graphBuilder.EnumFilters(out enumFilters);
    DsError.ThrowExceptionForHR(hr);
    // Snapshot the filters first: removing while enumerating would invalidate
    // the enumerator.
    IBaseFilter[] filters = new IBaseFilter[1];
    int fetched;
    while (enumFilters.Next(filters.Length, filters, out fetched) == 0)
    {
      filtersArray.Add(filters[0]);
    }
    foreach (IBaseFilter filter in filtersArray)
    {
      FilterInfo info;
      filter.QueryFilterInfo(out info);
      Log.Debug("Check graph connections for: {0}", info.achName);
      IEnumPins pinEnum;
      hr = filter.EnumPins(out pinEnum);
      DsError.ThrowExceptionForHR(hr);
      if (hr == 0 && pinEnum != null)
      {
        bool filterUsed = false;
        bool hasOut = false;
        bool hasIn = false;
        pinEnum.Reset();
        IPin[] pins = new IPin[1];
        while (pinEnum.Next(1, pins, out fetched) == 0)
        {
          if (fetched > 0)
          {
            PinDirection pinDir;
            hr = pins[0].QueryDirection(out pinDir);
            if (pinDir == PinDirection.Output)
              hasOut = true;
            else
              hasIn = true;
            // Any connected pin marks the whole filter as in use.
            bool pinConnected = HasConnection(pins[0]);
            // FIX: release each enumerated pin (was leaked on every iteration,
            // including on the early break below). Released before the
            // direction HRESULT check so a failure cannot leak it either.
            ReleaseComObject(pins[0]);
            DsError.ThrowExceptionForHR(hr);
            if (pinConnected)
            {
              filterUsed = true;
              break;
            }
          }
        }
        ReleaseComObject(pinEnum);
        // Only remove "pass-through" filters (have both sides) that are
        // completely disconnected; sources/renderers are kept.
        if (!filterUsed && hasOut && hasIn)
        {
          hr = graphBuilder.RemoveFilter(filter);
          DsError.ThrowExceptionForHR(hr);
          if (hr == 0)
            Log.Debug(" - remove done");
        }
      }
      // QueryFilterInfo hands back a graph reference that must be released.
      ReleaseComObject(info.pGraph);
      ReleaseComObject(filter);
    }
  }
  catch (Exception error)
  {
    Log.Error("DirectShowUtil: Remove unused filters failed - {0}", error.Message);
  }
  ReleaseComObject(enumFilters);
}
/// <summary>
/// Removes the active subtitle filter (DirectVobSub or XySubFilter) from the
/// graph. For DirectVobSub — which sits inline in the video path — the video
/// connection is disconnected around the filter and re-established directly
/// between the upstream source pin and the downstream renderer pin.
/// XySubFilter does not sit in the video path, so it is simply removed.
/// </summary>
/// <param name="graphBuilder">Graph to clean up</param>
public static void RemoveFromGraph(IGraphBuilder graphBuilder)
{
  IBaseFilter vob = null;
  // The configured subtitle engine decides which filter family to look for.
  using (Settings xmlreader = new MPSettings())
  {
    string engineType = xmlreader.GetValueAsString("subtitles", "engine", "DirectVobSub");
    XySubFilter = engineType.Equals("XySubFilter");
  }
  if (!XySubFilter)
  {
    DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.DirectVobSubAutoload, out vob);
    if (vob == null)
    {
      //Try the "normal" filter then.
      DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.DirectVobSubNormal, out vob);
    }
  }
  if (vob == null)
  {
    // No DirectVobSub present (or XySubFilter engine selected): XySubFilter is
    // not wired into the video path, so a plain remove+release suffices.
    DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.XySubFilterAutoload, out vob);
    if (vob != null)
    {
      //remove the XySubFilter filter from the graph
      graphBuilder.RemoveFilter(vob);
      DirectShowUtil.ReleaseComObject(vob);
      vob = null;
      return;
    }
    //Try the XySubFilter "normal" filter then.
    DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.XySubFilterNormal, out vob);
    if (vob != null)
    {
      //remove the XySubFilter filter from the graph
      graphBuilder.RemoveFilter(vob);
      DirectShowUtil.ReleaseComObject(vob);
      vob = null;
    }
    return;
  }
  Log.Info("VideoPlayerVMR9: DirectVobSub in graph, removing...");
  // Check where video inputs are connected
  IPin pinVideoIn = DsFindPin.ByDirection(vob, PinDirection.Input, 0);
  IPin pinVideoOut = DsFindPin.ByDirection(vob, PinDirection.Output, 0);
  //find directvobsub's video output pin source input pin
  IPin pinVideoTo = null;
  if (pinVideoOut != null)
  {
    pinVideoOut.ConnectedTo(out pinVideoTo);
  }
  //find directvobsub's video input pin source output pin
  IPin pinVideoFrom = null;
  if (pinVideoIn != null)
  {
    pinVideoIn.ConnectedTo(out pinVideoFrom);
  }
  int hr = 0;
  // Break both sides of the video chain around DirectVobSub.
  if (pinVideoFrom != null)
  {
    hr = pinVideoFrom.Disconnect();
    if (hr != 0)
    {
      Log.Error("VideoPlayerVMR9: DirectVobSub failed disconnecting source pin");
    }
  }
  if (pinVideoTo != null)
  {
    hr = pinVideoTo.Disconnect();
    if (hr != 0)
    {
      Log.Error("VideoPlayerVMR9: DirectVobSub failed disconnecting destination pin");
    }
  }
  //remove the DirectVobSub filter from the graph
  graphBuilder.RemoveFilter(vob);
  DirectShowUtil.ReleaseComObject(vob);
  vob = null;
  //reconnect the source output pin to the vmr9/evr filter
  if (pinVideoFrom != null)
  {
    if (pinVideoTo != null)
    {
      // Bridge the gap DirectVobSub left behind.
      hr = graphBuilder.Connect(pinVideoFrom, pinVideoTo);
    }
    //hr = graphBuilder.Render(pinVideoFrom);
    DirectShowUtil.ReleaseComObject(pinVideoFrom);
    pinVideoFrom = null;
  }
  if (pinVideoTo != null)
  {
    DirectShowUtil.ReleaseComObject(pinVideoTo);
    pinVideoTo = null;
  }
  if (pinVideoOut != null)
  {
    DirectShowUtil.ReleaseComObject(pinVideoOut);
    pinVideoOut = null;
  }
  if (pinVideoIn != null)
  {
    DirectShowUtil.ReleaseComObject(pinVideoIn);
    pinVideoIn = null;
  }
  // hr here reflects the last operation attempted (reconnect when both pins
  // existed, otherwise the last disconnect, otherwise 0).
  if (hr != 0)
    Log.Error("VideoPlayerVMR9: Could not connect video out to video renderer: {0}", hr);
  else
    Log.Debug("VideoPlayerVMR9: DirectVobSub graph rebuild finished");
}
/// <summary>
/// Transcodes a .ts/.mpg recording to MP4: builds a TsReader source graph,
/// auto-detects H.264/AAC vs MPEG-2 streams to pick decoders, pre-runs the
/// graph to measure duration, rebuilds the encoder/muxer chain and starts the
/// transcode run.
/// </summary>
/// <param name="info">Recording metadata (input file path, etc.)</param>
/// <param name="format">Requested output format; must be supported</param>
/// <param name="quality">Requested encode quality</param>
/// <param name="standard">TV standard</param>
/// <returns>true when the transcode graph was started; false on any failure</returns>
public bool Transcode(TranscodeInfo info, MediaPortal.Core.Transcoding.VideoFormat format,
                      MediaPortal.Core.Transcoding.Quality quality, Standard standard)
{
  if (!Supports(format))
  {
    return (false);
  }
  string ext = System.IO.Path.GetExtension(info.file);
  if (ext.ToLower() != ".ts" && ext.ToLower() != ".mpg")
  {
    Log.Info("TSReader2MP4: wrong file format");
    return (false);
  }
  try
  {
    graphBuilder = (IGraphBuilder) new FilterGraph();
    _rotEntry = new DsROTEntry((IFilterGraph)graphBuilder);
    Log.Info("TSReader2MP4: add filesource");
    TsReader reader = new TsReader();
    tsreaderSource = (IBaseFilter)reader;
    IBaseFilter filter = (IBaseFilter)tsreaderSource;
    graphBuilder.AddFilter(filter, "TSReader Source");
    IFileSourceFilter fileSource = (IFileSourceFilter)tsreaderSource;
    Log.Info("TSReader2MP4: load file:{0}", info.file);
    int hr = fileSource.Load(info.file, null);
    //add audio/video codecs
    string strVideoCodec = "";
    string strH264VideoCodec = "";
    string strAudioCodec = "";
    string strAACAudioCodec = "";
    using (MediaPortal.Profile.Settings xmlreader = new MediaPortal.Profile.MPSettings())
    {
      strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "");
      strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "");
      strAACAudioCodec = xmlreader.GetValueAsString("mytv", "aacaudiocodec", "");
      strH264VideoCodec = xmlreader.GetValueAsString("mytv", "h264videocodec", "");
    }
    //Find the type of decoder required for the output video & audio pins on TSReader.
    Log.Info("TSReader2MP4: find tsreader compatible audio/video decoders");
    IPin pinOut0, pinOut1;
    IPin pinIn0, pinIn1;
    pinOut0 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 0); //audio
    pinOut1 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 1); //video
    if (pinOut0 == null || pinOut1 == null)
    {
      Log.Error("TSReader2MP4: FAILED: unable to get output pins of tsreader");
      Cleanup();
      return (false);
    }
    // Probe the audio pin's media types for LATM AAC.
    bool usingAAC = false;
    IEnumMediaTypes enumMediaTypes;
    hr = pinOut0.EnumMediaTypes(out enumMediaTypes);
    while (true)
    {
      AMMediaType[] mediaTypes = new AMMediaType[1];
      int typesFetched;
      hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
      if (hr != 0 || typesFetched == 0)
      {
        break;
      }
      if (mediaTypes[0].majorType == MediaType.Audio && mediaTypes[0].subType == MediaSubType.LATMAAC)
      {
        Log.Info("TSReader2MP4: found LATM AAC audio out pin on tsreader");
        usingAAC = true;
      }
    }
    // Probe the video pin's media types for H.264 (AVC1).
    bool usingH264 = false;
    hr = pinOut1.EnumMediaTypes(out enumMediaTypes);
    while (true)
    {
      AMMediaType[] mediaTypes = new AMMediaType[1];
      int typesFetched;
      hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
      if (hr != 0 || typesFetched == 0)
      {
        break;
      }
      if (mediaTypes[0].majorType == MediaType.Video && mediaTypes[0].subType == AVC1)
      {
        Log.Info("TSReader2MP4: found H.264 video out pin on tsreader");
        usingH264 = true;
      }
    }
    //Add the type of decoder required for the output video & audio pins on TSReader.
    Log.Info("TSReader2MP4: add audio/video decoders to graph");
    if (usingH264 == false)
    {
      Log.Info("TSReader2MP4: add mpeg2 video decoder:{0}", strVideoCodec);
      VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
      if (VideoCodec == null)
      {
        Log.Error("TSReader2MP4: unable to add mpeg2 video decoder");
        Cleanup();
        return (false);
      }
    }
    else
    {
      Log.Info("TSReader2MP4: add h264 video codec:{0}", strH264VideoCodec);
      VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strH264VideoCodec);
      if (VideoCodec == null)
      {
        Log.Error("TSReader2MP4: FAILED:unable to add h264 video codec");
        Cleanup();
        return (false);
      }
    }
    if (usingAAC == false)
    {
      Log.Info("TSReader2MP4: add mpeg2 audio codec:{0}", strAudioCodec);
      AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
      if (AudioCodec == null)
      {
        Log.Error("TSReader2MP4: FAILED:unable to add mpeg2 audio codec");
        Cleanup();
        return (false);
      }
    }
    else
    {
      Log.Info("TSReader2MP4: add aac audio codec:{0}", strAACAudioCodec);
      AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAACAudioCodec);
      if (AudioCodec == null)
      {
        Log.Error("TSReader2MP4: FAILED:unable to add aac audio codec");
        Cleanup();
        return (false);
      }
    }
    Log.Info("TSReader2MP4: connect tsreader->audio/video decoders");
    //connect output #0 (audio) of tsreader->audio decoder input pin 0
    //connect output #1 (video) of tsreader->video decoder input pin 0
    pinIn0 = DsFindPin.ByDirection(AudioCodec, PinDirection.Input, 0); //audio
    pinIn1 = DsFindPin.ByDirection(VideoCodec, PinDirection.Input, 0); //video
    if (pinIn0 == null || pinIn1 == null)
    {
      Log.Error("TSReader2MP4: FAILED: unable to get pins of video/audio codecs");
      Cleanup();
      return (false);
    }
    hr = graphBuilder.Connect(pinOut0, pinIn0);
    if (hr != 0)
    {
      Log.Error("TSReader2MP4: FAILED: unable to connect audio pins :0x{0:X}", hr);
      Cleanup();
      return (false);
    }
    hr = graphBuilder.Connect(pinOut1, pinIn1);
    if (hr != 0)
    {
      Log.Error("TSReader2MP4: FAILED: unable to connect video pins :0x{0:X}", hr);
      Cleanup();
      return (false);
    }
    //add encoders, muxer & filewriter
    if (!AddCodecs(graphBuilder, info))
    {
      return (false);
    }
    //setup graph controls
    mediaControl = graphBuilder as IMediaControl;
    mediaSeeking = tsreaderSource as IMediaSeeking;
    mediaEvt = graphBuilder as IMediaEventEx;
    mediaPos = graphBuilder as IMediaPosition;
    //get file duration: seek far past the end, then read back the clamped position
    Log.Info("TSReader2MP4: Get duration of recording");
    long lTime = 5 * 60 * 60; // 5 hours, converted to 100ns units below
    lTime *= 10000000;
    long pStop = 0;
    hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning,
                                   new DsLong(pStop), AMSeekingSeekingFlags.NoPositioning);
    if (hr == 0)
    {
      long lStreamPos;
      mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
      m_dDuration = lStreamPos;
      lTime = 0;
      mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning,
                                new DsLong(pStop), AMSeekingSeekingFlags.NoPositioning);
    }
    double duration = m_dDuration / 10000000d;
    Log.Info("TSReader2MP4: recording duration: {0}", MediaPortal.Util.Utils.SecondsToHMSString((int)duration));
    //run the graph to initialize the filters to be sure
    hr = mediaControl.Run();
    if (hr != 0)
    {
      Log.Error("TSReader2MP4: FAILED: unable to start graph :0x{0:X}", hr);
      Cleanup();
      return (false);
    }
    // Let the graph play ~2 seconds (or give up after 20 polls) so all filters
    // have negotiated formats before the encoder chain is rebuilt.
    int maxCount = 20;
    while (true)
    {
      long lCurrent;
      mediaSeeking.GetCurrentPosition(out lCurrent);
      double dpos = (double)lCurrent;
      dpos /= 10000000d;
      System.Threading.Thread.Sleep(100);
      if (dpos >= 2.0d)
      {
        break;
      }
      maxCount--;
      if (maxCount <= 0)
      {
        break;
      }
    }
    mediaControl.Stop();
    FilterState state;
    mediaControl.GetState(500, out state);
    GC.Collect();
    GC.Collect();
    GC.Collect();
    GC.WaitForPendingFinalizers();
    // Rebuild the back half of the graph for the real transcode run.
    graphBuilder.RemoveFilter(mp4Muxer);
    graphBuilder.RemoveFilter(h264Encoder);
    graphBuilder.RemoveFilter(aacEncoder);
    graphBuilder.RemoveFilter((IBaseFilter)fileWriterFilter);
    if (!AddCodecs(graphBuilder, info))
    {
      return (false);
    }
    //Set Encoder quality & Muxer settings
    if (!EncoderSet(graphBuilder, info))
    {
      return (false);
    }
    //start transcoding - run the graph
    Log.Info("TSReader2MP4: start transcoding");
    //setup flow control
    //need to leverage CBAsePin, CPullPin & IAsyncReader methods.
    IAsyncReader synchVideo = null;
    mediaSample = VideoCodec as IMediaSample;
    // FIX: the original called synchVideo.SyncReadAligned(...) unconditionally
    // while synchVideo is always null (and a decoder filter does not expose
    // IMediaSample), so every transcode died with a NullReferenceException in
    // the catch handler. Guard the experimental flow-control call instead.
    if (synchVideo != null && mediaSample != null)
    {
      hr = synchVideo.SyncReadAligned(mediaSample);
    }
    //So we only parse decoder output whent the encoders are ready.
    hr = mediaControl.Run();
    if (hr != 0)
    {
      Log.Error("TSReader2MP4: FAILED:unable to start graph :0x{0:X}", hr);
      Cleanup();
      return (false);
    }
  }
  catch (Exception ex)
  {
    Log.Error("TSReader2MP4: Unable create graph: {0}", ex.Message);
    Cleanup();
    return (false);
  }
  return (true);
}
/// <summary>
/// Removes every filter from the given DirectShow graph and fully releases
/// each filter's COM reference.
/// </summary>
/// <param name="graphBuilder">Graph to empty; must not be null</param>
/// <exception cref="ArgumentNullException">graphBuilder is null</exception>
public static void RemoveAllFilters(IGraphBuilder graphBuilder)
{
  if (graphBuilder == null)
    throw new ArgumentNullException("graphBuilder");
  IEnumFilters enumFilters = null;
  System.Collections.ArrayList filtersArray = new System.Collections.ArrayList();
  try
  {
    int hr = graphBuilder.EnumFilters(out enumFilters);
    DsError.ThrowExceptionForHR(hr);
    // Snapshot first: removing filters while enumerating would invalidate
    // the enumerator.
    IBaseFilter[] filters = new IBaseFilter[1];
    int fetched;
    while (enumFilters.Next(filters.Length, filters, out fetched) == 0)
    {
      filtersArray.Add(filters[0]);
    }
    foreach (IBaseFilter filter in filtersArray)
    {
      FilterInfo info;
      filter.QueryFilterInfo(out info);
      Log.Log.Write("Remove filter from graph: {0}", info.achName);
      // FIX: QueryFilterInfo returns an AddRef'd graph pointer in info.pGraph
      // that the caller must release (was leaked for every filter).
      if (info.pGraph != null)
      {
        Release.ComObject(info.pGraph);
      }
      graphBuilder.RemoveFilter(filter);
      // Drain the RCW ref count so the native filter is freed immediately.
      while (Release.ComObject(filter) > 0) ;
    }
  }
  catch (Exception)
  {
    Log.Log.Write("Remove filter error!");
    return;
  }
  finally
  {
    if (enumFilters != null)
    {
      Release.ComObject(enumFilters);
    }
  }
}
/// <summary>
/// Prepares the graph for MPC-HC subtitle rendering and loads subtitles for
/// <paramref name="filename"/>: applies the configured default style and
/// forced-only mode, strips competing subtitle filters (DirectVobSub, the
/// internal script renderer, FFDShow subtitles), then hands off to
/// MpcSubtitles.LoadSubtitles.
/// </summary>
/// <returns>Result of MpcSubtitles.LoadSubtitles</returns>
public bool LoadSubtitles(IGraphBuilder graphBuilder, string filename)
{
  LoadSettings();
  MpcSubtitles.SetDefaultStyle(ref this.defStyle, this.overrideASSStyle);

  // With subtitle selection disabled, never restrict to forced-only;
  // otherwise show forced-only exactly when auto-show is off.
  MpcSubtitles.SetShowForcedOnly(selectionOff ? false : !this.autoShow);

  //remove DirectVobSub
  DirectVobSubUtil.RemoveFromGraph(graphBuilder);

  //remove InternalScriptRenderer as it takes subtitle pin
  IBaseFilter scriptRenderer = null;
  DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.InternalScriptRenderer, out scriptRenderer);
  if (scriptRenderer != null)
  {
    graphBuilder.RemoveFilter(scriptRenderer);
    DirectShowUtil.ReleaseComObject(scriptRenderer);
  }

  FFDShowEngine.DisableFFDShowSubtitles(graphBuilder);

  // Render at the current GUI resolution.
  Size size = new Size(GUIGraphicsContext.Width, GUIGraphicsContext.Height);

  return MpcSubtitles.LoadSubtitles(
    DirectShowUtil.GetUnmanagedDevice(GUIGraphicsContext.DX9Device),
    size, filename, graphBuilder, subPaths);
}
/// <summary>
/// Hooks subtitle handling up to an FFDShow video decoder already present in
/// the graph: removes competing subtitle filters, locates the FFDShow filter
/// (plain, DXVA or raw variant) and binds the FFDShowAPI wrapper to it.
/// </summary>
/// <returns>false when no FFDShow video decoder is in the graph; true otherwise</returns>
public bool LoadSubtitles(IGraphBuilder graphBuilder, string filename)
{
  LoadSettings();

  //remove DirectVobSub
  DirectVobSubUtil.RemoveFromGraph(graphBuilder);

  //remove InternalScriptRenderer as it takes subtitle pin
  IBaseFilter scriptRenderer = null;
  DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.InternalScriptRenderer, out scriptRenderer);
  if (scriptRenderer != null)
  {
    graphBuilder.RemoveFilter(scriptRenderer);
    DirectShowUtil.ReleaseComObject(scriptRenderer);
  }

  // Probe the FFDShow video decoder variants in priority order and keep the
  // first one found in the graph.
  IBaseFilter ffdshowFilter = null;
  DirectShowUtil.FindFilterByClassID(graphBuilder, FFDShowAPI.FFDShowVideoGuid, out ffdshowFilter);
  if (ffdshowFilter == null)
  {
    DirectShowUtil.FindFilterByClassID(graphBuilder, FFDShowAPI.FFDShowVideoDXVAGuid, out ffdshowFilter);
  }
  if (ffdshowFilter == null)
  {
    DirectShowUtil.FindFilterByClassID(graphBuilder, FFDShowAPI.FFDShowVideoRawGuid, out ffdshowFilter);
  }
  if (ffdshowFilter == null)
  {
    return false;
  }

  ffdshowAPI = new FFDShowAPI((object)ffdshowFilter);

  // A decoder that lacks IffdshowDec is too old to drive subtitles.
  if (ffdshowFilter as IffdshowDec == null)
  {
    Log.Error("FFdshow interfaces not found. Try to update FFDShow");
  }
  else
  {
    Log.Info("FFdshow interfaces found");
  }

  // Subtitle display follows auto-show unless selection is disabled entirely.
  Enable = selectionOff ? false : autoShow;
  return true;
}
/// <summary>
/// Transcodes a .dvr-ms/.sbe recording to WMV: builds a StreamBufferSource
/// graph with MPEG-2 decoders, pre-runs it to measure duration, re-adds the
/// WM ASF writer and starts the transcode run.
/// </summary>
/// <param name="info">Recording metadata (input file path, etc.)</param>
/// <param name="format">Requested output format; must be supported</param>
/// <param name="quality">Requested encode quality</param>
/// <param name="standard">TV standard</param>
/// <returns>true when the transcode graph was started; false on any failure</returns>
public bool Transcode(TranscodeInfo info, VideoFormat format, Quality quality, Standard standard)
{
  try
  {
    if (!Supports(format))
      return false;
    string ext = System.IO.Path.GetExtension(info.file);
    if (ext.ToLowerInvariant() != ".dvr-ms" && ext.ToLowerInvariant() != ".sbe")
    {
      Log.Info("DVRMS2WMV: wrong file format");
      return false;
    }
    Log.Info("DVRMS2WMV: create graph");
    graphBuilder = (IGraphBuilder)new FilterGraph();
    _rotEntry = new DsROTEntry((IFilterGraph)graphBuilder);
    Log.Info("DVRMS2WMV: add streambuffersource");
    bufferSource = (IStreamBufferSource)new StreamBufferSource();
    IBaseFilter filter = (IBaseFilter)bufferSource;
    graphBuilder.AddFilter(filter, "SBE SOURCE");
    Log.Info("DVRMS2WMV: load file:{0}", info.file);
    IFileSourceFilter fileSource = (IFileSourceFilter)bufferSource;
    int hr = fileSource.Load(info.file, null);
    //add mpeg2 audio/video codecs
    string strVideoCodec = "";
    string strAudioCodec = "";
    using (MediaPortal.Profile.Settings xmlreader = new MediaPortal.Profile.MPSettings())
    {
      strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "MPC - MPEG-2 Video Decoder (Gabest)");
      strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "MPC - MPA Decoder Filter");
    }
    Log.Info("DVRMS2WMV: add mpeg2 video codec:{0}", strVideoCodec);
    Mpeg2VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
    // FIX: was `if (hr != 0)` — hr still held the fileSource.Load result, so a
    // failed AddFilterToGraph (null return) was never detected. Check the
    // returned filter like the audio branch below does.
    if (Mpeg2VideoCodec == null)
    {
      Log.Error("DVRMS2WMV:FAILED:Add mpeg2 video to filtergraph :0x{0:X}", hr);
      Cleanup();
      return false;
    }
    Log.Info("DVRMS2WMV: add mpeg2 audio codec:{0}", strAudioCodec);
    Mpeg2AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
    if (Mpeg2AudioCodec == null)
    {
      Log.Error("DVRMS2WMV:FAILED:unable to add mpeg2 audio codec");
      Cleanup();
      return false;
    }
    Log.Info("DVRMS2WMV: connect streambufer source->mpeg audio/video decoders");
    //connect output #0 of streambuffer source->mpeg2 audio codec pin 1
    //connect output #1 of streambuffer source->mpeg2 video codec pin 1
    IPin pinOut0, pinOut1;
    IPin pinIn0, pinIn1;
    pinOut0 = DsFindPin.ByDirection((IBaseFilter)bufferSource, PinDirection.Output, 0); //audio
    pinOut1 = DsFindPin.ByDirection((IBaseFilter)bufferSource, PinDirection.Output, 1); //video
    if (pinOut0 == null || pinOut1 == null)
    {
      Log.Error("DVRMS2WMV:FAILED:unable to get pins of source");
      Cleanup();
      return false;
    }
    pinIn0 = DsFindPin.ByDirection(Mpeg2VideoCodec, PinDirection.Input, 0); //video
    pinIn1 = DsFindPin.ByDirection(Mpeg2AudioCodec, PinDirection.Input, 0); //audio
    if (pinIn0 == null || pinIn1 == null)
    {
      Log.Error("DVRMS2WMV:FAILED:unable to get pins of mpeg2 video/audio codec");
      Cleanup();
      return false;
    }
    // Note the crossed indices: pinOut0 (audio) -> pinIn1 (audio decoder),
    // pinOut1 (video) -> pinIn0 (video decoder).
    hr = graphBuilder.Connect(pinOut0, pinIn1);
    if (hr != 0)
    {
      Log.Error("DVRMS2WMV:FAILED:unable to connect audio pins :0x{0:X}", hr);
      Cleanup();
      return false;
    }
    hr = graphBuilder.Connect(pinOut1, pinIn0);
    if (hr != 0)
    {
      Log.Error("DVRMS2WMV:FAILED:unable to connect video pins :0x{0:X}", hr);
      Cleanup();
      return false;
    }
    string outputFilename = System.IO.Path.ChangeExtension(info.file, ".wmv");
    if (!AddWmAsfWriter(outputFilename, quality, standard))
      return false;
    Log.Info("DVRMS2WMV: start pre-run");
    mediaControl = graphBuilder as IMediaControl;
    mediaSeeking = bufferSource as IStreamBufferMediaSeeking;
    mediaEvt = graphBuilder as IMediaEventEx;
    mediaPos = graphBuilder as IMediaPosition;
    //get file duration: seek far past the end, then read back the clamped position
    long lTime = 5 * 60 * 60; // 5 hours, converted to 100ns units below
    lTime *= 10000000;
    long pStop = 0;
    hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning,
                                   new DsLong(pStop), AMSeekingSeekingFlags.NoPositioning);
    if (hr == 0)
    {
      long lStreamPos;
      mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
      m_dDuration = lStreamPos;
      lTime = 0;
      mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning,
                                new DsLong(pStop), AMSeekingSeekingFlags.NoPositioning);
    }
    double duration = m_dDuration / 10000000d;
    Log.Info("DVRMS2WMV: movie duration:{0}", Util.Utils.SecondsToHMSString((int)duration));
    hr = mediaControl.Run();
    if (hr != 0)
    {
      Log.Error("DVRMS2WMV:FAILED:unable to start graph :0x{0:X}", hr);
      Cleanup();
      return false;
    }
    // Let the graph play ~2 seconds (or give up after 20 polls) so all filters
    // negotiate formats before the writer is re-added.
    int maxCount = 20;
    while (true)
    {
      long lCurrent;
      mediaSeeking.GetCurrentPosition(out lCurrent);
      double dpos = (double)lCurrent;
      dpos /= 10000000d;
      System.Threading.Thread.Sleep(100);
      if (dpos >= 2.0d)
        break;
      maxCount--;
      if (maxCount <= 0)
        break;
    }
    Log.Info("DVRMS2WMV: pre-run done");
    Log.Info("DVRMS2WMV: Get duration of movie");
    mediaControl.Stop();
    FilterState state;
    mediaControl.GetState(500, out state);
    GC.Collect();
    GC.Collect();
    GC.Collect();
    GC.WaitForPendingFinalizers();
    Log.Info("DVRMS2WMV: reconnect mpeg2 video codec->ASF WM Writer");
    graphBuilder.RemoveFilter(fileWriterbase);
    if (!AddWmAsfWriter(outputFilename, quality, standard))
      return false;
    Log.Info("DVRMS2WMV: Start transcoding");
    hr = mediaControl.Run();
    if (hr != 0)
    {
      Log.Error("DVRMS2WMV:FAILED:unable to start graph :0x{0:X}", hr);
      Cleanup();
      return false;
    }
  }
  catch (Exception e)
  {
    // TODO: Handle exceptions.
    Log.Error("unable to transcode file:{0} message:{1}", info.file, e.Message);
    return false;
  }
  return true;
}
/// <summary>
/// Removes the DirectVobSub filter (autoload variant first, then the "normal"
/// registration) from the graph and reconnects the pin pair it was inserted
/// between, so video flows straight from the upstream filter to the renderer.
/// No-op when neither DirectVobSub variant is present.
/// </summary>
/// <param name="graphBuilder">The graph to strip DirectVobSub from.</param>
public static void RemoveFromGraph(IGraphBuilder graphBuilder)
{
    IBaseFilter vob = null;
    DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.DirectVobSubAutoload, out vob);
    if (vob == null)
    {
        //Try the "normal" filter then.
        DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.DirectVobSubNormal, out vob);
        if (vob == null)
        {
            return;
        }
    }

    Log.Info("VideoPlayerVMR9: DirectVobSub in graph, removing...");

    // Check where video inputs are connected
    IPin pinVideoIn = DsFindPin.ByDirection(vob, PinDirection.Input, 0);
    IPin pinVideoOut = DsFindPin.ByDirection(vob, PinDirection.Output, 0);

    // BUGFIX: the original dereferenced pinVideoOut/pinVideoIn and later passed
    // the connected pins to Connect()/ReleaseComObject() without null checks,
    // which crashed when DirectVobSub was present but not (fully) connected.

    //find directvobsub's video output pin source input pin
    IPin pinVideoTo = null;
    if (pinVideoOut != null)
    {
        pinVideoOut.ConnectedTo(out pinVideoTo);
    }

    //find directvobsub's video input pin source output pin
    IPin pinVideoFrom = null;
    if (pinVideoIn != null)
    {
        pinVideoIn.ConnectedTo(out pinVideoFrom);
    }

    int hr;
    if (pinVideoFrom != null)
    {
        hr = pinVideoFrom.Disconnect();
        if (hr != 0)
        {
            Log.Error("VideoPlayerVMR9: DirectVobSub failed disconnecting source pin");
        }
    }

    if (pinVideoTo != null)
    {
        hr = pinVideoTo.Disconnect();
        if (hr != 0)
        {
            Log.Error("VideoPlayerVMR9: DirectVobSub failed disconnecting destination pin");
        }
    }

    //remove the DirectVobSub filter from the graph
    graphBuilder.RemoveFilter(vob);
    DirectShowUtil.ReleaseComObject(vob);
    vob = null;

    //reconnect the source output pin to the vmr9/evr filter
    hr = 0;
    if (pinVideoFrom != null && pinVideoTo != null)
    {
        hr = graphBuilder.Connect(pinVideoFrom, pinVideoTo);
        //hr = graphBuilder.Render(pinVideoFrom);
    }

    if (pinVideoFrom != null)
    {
        DirectShowUtil.ReleaseComObject(pinVideoFrom);
        pinVideoFrom = null;
    }
    if (pinVideoTo != null)
    {
        DirectShowUtil.ReleaseComObject(pinVideoTo);
        pinVideoTo = null;
    }
    if (pinVideoOut != null)
    {
        DirectShowUtil.ReleaseComObject(pinVideoOut);
        pinVideoOut = null;
    }
    if (pinVideoIn != null)
    {
        DirectShowUtil.ReleaseComObject(pinVideoIn);
        pinVideoIn = null;
    }

    if (hr != 0)
    {
        Log.Error("VideoPlayerVMR9: Could not connect video out to video renderer: {0}", hr);
    }
    else
    {
        Log.Debug("VideoPlayerVMR9: DirectVobSub graph rebuild finished");
    }
}
/// <summary>
/// Prepares the MPC-HC subtitle engine for the given graph: configures the
/// default style and forced-only mode, removes competing subtitle filters
/// (DirectVobSub, InternalScriptRenderer, FFDShow subtitles), resolves the
/// user's preferred subtitle language to an LCID, and hands everything to
/// MpcSubtitles.LoadSubtitles.
/// </summary>
/// <param name="graphBuilder">The playback graph the subtitles attach to.</param>
/// <param name="filename">Media file whose subtitles should be loaded.</param>
/// <returns>Whatever MpcSubtitles.LoadSubtitles reports.</returns>
public bool LoadSubtitles(IGraphBuilder graphBuilder, string filename)
{
    LoadSettings();
    MpcSubtitles.SetDefaultStyle(ref this.defStyle, this.overrideASSStyle);
    if (selectionOff)
    {
        MpcSubtitles.SetShowForcedOnly(false);
    }
    else
    {
        MpcSubtitles.SetShowForcedOnly(!this.autoShow);
    }
    //remove DirectVobSub
    DirectVobSubUtil.RemoveFromGraph(graphBuilder);
    {
        //remove InternalScriptRenderer as it takes subtitle pin
        IBaseFilter isr = null;
        DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.InternalScriptRenderer, out isr);
        if (isr != null)
        {
            graphBuilder.RemoveFilter(isr);
            DirectShowUtil.ReleaseComObject(isr);
        }
    }
    FFDShowEngine.DisableFFDShowSubtitles(graphBuilder);
    Size size = new Size(GUIGraphicsContext.Width, GUIGraphicsContext.Height);

    // Get Default Language from MP Setting and parse it to MPC-HC Engine (needed for forced track)
    string defaultLanguageCulture = "EN";
    string localizedCINameSub = "EN";
    int lcidCI = 0;

    using (Settings xmlreader = new MPSettings())
    {
        try
        {
            if (g_Player.IsVideo && (g_Player.CurrentFile.ToUpperInvariant().Contains(@"\BDMV\INDEX.BDMV")))
            {
                // Blu-ray: the setting stores the English culture name, so map it back to an LCID.
                localizedCINameSub = (xmlreader.GetValueAsString("bdplayer", "subtitlelanguage", "English"));
                foreach (CultureInfo ci in CultureInfo.GetCultures(CultureTypes.NeutralCultures))
                {
                    if (ci.EnglishName == localizedCINameSub)
                    {
                        lcidCI = ci.TextInfo.LCID;
                        // BUGFIX: stop at the first match instead of scanning every
                        // remaining neutral culture (the original loop had no break
                        // and a stray empty statement ";;").
                        break;
                    }
                }
                Log.Info("MpcEngine: Subtitle Blu-ray Player CultureInfo {0}", localizedCINameSub);
            }
            else
            {
                CultureInfo ci = new CultureInfo(xmlreader.GetValueAsString("subtitles", "language", defaultLanguageCulture));
                lcidCI = ci.TextInfo.LCID;
                Log.Info("MpcEngine: Subtitle VideoPlayer CultureInfo {0}", ci);
            }
        }
        catch (Exception ex)
        {
            // Fall back to English when the stored language cannot be parsed.
            CultureInfo ci = new CultureInfo(defaultLanguageCulture);
            lcidCI = ci.TextInfo.LCID;
            Log.Error("MpcEngine: SelectSubtitleLanguage - unable to build CultureInfo, make sure MediaPortal.xml is not corrupted! - {0}", ex);
        }
    }

    return MpcSubtitles.LoadSubtitles(
        DirectShowUtil.GetUnmanagedDevice(GUIGraphicsContext.DX9Device),
        size, filename, graphBuilder, subPaths, lcidCI);
}
/// <summary>
/// Transcodes a .ts/.mpg recording to WMV: builds a DirectShow graph of
/// TsReader -> (mpeg2|h264) video decoder + (mpeg2|aac) audio decoder ->
/// WM ASF writer, does a short pre-run to let the graph negotiate and to
/// measure the duration, then restarts the graph with a fresh ASF writer
/// for the actual transcode. Returns false (after Cleanup) on any failure.
/// </summary>
/// <param name="info">Describes the source file (info.file is the input path).</param>
/// <param name="format">Target container format; rejected unless Supports(format).</param>
/// <param name="quality">Encoding quality passed through to AddWmAsfWriter.</param>
/// <param name="standard">Video standard passed through to AddWmAsfWriter.</param>
public bool Transcode(TranscodeInfo info, VideoFormat format, Quality quality, Standard standard)
{
    try
    {
        if (!Supports(format)) return false;
        string ext = System.IO.Path.GetExtension(info.file);
        if (ext.ToLower() != ".ts" && ext.ToLower() != ".mpg")
        {
            Log.Info("TSReader2WMV: wrong file format");
            return false;
        }
        Log.Info("TSReader2WMV: create graph");
        graphBuilder = (IGraphBuilder)new FilterGraph();
        // Register the graph in the ROT so it can be inspected with GraphEdit.
        _rotEntry = new DsROTEntry((IFilterGraph)graphBuilder);
        Log.Info("TSReader2WMV: add filesource");
        TsReader reader = new TsReader();
        tsreaderSource = (IBaseFilter)reader;
        //ITSReader ireader = (ITSReader)reader;
        //ireader.SetTsReaderCallback(this);
        //ireader.SetRequestAudioChangeCallback(this);
        IBaseFilter filter = (IBaseFilter)tsreaderSource;
        graphBuilder.AddFilter(filter, "TSReader Source");
        IFileSourceFilter fileSource = (IFileSourceFilter)tsreaderSource;
        Log.Info("TSReader2WMV: load file:{0}", info.file);
        // NOTE(review): the HRESULT from Load is stored but never checked — a
        // failed load is only detected later when pins cannot be connected.
        int hr = fileSource.Load(info.file, null);
        //add audio/video codecs
        string strVideoCodec = "";
        string strH264VideoCodec = "";
        string strAudioCodec = "";
        string strAACAudioCodec = "";
        using (MediaPortal.Profile.Settings xmlreader = new MediaPortal.Profile.MPSettings())
        {
            strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "");
            strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "");
            strAACAudioCodec = xmlreader.GetValueAsString("mytv", "aacaudiocodec", "");
            strH264VideoCodec = xmlreader.GetValueAsString("mytv", "h264videocodec", "");
        }
        //Find the type of decoder required for the output video & audio pins on TSReader.
        Log.Info("TSReader2WMV: find tsreader compatible audio/video decoders");
        IPin pinOut0, pinOut1;
        IPin pinIn0, pinIn1;
        pinOut0 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 0); //audio
        pinOut1 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 1); //video
        if (pinOut0 == null || pinOut1 == null)
        {
            Log.Error("TSReader2WMV: FAILED: unable to get output pins of tsreader");
            Cleanup();
            return false;
        }
        // Probe the audio pin's media types for LATM AAC so the matching decoder is chosen.
        bool usingAAC = false;
        IEnumMediaTypes enumMediaTypes;
        hr = pinOut0.EnumMediaTypes(out enumMediaTypes);
        while (true)
        {
            AMMediaType[] mediaTypes = new AMMediaType[1];
            int typesFetched;
            hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
            if (hr != 0 || typesFetched == 0) break;
            if (mediaTypes[0].majorType == MediaType.Audio && mediaTypes[0].subType == MediaSubType.LATMAAC)
            {
                Log.Info("TSReader2WMV: found LATM AAC audio out pin on tsreader");
                usingAAC = true;
            }
        }
        // NOTE(review): enumMediaTypes is never released and the fetched
        // AMMediaType instances are never freed (DsUtils.FreeAMMediaType) —
        // presumably a COM/memory leak in both probe loops; verify before fixing.
        // Probe the video pin's media types for H.264 the same way.
        bool usingH264 = false;
        hr = pinOut1.EnumMediaTypes(out enumMediaTypes);
        while (true)
        {
            AMMediaType[] mediaTypes = new AMMediaType[1];
            int typesFetched;
            hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
            if (hr != 0 || typesFetched == 0) break;
            if (mediaTypes[0].majorType == MediaType.Video && mediaTypes[0].subType == AVC1)
            {
                Log.Info("TSReader2WMV: found H.264 video out pin on tsreader");
                usingH264 = true;
            }
        }
        //Add the type of decoder required for the output video & audio pins on TSReader.
        Log.Info("TSReader2WMV: add audio/video decoders to graph");
        if (usingH264 == false)
        {
            Log.Info("TSReader2WMV: add mpeg2 video decoder:{0}", strVideoCodec);
            VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
            if (VideoCodec == null)
            {
                Log.Error("TSReader2WMV: unable to add mpeg2 video decoder");
                Cleanup();
                return false;
            }
        }
        else
        {
            Log.Info("TSReader2WMV: add h264 video codec:{0}", strH264VideoCodec);
            VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strH264VideoCodec);
            if (VideoCodec == null)
            {
                Log.Error("TSReader2WMV: FAILED:unable to add h264 video codec");
                Cleanup();
                return false;
            }
        }
        if (usingAAC == false)
        {
            Log.Info("TSReader2WMV: add mpeg2 audio codec:{0}", strAudioCodec);
            AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
            if (AudioCodec == null)
            {
                Log.Error("TSReader2WMV: FAILED:unable to add mpeg2 audio codec");
                Cleanup();
                return false;
            }
        }
        else
        {
            Log.Info("TSReader2WMV: add aac audio codec:{0}", strAACAudioCodec);
            AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAACAudioCodec);
            if (AudioCodec == null)
            {
                Log.Error("TSReader2WMV: FAILED:unable to add aac audio codec");
                Cleanup();
                return false;
            }
        }
        Log.Info("TSReader2WMV: connect tsreader->audio/video decoders");
        //connect output #0 (audio) of tsreader->audio decoder input pin 0
        //connect output #1 (video) of tsreader->video decoder input pin 0
        pinIn0 = DsFindPin.ByDirection(AudioCodec, PinDirection.Input, 0); //audio
        pinIn1 = DsFindPin.ByDirection(VideoCodec, PinDirection.Input, 0); //video
        if (pinIn0 == null || pinIn1 == null)
        {
            Log.Error("TSReader2WMV: FAILED: unable to get pins of video/audio codecs");
            Cleanup();
            return false;
        }
        hr = graphBuilder.Connect(pinOut0, pinIn0);
        if (hr != 0)
        {
            Log.Error("TSReader2WMV: FAILED: unable to connect audio pins :0x{0:X}", hr);
            Cleanup();
            return false;
        }
        hr = graphBuilder.Connect(pinOut1, pinIn1);
        if (hr != 0)
        {
            Log.Error("TSReader2WMV: FAILED: unable to connect video pins :0x{0:X}", hr);
            Cleanup();
            return false;
        }
        // Output file sits next to the input, with a .wmv extension.
        string outputFilename = System.IO.Path.ChangeExtension(info.file, ".wmv");
        if (!AddWmAsfWriter(outputFilename, quality, standard)) return false;
        Log.Info("TSReader2WMV: start pre-run");
        mediaControl = graphBuilder as IMediaControl;
        mediaSeeking = tsreaderSource as IMediaSeeking;
        mediaEvt = graphBuilder as IMediaEventEx;
        mediaPos = graphBuilder as IMediaPosition;
        //get file duration
        // Seek far beyond any plausible end (5 hours, in 100-ns units); the
        // resulting clamped position is taken as the file duration.
        long lTime = 5 * 60 * 60;
        lTime *= 10000000;
        long pStop = 0;
        hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                       AMSeekingSeekingFlags.NoPositioning);
        if (hr == 0)
        {
            long lStreamPos;
            mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
            m_dDuration = lStreamPos;
            lTime = 0;
            // Rewind back to the start for the pre-run.
            mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                      AMSeekingSeekingFlags.NoPositioning);
        }
        double duration = m_dDuration / 10000000d;
        Log.Info("TSReader2WMV: movie duration:{0}", Util.Utils.SecondsToHMSString((int)duration));
        hr = mediaControl.Run();
        if (hr != 0)
        {
            Log.Error("TSReader2WMV: FAILED: unable to start graph :0x{0:X}", hr);
            Cleanup();
            return false;
        }
        // Pre-run: let the graph process ~2 seconds of stream (or give up after
        // 20 polls of 100 ms) so all filters finish media-type negotiation.
        int maxCount = 20;
        while (true)
        {
            long lCurrent;
            mediaSeeking.GetCurrentPosition(out lCurrent);
            double dpos = (double)lCurrent;
            dpos /= 10000000d;
            System.Threading.Thread.Sleep(100);
            if (dpos >= 2.0d) break;
            maxCount--;
            if (maxCount <= 0) break;
        }
        Log.Info("TSReader2WMV: pre-run done");
        Log.Info("TSReader2WMV: Get duration of movie");
        mediaControl.Stop();
        FilterState state;
        mediaControl.GetState(500, out state);
        // Force COM RCW cleanup before swapping the writer; repeated collects
        // are presumably a deliberate hack to get finalizers to release filters.
        GC.Collect();
        GC.Collect();
        GC.Collect();
        GC.WaitForPendingFinalizers();
        Log.Info("TSReader2WMV: reconnect mpeg2 video codec->ASF WM Writer");
        // Replace the ASF writer with a fresh one so the real transcode starts
        // from a clean output file.
        graphBuilder.RemoveFilter(fileWriterbase);
        if (!AddWmAsfWriter(outputFilename, quality, standard)) return false;
        Log.Info("TSReader2WMV: Start transcoding");
        hr = mediaControl.Run();
        if (hr != 0)
        {
            Log.Error("TSReader2WMV:FAILED:unable to start graph :0x{0:X}", hr);
            Cleanup();
            return false;
        }
    }
    catch (Exception e)
    {
        // TODO: Handle exceptions.
        Log.Error("unable to transcode file:{0} message:{1}", info.file, e.Message);
        return false;
    }
    return true;
}
/// <summary>
/// Attaches subtitle handling to an FFDShow video decoder that is already in
/// the graph. Removes competing subtitle filters (DirectVobSub and the
/// InternalScriptRenderer), locates an FFDShow video decoder (plain, DXVA or
/// raw variant), wraps it in an FFDShowAPI instance and applies the
/// auto-show/selection settings.
/// </summary>
/// <param name="graphBuilder">The playback graph to search.</param>
/// <param name="filename">Unused here; part of the common engine interface.</param>
/// <returns>false when no FFDShow video decoder is present, true otherwise.</returns>
public bool LoadSubtitles(IGraphBuilder graphBuilder, string filename)
{
    LoadSettings();

    //remove DirectVobSub
    DirectVobSubUtil.RemoveFromGraph(graphBuilder);

    //remove InternalScriptRenderer as it takes subtitle pin
    IBaseFilter scriptRenderer = null;
    DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.InternalScriptRenderer, out scriptRenderer);
    if (scriptRenderer != null)
    {
        graphBuilder.RemoveFilter(scriptRenderer);
        DirectShowUtil.ReleaseComObject(scriptRenderer);
    }

    // Locate an FFDShow video decoder, trying the plain, DXVA and raw
    // registrations in that order.
    IBaseFilter videoDecoder = null;
    DirectShowUtil.FindFilterByClassID(graphBuilder, FFDShowAPI.FFDShowVideoGuid, out videoDecoder);
    if (videoDecoder == null)
    {
        DirectShowUtil.FindFilterByClassID(graphBuilder, FFDShowAPI.FFDShowVideoDXVAGuid, out videoDecoder);
    }
    if (videoDecoder == null)
    {
        DirectShowUtil.FindFilterByClassID(graphBuilder, FFDShowAPI.FFDShowVideoRawGuid, out videoDecoder);
    }
    if (videoDecoder == null)
    {
        return false;
    }

    ffdshowAPI = new FFDShowAPI((object)videoDecoder);

    // Purely diagnostic: report whether the decoder exposes the IffdshowDec interface.
    IffdshowDec decoderInterface = videoDecoder as IffdshowDec;
    if (decoderInterface == null)
    {
        Log.Error("FFdshow interfaces not found. Try to update FFDShow");
    }
    else
    {
        Log.Info("FFdshow interfaces found");
    }

    Enable = selectionOff ? false : autoShow;
    return true;
}
/// <summary>
/// Transcodes a .ts/.mpg recording to MP4: builds a DirectShow graph of
/// TsReader -> (mpeg2|h264) video decoder + (mpeg2|aac) audio decoder ->
/// encoders/muxer/file writer (via AddCodecs), pre-runs the graph to let it
/// negotiate and to measure the duration, then rebuilds the encoder chain and
/// starts the real transcode. Returns false (after Cleanup) on any failure.
/// </summary>
/// <param name="info">Describes the source file (info.file is the input path).</param>
/// <param name="format">Target format; rejected unless Supports(format).</param>
/// <param name="quality">Encoding quality forwarded to the codec setup.</param>
/// <param name="standard">Video standard forwarded to the codec setup.</param>
public bool Transcode(TranscodeInfo info, MediaPortal.Core.Transcoding.VideoFormat format,
                      MediaPortal.Core.Transcoding.Quality quality, Standard standard)
{
    if (!Supports(format)) return false;
    string ext = System.IO.Path.GetExtension(info.file);
    if (ext.ToLowerInvariant() != ".ts" && ext.ToLowerInvariant() != ".mpg")
    {
        Log.Info("TSReader2MP4: wrong file format");
        return false;
    }
    try
    {
        graphBuilder = (IGraphBuilder)new FilterGraph();
        // Register the graph in the ROT so it can be inspected with GraphEdit.
        _rotEntry = new DsROTEntry((IFilterGraph)graphBuilder);
        Log.Info("TSReader2MP4: add filesource");
        TsReader reader = new TsReader();
        tsreaderSource = (IBaseFilter)reader;
        IBaseFilter filter = (IBaseFilter)tsreaderSource;
        graphBuilder.AddFilter(filter, "TSReader Source");
        IFileSourceFilter fileSource = (IFileSourceFilter)tsreaderSource;
        Log.Info("TSReader2MP4: load file:{0}", info.file);
        int hr = fileSource.Load(info.file, null);
        //add audio/video codecs
        string strVideoCodec = "";
        string strH264VideoCodec = "";
        string strAudioCodec = "";
        string strAACAudioCodec = "";
        using (MediaPortal.Profile.Settings xmlreader = new MediaPortal.Profile.MPSettings())
        {
            strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "");
            strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "");
            strAACAudioCodec = xmlreader.GetValueAsString("mytv", "aacaudiocodec", "");
            strH264VideoCodec = xmlreader.GetValueAsString("mytv", "h264videocodec", "");
        }
        //Find the type of decoder required for the output video & audio pins on TSReader.
        Log.Info("TSReader2MP4: find tsreader compatible audio/video decoders");
        IPin pinOut0, pinOut1;
        IPin pinIn0, pinIn1;
        pinOut0 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 0); //audio
        pinOut1 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 1); //video
        if (pinOut0 == null || pinOut1 == null)
        {
            Log.Error("TSReader2MP4: FAILED: unable to get output pins of tsreader");
            Cleanup();
            return false;
        }
        // Probe the audio pin's media types for LATM AAC so the matching decoder is chosen.
        bool usingAAC = false;
        IEnumMediaTypes enumMediaTypes;
        hr = pinOut0.EnumMediaTypes(out enumMediaTypes);
        while (true)
        {
            AMMediaType[] mediaTypes = new AMMediaType[1];
            int typesFetched;
            hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
            if (hr != 0 || typesFetched == 0) break;
            if (mediaTypes[0].majorType == MediaType.Audio && mediaTypes[0].subType == MediaSubType.LATMAAC)
            {
                Log.Info("TSReader2MP4: found LATM AAC audio out pin on tsreader");
                usingAAC = true;
            }
        }
        // Probe the video pin's media types for H.264 the same way.
        bool usingH264 = false;
        hr = pinOut1.EnumMediaTypes(out enumMediaTypes);
        while (true)
        {
            AMMediaType[] mediaTypes = new AMMediaType[1];
            int typesFetched;
            hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
            if (hr != 0 || typesFetched == 0) break;
            if (mediaTypes[0].majorType == MediaType.Video && mediaTypes[0].subType == AVC1)
            {
                Log.Info("TSReader2MP4: found H.264 video out pin on tsreader");
                usingH264 = true;
            }
        }
        //Add the type of decoder required for the output video & audio pins on TSReader.
        Log.Info("TSReader2MP4: add audio/video decoders to graph");
        if (usingH264 == false)
        {
            Log.Info("TSReader2MP4: add mpeg2 video decoder:{0}", strVideoCodec);
            VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
            if (VideoCodec == null)
            {
                Log.Error("TSReader2MP4: unable to add mpeg2 video decoder");
                Cleanup();
                return false;
            }
        }
        else
        {
            Log.Info("TSReader2MP4: add h264 video codec:{0}", strH264VideoCodec);
            VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strH264VideoCodec);
            if (VideoCodec == null)
            {
                Log.Error("TSReader2MP4: FAILED:unable to add h264 video codec");
                Cleanup();
                return false;
            }
        }
        if (usingAAC == false)
        {
            Log.Info("TSReader2MP4: add mpeg2 audio codec:{0}", strAudioCodec);
            AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
            if (AudioCodec == null)
            {
                Log.Error("TSReader2MP4: FAILED:unable to add mpeg2 audio codec");
                Cleanup();
                return false;
            }
        }
        else
        {
            Log.Info("TSReader2MP4: add aac audio codec:{0}", strAACAudioCodec);
            AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAACAudioCodec);
            if (AudioCodec == null)
            {
                Log.Error("TSReader2MP4: FAILED:unable to add aac audio codec");
                Cleanup();
                return false;
            }
        }
        Log.Info("TSReader2MP4: connect tsreader->audio/video decoders");
        //connect output #0 (audio) of tsreader->audio decoder input pin 0
        //connect output #1 (video) of tsreader->video decoder input pin 0
        pinIn0 = DsFindPin.ByDirection(AudioCodec, PinDirection.Input, 0); //audio
        pinIn1 = DsFindPin.ByDirection(VideoCodec, PinDirection.Input, 0); //video
        if (pinIn0 == null || pinIn1 == null)
        {
            Log.Error("TSReader2MP4: FAILED: unable to get pins of video/audio codecs");
            Cleanup();
            return false;
        }
        hr = graphBuilder.Connect(pinOut0, pinIn0);
        if (hr != 0)
        {
            Log.Error("TSReader2MP4: FAILED: unable to connect audio pins :0x{0:X}", hr);
            Cleanup();
            return false;
        }
        hr = graphBuilder.Connect(pinOut1, pinIn1);
        if (hr != 0)
        {
            Log.Error("TSReader2MP4: FAILED: unable to connect video pins :0x{0:X}", hr);
            Cleanup();
            return false;
        }
        //add encoders, muxer & filewriter
        if (!AddCodecs(graphBuilder, info)) return false;
        //setup graph controls
        mediaControl = graphBuilder as IMediaControl;
        mediaSeeking = tsreaderSource as IMediaSeeking;
        mediaEvt = graphBuilder as IMediaEventEx;
        mediaPos = graphBuilder as IMediaPosition;
        //get file duration
        // Seek far beyond any plausible end (5 hours, in 100-ns units); the
        // resulting clamped position is taken as the recording duration.
        Log.Info("TSReader2MP4: Get duration of recording");
        long lTime = 5 * 60 * 60;
        lTime *= 10000000;
        long pStop = 0;
        hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                       AMSeekingSeekingFlags.NoPositioning);
        if (hr == 0)
        {
            long lStreamPos;
            mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
            m_dDuration = lStreamPos;
            lTime = 0;
            // Rewind back to the start for the pre-run.
            mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                      AMSeekingSeekingFlags.NoPositioning);
        }
        double duration = m_dDuration / 10000000d;
        Log.Info("TSReader2MP4: recording duration: {0}", MediaPortal.Util.Utils.SecondsToHMSString((int)duration));
        //run the graph to initialize the filters to be sure
        hr = mediaControl.Run();
        if (hr != 0)
        {
            Log.Error("TSReader2MP4: FAILED: unable to start graph :0x{0:X}", hr);
            Cleanup();
            return false;
        }
        // Pre-run: let the graph process ~2 seconds of stream (or give up after
        // 20 polls of 100 ms) so all filters finish media-type negotiation.
        int maxCount = 20;
        while (true)
        {
            long lCurrent;
            mediaSeeking.GetCurrentPosition(out lCurrent);
            double dpos = (double)lCurrent;
            dpos /= 10000000d;
            System.Threading.Thread.Sleep(100);
            if (dpos >= 2.0d) break;
            maxCount--;
            if (maxCount <= 0) break;
        }
        mediaControl.Stop();
        FilterState state;
        mediaControl.GetState(500, out state);
        // Force COM RCW cleanup before swapping the encoder chain.
        GC.Collect();
        GC.Collect();
        GC.Collect();
        GC.WaitForPendingFinalizers();
        // Rebuild the encoder/muxer/writer chain from scratch for the real run.
        graphBuilder.RemoveFilter(mp4Muxer);
        graphBuilder.RemoveFilter(h264Encoder);
        graphBuilder.RemoveFilter(aacEncoder);
        graphBuilder.RemoveFilter((IBaseFilter)fileWriterFilter);
        if (!AddCodecs(graphBuilder, info)) return false;
        //Set Encoder quality & Muxer settings
        if (!EncoderSet(graphBuilder, info)) return false;
        //start transcoding - run the graph
        Log.Info("TSReader2MP4: start transcoding");
        //setup flow control
        //need to leverage CBAsePin, CPullPin & IAsyncReader methods.
        // BUGFIX: the original declared "IAsyncReader synchVideo = null;" and
        // immediately called synchVideo.SyncReadAligned(...), which always threw
        // NullReferenceException, was swallowed by the catch below, and made
        // every transcode abort with Cleanup(). The dead call is removed until
        // flow control is actually implemented.
        mediaSample = VideoCodec as IMediaSample; // NOTE(review): a filter is unlikely to implement IMediaSample — presumably always null; verify intent.
        hr = mediaControl.Run();
        if (hr != 0)
        {
            Log.Error("TSReader2MP4: FAILED:unable to start graph :0x{0:X}", hr);
            Cleanup();
            return false;
        }
    }
    catch (Exception ex)
    {
        Log.Error("TSReader2MP4: Unable create graph: {0}", ex.Message);
        Cleanup();
        return false;
    }
    return true;
}
/*
 * TIVO Files Pin Mapping (pin name between ||) (NOTE: XXXX changes from each machine and AC3 changes if the audio codec changes)
 * Audio -> Source Pin |Output| -> MainConcept MPEG DeMultiplexer |Input| |AC3 (PID XXXX @ Prog# 1)| -> Dump |Input|
 * Video -> Source Pin |Output| -> MainConcept MPEG DeMultiplexer |Input| |Video (PID XXXX @ Prog# 1)| -> Dump |Input|
 */
/// <summary>
/// Builds the extraction graph: adds a DirectShow source filter for the input
/// file, for .tivo files swaps the source for the TiVO MainConcept demux that
/// Render() pulls in (removing the redundant downstream filters), then walks
/// every output pin's media types and connects a Dump filter (via
/// ConnectDecryptedDump) for each video/audio/subtitle stream selected by
/// _extractMediaType. Throws on missing/unusable TiVO Desktop filters.
/// </summary>
public void BuildGraph()
{
    int hr;
    // IntPtr.Zero passed as the "fetched" count pointers — callers don't need the counts.
    IntPtr fetched = IntPtr.Zero;
    IntPtr fetched2 = IntPtr.Zero;
    IEnumPins FilterPins;
    IPin[] pins = new IPin[1];
    string PinID;
    // TiVO Directshow filters are only accessible through userspace otherwise decryption fails, so if we are running the engine as a service (instead of command line) we should prompt the user
    if ((_Ext == "tivo") && GlobalDefs.IsEngineRunningAsService)
    {
        _jobLog.WriteEntry(this, "You need to start MCEBuddy engine as a Command line program. TiVO Desktop Directshow decryption filters do not work with a Windows Service.", Log.LogEntryType.Error);
    }
    // Create the source filter for dvrms or wtv or TIVO (will automatically connect to TIVODecryptorTag in source itself)
    _jobLog.WriteEntry(this, "Loading file using DirectShow source filter", Log.LogEntryType.Debug);
    hr = _gb.AddSourceFilter(_SourceFile, "Source Filter", out _SourceF);
    checkHR(hr);
    // If this is a TIVO file, the source filter decrypts automatically but we must
    // connect the MPEG demultiplexer to get the audio and video output pins.
    if (_Ext == "tivo")
    {
        IPin PinOut, PinIn;
        IntPtr ptr;
        PinInfo demuxPinInfo;
        List<IBaseFilter> filterList = new List<IBaseFilter>();
        // Check if the source filter is a TiVO source filter (otherwise sometimes it tries to use the normal source filter which will fail since the stream in encrypted)
        string vendorInfo;
        FilterInfo filterInfo;
        _SourceF.QueryFilterInfo(out filterInfo);
        _SourceF.QueryVendorInfo(out vendorInfo);
        _jobLog.WriteEntry(this, "TiVO Source filter loaded by Directshow -> " + filterInfo.achName + " (" + vendorInfo + ")", Log.LogEntryType.Debug);
        if (vendorInfo == null || !vendorInfo.ToLower().Contains("tivo"))
        {
            string exception = "";
            // Check if you are running 64Bit MCEBuddy, TiVO needs 32bit MCEBuddy since TiVO directshow dll are 32bit and can only be loaded by 32bit processes
            if (IntPtr.Size == 8)
            {
                exception += "You need to run 32bit MCEBuddy, TiVO Directshow fiters cannot be accessed by a 64bit program.";
            }
            else
            {
                exception += "TiVO Desktop installation not detected by Windows DirectShow.";
            }
            throw new Exception(exception); // Get out of here and let the parent know something is wrong
        }
        hr = _SourceF.FindPin("Output", out PinOut); // Get the Source filter pinOut |Output|
        checkHR(hr);
        // When TIVO desktop is installed, Render automatically builds the filter graph with the
        // necessary demuxing filters - we cannot manually add the MainConcept demux filter since
        // the class isn't registered, but Render is able to find and load it (along with other
        // redundant filters like DTV, audio etc which we remove below).
        _jobLog.WriteEntry(this, "DirectShow building TiVO filter chain", Log.LogEntryType.Debug);
        hr = _gb.Render(PinOut);
        checkHR(hr);
        hr = PinOut.ConnectedTo(out ptr); // Find out which input Pin (Mainconcept Demux filter) the output of the Source Filter is connected to
        checkHR(hr);
        PinIn = (IPin)Marshal.GetObjectForIUnknown(ptr);
        hr = PinIn.QueryPinInfo(out demuxPinInfo); // Get the mainconcept demux filter from the pin
        checkHR(hr);
        demuxPinInfo.filter.QueryFilterInfo(out filterInfo);
        demuxPinInfo.filter.QueryVendorInfo(out vendorInfo);
        _jobLog.WriteEntry(this, "Checking downstream TiVO filter chain starting with TiVO Demux filter -> " + filterInfo.achName + " (" + vendorInfo + ")", Log.LogEntryType.Debug);
        // Collect all downstream (redundant) filters added by Render; if there are none,
        // TIVO desktop is not installed.
        if (!GetFilterChain(demuxPinInfo.filter, PinDirection.Output, filterList))
        {
            throw new Exception("Unable to get TIVO filter chain");
        }
        // Now remove all the filters in the chain downstream after the demux filter from the
        // graph builder (we don't need them, we will add our own filters later)
        _jobLog.WriteEntry(this, "Removing redundant filters from TiVO filter chain", Log.LogEntryType.Debug);
        foreach (IBaseFilter filter in filterList)
        {
            filter.QueryFilterInfo(out filterInfo);
            filter.QueryVendorInfo(out vendorInfo);
            _jobLog.WriteEntry(this, "Removing filter -> " + filterInfo.achName + " (" + vendorInfo + ")", Log.LogEntryType.Debug);
            _gb.RemoveFilter(filter);
            Marshal.FinalReleaseComObject(filter); // Release the COM object
        }
        // Now the TIVO MainConcept Demux Filter is our new "Source" filter
        _SourceF = demuxPinInfo.filter;
    }
    // TODO: We need to find a way to insert a filter which can allow us to select audio streams (e.g. LAV filter, currently it only allows us access to the default audio stream and not multiple audio streams)
    // Cycle through pins, connecting as appropriate
    hr = _SourceF.EnumPins(out FilterPins);
    checkHR(hr);
    while (FilterPins.Next(pins.Length, pins, fetched) == 0)
    {
        // Raw enumeration of AM_MEDIA_TYPE pointers: Next fills ptypes with a pointer
        // to the media-type structure, which is then marshaled manually below.
        IntPtr ptypes = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(IntPtr)));
        AMMediaType mtypes;
        IEnumMediaTypes enummtypes;
        IntPtr ptrEnum;
        pins[0].EnumMediaTypes(out ptrEnum);
        enummtypes = (IEnumMediaTypes)Marshal.GetObjectForIUnknown(ptrEnum);
        // NOTE(review): pins[0], enummtypes and the enumerated AM_MEDIA_TYPE
        // structures are never released/freed in this loop — presumably a COM
        // and CoTaskMem leak per pin; confirm before changing.
        while (enummtypes.Next(1, ptypes, fetched2) == 0)
        {
            /* Extract Audio, Video or Subtitle streams -> References:
             * http://nate.deepcreek.org.au/svn/DigitalWatch/trunk/bin/MediaTypes.txt
             * http://msdn.microsoft.com/en-us/library/ms932033.aspx
             * https://sourceforge.net/p/tsubget/home/Dumping%20a%20Stream/
             * http://msdn.microsoft.com/en-us/library/windows/desktop/dd695343(v=vs.85).aspx
             * http://msdn.microsoft.com/en-us/library/windows/desktop/dd390660(v=vs.85).aspx
             * http://msdn.microsoft.com/en-us/library/windows/desktop/dd407354(v=vs.85).aspx
             * http://whrl.pl/RcRv5p (extracting Teletext from WTV/DVRMS)
             */
            IntPtr ptrStructure = Marshal.ReadIntPtr(ptypes);
            mtypes = (AMMediaType)Marshal.PtrToStructure(ptrStructure, typeof(AMMediaType));
            if ((mtypes.majorType == MediaType.Video) || (mtypes.majorType == MediaType.Audio) || (mtypes.majorType == MediaType.Mpeg2PES) || (mtypes.majorType == MediaType.Stream) || (mtypes.majorType == MediaType.AuxLine21Data) || (mtypes.majorType == MediaType.VBI) || (mtypes.majorType == MediaType.MSTVCaption) || (mtypes.majorType == MediaType.DTVCCData) || (mtypes.majorType == MediaType.Mpeg2Sections && mtypes.subType == MediaSubType.None && mtypes.formatType == FormatType.None))
            {
                string DumpFileName = "";
                if ((mtypes.majorType == MediaType.Video) && ((_extractMediaType & ExtractMediaType.Video) != 0)) // Video
                {
                    DumpFileName = Path.Combine(_workPath, Path.GetFileNameWithoutExtension(_SourceFile) + "_VIDEO");
                    _VideoPart = DumpFileName;
                    _jobLog.WriteEntry(this, "Found Video stream, extracting -> " + DumpFileName, Log.LogEntryType.Debug);
                }
                else if (((mtypes.majorType == MediaType.Audio) || // Audio types https://msdn.microsoft.com/en-us/library/windows/desktop/dd390676(v=vs.85).aspx
                          ((mtypes.majorType == MediaType.Mpeg2PES) && ((mtypes.subType == MediaSubType.DolbyAC3) || (mtypes.subType == MediaSubType.DTS) || (mtypes.subType == MediaSubType.DvdLPCMAudio) || (mtypes.subType == MediaSubType.Mpeg2Audio))) ||
                          ((mtypes.majorType == MediaType.Stream) && ((mtypes.subType == MediaSubType.DolbyAC3) || (mtypes.subType == MediaSubType.MPEG1Audio) || (mtypes.subType == MediaSubType.Mpeg2Audio) || (mtypes.subType == MediaSubType.DolbyDDPlus) || (mtypes.subType == MediaSubType.MpegADTS_AAC) || (mtypes.subType == MediaSubType.MpegLOAS)))
                         ) && ((_extractMediaType & ExtractMediaType.Audio) != 0))
                {
                    DumpFileName = Path.Combine(_workPath, Path.GetFileNameWithoutExtension(_SourceFile) + "_AUDIO" + AudioParts.Count.ToString());
                    _AudioParts.Add(DumpFileName);
                    _jobLog.WriteEntry(this, "Found Audio stream, extracting -> " + DumpFileName, Log.LogEntryType.Debug);
                }
                else if ((_extractMediaType & ExtractMediaType.Subtitle) != 0) // Subtitles
                {
                    DumpFileName = Path.Combine(_workPath, Path.GetFileNameWithoutExtension(_SourceFile) + "_SUBTITLE" + SubtitleParts.Count.ToString());
                    SubtitleParts.Add(DumpFileName);
                    _jobLog.WriteEntry(this, "Found Subtitle stream, extracting -> " + DumpFileName, Log.LogEntryType.Debug);
                }
                if (!String.IsNullOrWhiteSpace(DumpFileName)) // If we are asked to extract something
                {
                    hr = pins[0].QueryId(out PinID);
                    ConnectDecryptedDump(PinID, DumpFileName);
                }
            }
            else
            {
                // Debug - looking for more subtitle types (very poorly documented by Microsoft)
                Guid type = mtypes.majorType;
                Guid subtype = mtypes.subType;
                Guid formattyype = mtypes.formatType;
            }
        }
        Marshal.FreeCoTaskMem(ptypes); // Free up the memory
    }
}
/// <summary>
/// Opens the media by initializing the DirectShow graph: creates the graph,
/// adds the splitter source and loads the file, replaces the splitter's
/// connected video/audio decoders with the configured ones, inserts the audio
/// renderer and video renderer, then renders every splitter output pin. On any
/// exception it falls back to oldOpenSource(), and reports failure through
/// InvokeMediaFailed only when the fallback also fails.
/// </summary>
protected virtual void OpenSource()
{
    /* Make sure we clean up any remaining mess */
    FreeResources();
    if (m_sourceUri == null)
        return;
    string fileSource = m_sourceUri.OriginalString;
    if (string.IsNullOrEmpty(fileSource))
        return;
    try
    {
        /* Creates the GraphBuilder COM object */
        m_graph = new FilterGraphNoThread() as IGraphBuilder;
        if (m_graph == null)
            throw new Exception("Could not create a graph");
        var filterGraph = m_graph as IFilterGraph2;
        if (filterGraph == null)
            throw new Exception("Could not QueryInterface for the IFilterGraph2");
        IBaseFilter sourceFilter;
        int hr;
        // Set LAV Splitter
        /* LAVSplitterSource reader = new LAVSplitterSource();
           sourceFilter = reader as IBaseFilter;
           var objectWithSite = reader as IObjectWithSite;
           if (objectWithSite != null) { objectWithSite.SetSite(this); }
           hr = m_graph.AddFilter(sourceFilter, SplitterSource);
           DsError.ThrowExceptionForHR(hr);*/
        sourceFilter = DirectShowUtil.AddFilterToGraph(m_graph, SplitterSource, Guid.Empty);
        IFileSourceFilter interfaceFile = (IFileSourceFilter)sourceFilter;
        hr = interfaceFile.Load(fileSource, null);
        DsError.ThrowExceptionForHR(hr);
        // Set Video Codec
        // Remove Pin: disconnect whatever decoder auto-connected to the splitter's
        // "Video" pin and strip it from the graph, so the configured decoder is used.
        var videoPinFrom = DirectShowLib.DsFindPin.ByName(sourceFilter, "Video");
        IPin videoPinTo;
        if (videoPinFrom != null)
        {
            hr = videoPinFrom.ConnectedTo(out videoPinTo);
            if (hr >= 0 && videoPinTo != null)
            {
                PinInfo pInfo;
                videoPinTo.QueryPinInfo(out pInfo);
                FilterInfo fInfo;
                pInfo.filter.QueryFilterInfo(out fInfo);
                DirectShowUtil.DisconnectAllPins(m_graph, pInfo.filter);
                m_graph.RemoveFilter(pInfo.filter);
                DsUtils.FreePinInfo(pInfo);
                Marshal.ReleaseComObject(fInfo.pGraph);
                Marshal.ReleaseComObject(videoPinTo);
                videoPinTo = null;
            }
            Marshal.ReleaseComObject(videoPinFrom);
            videoPinFrom = null;
        }
        DirectShowUtil.AddFilterToGraph(m_graph, VideoDecoder, Guid.Empty);
        try
        {
            // Set Audio Codec
            // Remove Pin: same replacement dance for the splitter's "Audio" pin.
            var audioPinFrom = DirectShowLib.DsFindPin.ByName(sourceFilter, "Audio");
            IPin audioPinTo;
            if (audioPinFrom != null)
            {
                hr = audioPinFrom.ConnectedTo(out audioPinTo);
                if (hr >= 0 && audioPinTo != null)
                {
                    PinInfo pInfo;
                    audioPinTo.QueryPinInfo(out pInfo);
                    FilterInfo fInfo;
                    pInfo.filter.QueryFilterInfo(out fInfo);
                    DirectShowUtil.DisconnectAllPins(m_graph, pInfo.filter);
                    m_graph.RemoveFilter(pInfo.filter);
                    DsUtils.FreePinInfo(pInfo);
                    Marshal.ReleaseComObject(fInfo.pGraph);
                    Marshal.ReleaseComObject(audioPinTo);
                    audioPinTo = null;
                }
                Marshal.ReleaseComObject(audioPinFrom);
                audioPinFrom = null;
            }
            DirectShowUtil.AddFilterToGraph(m_graph, AudioDecoder, Guid.Empty);
            /* Add our prefered audio renderer */
            InsertAudioRenderer(AudioRenderer);
        }
        catch
        {
            // No Audio available
            Trace.TraceError("No Audio Device found!");
        }
        IBaseFilter renderer = CreateVideoRenderer(VideoRenderer, m_graph, 2);
        /* We will want to enum all the pins on the source filter */
        IEnumPins pinEnum;
        hr = sourceFilter.EnumPins(out pinEnum);
        DsError.ThrowExceptionForHR(hr);
        IntPtr fetched = IntPtr.Zero;
        IPin[] pins = { null };
        /* Counter for how many pins successfully rendered */
        int pinsRendered = 0;
        if (VideoRenderer == VideoRendererType.VideoMixingRenderer9)
        {
            var mixer = renderer as IVMRMixerControl9;
            if (mixer != null)
            {
                VMR9MixerPrefs dwPrefs;
                mixer.GetMixingPrefs(out dwPrefs);
                dwPrefs &= ~VMR9MixerPrefs.RenderTargetMask;
                dwPrefs |= VMR9MixerPrefs.RenderTargetRGB;
                // NOTE(review): SetMixingPrefs is commented out, so this whole
                // block currently has no effect — confirm whether that is intentional.
                //mixer.SetMixingPrefs(dwPrefs);
            }
        }
        /* Loop over each pin of the source filter */
        while (pinEnum.Next(pins.Length, pins, fetched) == 0)
        {
            if (filterGraph.RenderEx(pins[0], AMRenderExFlags.RenderToExistingRenderers, IntPtr.Zero) >= 0)
                pinsRendered++;
            Marshal.ReleaseComObject(pins[0]);
        }
        Marshal.ReleaseComObject(pinEnum);
        Marshal.ReleaseComObject(sourceFilter);
        if (pinsRendered == 0)
            throw new Exception("Could not render any streams from the source Uri");
#if DEBUG
        /* Adds the GB to the ROT so we can view it in graphedit */
        m_dsRotEntry = new DsROTEntry(m_graph);
#endif
        /* Configure the graph in the base class */
        SetupFilterGraph(m_graph);
        HasVideo = true;
    }
    catch (Exception ex)
    {
        /* This exection will happen usually if the media does
         * not exist or could not open due to not having the
         * proper filters installed */
        // Fallback try auto graph:
        var result = oldOpenSource();
        if (!result)
        {
            FreeResources();
            /* Fire our failed event */
            InvokeMediaFailed(new MediaFailedEventArgs(ex.Message, ex));
        }
    }
    // NOTE(review): this fires even when both the primary path and the fallback
    // failed (after InvokeMediaFailed) — confirm listeners tolerate that ordering.
    InvokeMediaOpened();
}
/// <summary>
/// Removes every filter from the given DirectShow graph and releases the
/// COM reference obtained for each one during enumeration.
/// </summary>
/// <param name="graphBuilder">The graph to empty. Must not be null.</param>
/// <exception cref="ArgumentNullException">
/// Thrown when <paramref name="graphBuilder"/> is null.
/// </exception>
public static void RemoveAllFilters(IGraphBuilder graphBuilder)
{
    // Validate before doing any work (previously the ArrayList was
    // allocated before the null check).
    if (graphBuilder == null)
        throw new ArgumentNullException(nameof(graphBuilder));

    IEnumFilters enumFilters;
    int hr = graphBuilder.EnumFilters(out enumFilters);
    DsError.ThrowExceptionForHR(hr);

    // Snapshot the filters first: removing filters while enumerating would
    // invalidate the enumerator. (Generic list instead of legacy ArrayList;
    // fully qualified so no extra using directive is required.)
    var filtersArray = new System.Collections.Generic.List<IBaseFilter>();
    try
    {
        var filters = new IBaseFilter[1];
        IntPtr fetched = IntPtr.Zero; // don't care about the fetched count
        while (enumFilters.Next(filters.Length, filters, fetched) == 0)
        {
            filtersArray.Add(filters[0]);
        }
    }
    finally
    {
        Marshal.ReleaseComObject(enumFilters);
    }

    foreach (IBaseFilter filter in filtersArray)
    {
        hr = graphBuilder.RemoveFilter(filter);
        Marshal.ReleaseComObject(filter);
    }
}
/// <summary>
/// Transcodes a .dvr-ms/.sbe recording to DivX/AVI. Builds a
/// StreamBufferSource -> MPEG-2 decoder graph, runs it briefly (pre-run) so
/// all filters negotiate media types, then rebuilds the encoder chain via
/// AddCodecs and starts the real transcode. Returns false (after Cleanup)
/// on any failure.
/// </summary>
public bool Transcode(TranscodeInfo info, MediaPortal.Core.Transcoding.VideoFormat format,
                      MediaPortal.Core.Transcoding.Quality quality, Standard standard)
{
  if (!Supports(format)) return false;

  // Only StreamBuffer recordings can be read by the SBE source filter.
  string ext = System.IO.Path.GetExtension(info.file);
  if (ext.ToLower() != ".dvr-ms" && ext.ToLower() != ".sbe")
  {
    Log.Info("DVRMS2DIVX: wrong file format");
    return false;
  }

  //disable xvid status window while encoding
  /* try
  {
    using (RegistryKey subkey = Registry.CurrentUser.OpenSubKey(@"Software\GNU\XviD", true))
    {
      if (subkey != null)
      {
        Int32 uivalue = 0;
        subkey.SetValue("display_status", (Int32)uivalue);
        subkey.SetValue("debug", (Int32)uivalue);
        subkey.SetValue("bitrate", (Int32)bitrate);
        uivalue = 1;
        subkey.SetValue("interlacing", (Int32)uivalue);
      }
    }
  }
  catch (Exception) {}*/
  //Type comtype = null;
  //object comobj = null;
  try
  {
    graphBuilder = (IGraphBuilder)new FilterGraph();
    // Register in the ROT so the graph can be inspected with GraphEdit.
    _rotEntry = new DsROTEntry((IFilterGraph)graphBuilder);

    Log.Info("DVRMS2DIVX: add filesource");
    bufferSource = (IStreamBufferSource)new StreamBufferSource();
    IBaseFilter filter = (IBaseFilter)bufferSource;
    graphBuilder.AddFilter(filter, "SBE SOURCE");

    IFileSourceFilter fileSource = (IFileSourceFilter)bufferSource;
    Log.Info("DVRMS2DIVX: load file:{0}", info.file);
    int hr = fileSource.Load(info.file, null);
    // NOTE(review): the hr from Load() is not checked here; a load failure
    // only surfaces later when the pin connections fail.

    /*string strDemuxerMoniker = @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{AFB6C280-2C41-11D3-8A60-0000F81E0E4A}";
    mpegDemuxer = Marshal.BindToMoniker(strDemuxerMoniker) as IBaseFilter;
    if (mpegDemuxer == null)
    {
      Log.Error("DVRMS2DIVX:FAILED:unable to add mpeg2 demuxer");
      Cleanup();
      return false;
    }
    hr = graphBuilder.AddFilter(mpegDemuxer, "MPEG-2 Demultiplexer");
    if (hr != 0)
    {
      Log.Error("DVRMS2DIVX:FAILED:Add mpeg2 demuxer to filtergraph :0x{0:X}", hr);
      Cleanup();
      return false;
    }*/

    //add mpeg2 audio/video codecs
    // Video decoder is bound by explicit CLSID moniker; audio by friendly name.
    string strVideoCodecMoniker =
      @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{39F498AF-1A09-4275-B193-673B0BA3D478}";
    string strAudioCodec = "MPC - MPA Decoder Filter";

    Log.Info("DVRMS2DIVX: add MPV mpeg2 video decoder");
    Mpeg2VideoCodec = Marshal.BindToMoniker(strVideoCodecMoniker) as IBaseFilter;
    if (Mpeg2VideoCodec == null)
    {
      Log.Error("DVRMS2DIVX:FAILED:unable to add MPV mpeg2 video decoder");
      Cleanup();
      return false;
    }
    hr = graphBuilder.AddFilter(Mpeg2VideoCodec, "MPC - MPEG-2 Video Decoder (Gabest)");
    if (hr != 0)
    {
      Log.Error("DVRMS2DIVX:FAILED:Add MPV mpeg2 video to filtergraph :0x{0:X}", hr);
      Cleanup();
      return false;
    }

    Log.Info("DVRMS2DIVX: add MPA mpeg2 audio codec:{0}", strAudioCodec);
    Mpeg2AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
    if (Mpeg2AudioCodec == null)
    {
      Log.Error("DVRMS2DIVX:FAILED:unable to add MPV mpeg2 audio codec");
      Cleanup();
      return false;
    }

    //connect output #0 of streambuffer source->mpeg2 audio codec pin 1
    //connect output #1 of streambuffer source->mpeg2 video codec pin 1
    Log.Info("DVRMS2DIVX: connect streambufer source->mpeg audio/video decoders");
    IPin pinOut0, pinOut1;
    IPin pinIn0, pinIn1;
    pinOut0 = DsFindPin.ByDirection((IBaseFilter)bufferSource, PinDirection.Output, 0); //audio
    pinOut1 = DsFindPin.ByDirection((IBaseFilter)bufferSource, PinDirection.Output, 1); //video
    if (pinOut0 == null || pinOut1 == null)
    {
      Log.Error("DVRMS2DIVX:FAILED:unable to get pins of source");
      Cleanup();
      return false;
    }
    pinIn0 = DsFindPin.ByDirection(Mpeg2VideoCodec, PinDirection.Input, 0); //video
    pinIn1 = DsFindPin.ByDirection(Mpeg2AudioCodec, PinDirection.Input, 0); //audio
    if (pinIn0 == null || pinIn1 == null)
    {
      Log.Error("DVRMS2DIVX:FAILED:unable to get pins of mpeg2 video/audio codec");
      Cleanup();
      return false;
    }
    // NOTE(review): the four pin references above are never released — COM
    // reference leak; confirm against other graph code before fixing.
    hr = graphBuilder.Connect(pinOut0, pinIn1);
    if (hr != 0)
    {
      Log.Error("DVRMS2DIVX:FAILED:unable to connect audio pins :0x{0:X}", hr);
      Cleanup();
      return false;
    }
    hr = graphBuilder.Connect(pinOut1, pinIn0);
    if (hr != 0)
    {
      Log.Error("DVRMS2DIVX:FAILED:unable to connect video pins :0x{0:X}", hr);
      Cleanup();
      return false;
    }

    // Adds AVI mux, DivX/MP3 encoders and the file writer downstream.
    if (!AddCodecs(graphBuilder, info)) return false;

    //      hr=(graphBuilder as IMediaFilter).SetSyncSource(null);
    //      if (hr!=0)
    //        Log.Error("DVRMS2DIVX:FAILED:to SetSyncSource :0x{0:X}",hr);

    mediaControl = graphBuilder as IMediaControl;
    mediaSeeking = bufferSource as IStreamBufferMediaSeeking;
    mediaEvt = graphBuilder as IMediaEventEx;
    mediaPos = graphBuilder as IMediaPosition;

    //get file duration
    // Seek far past the end (5 hours) and read back the clamped position.
    Log.Info("DVRMS2DIVX: Get duration of movie");
    long lTime = 5 * 60 * 60;
    lTime *= 10000000; // seconds -> 100ns units
    long pStop = 0;
    hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                   AMSeekingSeekingFlags.NoPositioning);
    if (hr == 0)
    {
      long lStreamPos;
      mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
      m_dDuration = lStreamPos;
      lTime = 0;
      mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                AMSeekingSeekingFlags.NoPositioning);
    }
    double duration = m_dDuration / 10000000d;
    Log.Info("DVRMS2DIVX: movie duration:{0}", MediaPortal.Util.Utils.SecondsToHMSString((int)duration));

    //      hr=(graphBuilder as IMediaFilter).SetSyncSource(null);
    //      if (hr!=0)
    //        Log.Error("DVRMS2DIVX:FAILED:to SetSyncSource :0x{0:X}",hr);

    hr = mediaControl.Run();
    if (hr != 0)
    {
      Log.Error("DVRMS2DIVX:FAILED:unable to start graph :0x{0:X}", hr);
      Cleanup();
      return false;
    }

    // Pre-run: let the graph play ~2 seconds (at most 20 x 100ms polls) so
    // every filter negotiates its media type before the encoder chain is
    // torn down and rebuilt below.
    int maxCount = 20;
    while (true)
    {
      long lCurrent;
      mediaSeeking.GetCurrentPosition(out lCurrent);
      double dpos = (double)lCurrent;
      dpos /= 10000000d;
      System.Threading.Thread.Sleep(100);
      if (dpos >= 2.0d) break;
      maxCount--;
      if (maxCount <= 0) break;
    }
    mediaControl.Stop();
    FilterState state;
    mediaControl.GetState(500, out state);

    // Force release of dangling COM wrappers before filters are re-added.
    GC.Collect();
    GC.Collect();
    GC.Collect();
    GC.WaitForPendingFinalizers();

    graphBuilder.RemoveFilter(aviMuxer);
    graphBuilder.RemoveFilter(divxCodec);
    graphBuilder.RemoveFilter(mp3Codec);
    graphBuilder.RemoveFilter((IBaseFilter)fileWriterFilter);
    if (!AddCodecs(graphBuilder, info)) return false;

    //      hr=(graphBuilder as IMediaFilter).SetSyncSource(null);
    //      if (hr!=0)
    //        Log.Error("DVRMS2DIVX:FAILED:to SetSyncSource :0x{0:X}",hr);

    Log.Info("DVRMS2DIVX: start transcoding");
    hr = mediaControl.Run();
    if (hr != 0)
    {
      Log.Error("DVRMS2DIVX:FAILED:unable to start graph :0x{0:X}", hr);
      Cleanup();
      return false;
    }
  }
  catch (Exception ex)
  {
    Log.Error("DVRMS2DIVX:Unable create graph: {0}", ex.Message);
    Cleanup();
    return false;
  }
  return true;
}
/// <summary>
/// Transcodes a .dvr-ms/.sbe recording to WMV: builds a StreamBufferSource ->
/// MPEG-2 decoder graph, runs a short pre-run so the graph negotiates media
/// types, then re-adds the WM ASF writer and starts the real transcode.
/// Returns false (after Cleanup) on any failure.
/// </summary>
public bool Transcode(TranscodeInfo info, VideoFormat format, Quality quality, Standard standard)
{
  try
  {
    if (!Supports(format)) return false;

    // Only StreamBuffer recordings can be read by the SBE source filter.
    string ext = System.IO.Path.GetExtension(info.file).ToLower();
    if (ext != ".dvr-ms" && ext != ".sbe")
    {
      Log.Info("DVRMS2WMV: wrong file format");
      return false;
    }

    Log.Info("DVRMS2WMV: create graph");
    graphBuilder = (IGraphBuilder)new FilterGraph();
    // Register in the ROT so the graph can be inspected with GraphEdit.
    _rotEntry = new DsROTEntry((IFilterGraph)graphBuilder);

    Log.Info("DVRMS2WMV: add streambuffersource");
    bufferSource = (IStreamBufferSource)new StreamBufferSource();
    IBaseFilter filter = (IBaseFilter)bufferSource;
    graphBuilder.AddFilter(filter, "SBE SOURCE");

    Log.Info("DVRMS2WMV: load file:{0}", info.file);
    IFileSourceFilter fileSource = (IFileSourceFilter)bufferSource;
    int hr = fileSource.Load(info.file, null);
    if (hr != 0)
    {
      // BUGFIX: the Load() result used to be checked only later, against the
      // wrong condition (see video codec check below); fail fast here instead.
      Log.Error("DVRMS2WMV:FAILED:unable to load file:{0} :0x{1:X}", info.file, hr);
      Cleanup();
      return false;
    }

    // Pick the user-configured mpeg2 audio/video decoders.
    string strVideoCodec = "";
    string strAudioCodec = "";
    using (MediaPortal.Profile.Settings xmlreader = new MediaPortal.Profile.MPSettings())
    {
      strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "MPC - MPEG-2 Video Decoder (Gabest)");
      strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "MPC - MPA Decoder Filter");
    }

    Log.Info("DVRMS2WMV: add mpeg2 video codec:{0}", strVideoCodec);
    Mpeg2VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
    if (Mpeg2VideoCodec == null)
    {
      // BUGFIX: this branch previously tested the stale 'hr' from
      // fileSource.Load() instead of the AddFilterToGraph result, which
      // signals failure by returning null (exactly as the audio branch below
      // and the DVRMS2DIVX twin of this method already assume).
      Log.Error("DVRMS2WMV:FAILED:unable to add mpeg2 video codec");
      Cleanup();
      return false;
    }

    Log.Info("DVRMS2WMV: add mpeg2 audio codec:{0}", strAudioCodec);
    Mpeg2AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
    if (Mpeg2AudioCodec == null)
    {
      Log.Error("DVRMS2WMV:FAILED:unable to add mpeg2 audio codec");
      Cleanup();
      return false;
    }

    Log.Info("DVRMS2WMV: connect streambufer source->mpeg audio/video decoders");
    //connect output #0 of streambuffer source->mpeg2 audio codec pin 1
    //connect output #1 of streambuffer source->mpeg2 video codec pin 1
    IPin pinOut0, pinOut1;
    IPin pinIn0, pinIn1;
    pinOut0 = DsFindPin.ByDirection((IBaseFilter)bufferSource, PinDirection.Output, 0); //audio
    pinOut1 = DsFindPin.ByDirection((IBaseFilter)bufferSource, PinDirection.Output, 1); //video
    if (pinOut0 == null || pinOut1 == null)
    {
      Log.Error("DVRMS2WMV:FAILED:unable to get pins of source");
      Cleanup();
      return false;
    }
    pinIn0 = DsFindPin.ByDirection(Mpeg2VideoCodec, PinDirection.Input, 0); //video
    pinIn1 = DsFindPin.ByDirection(Mpeg2AudioCodec, PinDirection.Input, 0); //audio
    if (pinIn0 == null || pinIn1 == null)
    {
      Log.Error("DVRMS2WMV:FAILED:unable to get pins of mpeg2 video/audio codec");
      Cleanup();
      return false;
    }
    hr = graphBuilder.Connect(pinOut0, pinIn1);
    if (hr != 0)
    {
      Log.Error("DVRMS2WMV:FAILED:unable to connect audio pins :0x{0:X}", hr);
      Cleanup();
      return false;
    }
    hr = graphBuilder.Connect(pinOut1, pinIn0);
    if (hr != 0)
    {
      Log.Error("DVRMS2WMV:FAILED:unable to connect video pins :0x{0:X}", hr);
      Cleanup();
      return false;
    }

    string outputFilename = System.IO.Path.ChangeExtension(info.file, ".wmv");
    if (!AddWmAsfWriter(outputFilename, quality, standard)) return false;

    Log.Info("DVRMS2WMV: start pre-run");
    mediaControl = graphBuilder as IMediaControl;
    mediaSeeking = bufferSource as IStreamBufferMediaSeeking;
    mediaEvt = graphBuilder as IMediaEventEx;
    mediaPos = graphBuilder as IMediaPosition;

    // Determine duration: seek far past the end (5 hours) and read back the
    // clamped position.
    long lTime = 5 * 60 * 60;
    lTime *= 10000000; // seconds -> 100ns units
    long pStop = 0;
    hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                   AMSeekingSeekingFlags.NoPositioning);
    if (hr == 0)
    {
      long lStreamPos;
      mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
      m_dDuration = lStreamPos;
      lTime = 0;
      mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                AMSeekingSeekingFlags.NoPositioning);
    }
    double duration = m_dDuration / 10000000d;
    Log.Info("DVRMS2WMV: movie duration:{0}", Util.Utils.SecondsToHMSString((int)duration));

    hr = mediaControl.Run();
    if (hr != 0)
    {
      Log.Error("DVRMS2WMV:FAILED:unable to start graph :0x{0:X}", hr);
      Cleanup();
      return false;
    }

    // Pre-run: let the graph play ~2 seconds (at most 20 x 100ms polls) so
    // the ASF writer negotiates its inputs before the real transcode pass.
    int maxCount = 20;
    while (true)
    {
      long lCurrent;
      mediaSeeking.GetCurrentPosition(out lCurrent);
      double dpos = (double)lCurrent;
      dpos /= 10000000d;
      System.Threading.Thread.Sleep(100);
      if (dpos >= 2.0d) break;
      maxCount--;
      if (maxCount <= 0) break;
    }

    Log.Info("DVRMS2WMV: pre-run done");
    Log.Info("DVRMS2WMV: Get duration of movie");
    mediaControl.Stop();
    FilterState state;
    mediaControl.GetState(500, out state);

    // Force release of dangling COM wrappers before re-adding the writer.
    GC.Collect();
    GC.Collect();
    GC.Collect();
    GC.WaitForPendingFinalizers();

    Log.Info("DVRMS2WMV: reconnect mpeg2 video codec->ASF WM Writer");
    graphBuilder.RemoveFilter(fileWriterbase);
    if (!AddWmAsfWriter(outputFilename, quality, standard)) return false;

    Log.Info("DVRMS2WMV: Start transcoding");
    hr = mediaControl.Run();
    if (hr != 0)
    {
      Log.Error("DVRMS2WMV:FAILED:unable to start graph :0x{0:X}", hr);
      Cleanup();
      return false;
    }
  }
  catch (Exception e)
  {
    // TODO: Handle exceptions.
    Log.Error("unable to transcode file:{0} message:{1}", info.file, e.Message);
    return false;
  }
  return true;
}
/// <summary>
/// Builds and starts a DirectShow playback graph for the given file using the
/// EVR video renderer: adds preferred decoders, a (possibly custom) audio
/// renderer, renders all source pins, optionally reworks the graph for
/// multichannel WMA, wires event notification, pre-rolls, waits for network
/// buffering, then runs the graph and starts the commercial-skip watcher.
/// </summary>
private void PlayMovieInWindow(string filename)
{
  WindowsMediaLib.IWMReaderAdvanced2 wmReader = null;
  IBaseFilter sourceFilter = null;
  try
  {
    FileLogger.Log("PlayMovieInWindow: {0}", filename);
    lastJump = 0;
    int hr = 0;

    if (filename == string.Empty) return;

    this.graphBuilder = (IGraphBuilder)new FilterGraph();
    FileLogger.Log("PlayMovieInWindow: Create Graph");

    // Add the Enhanced Video Renderer by CLSID.
    this.evrRenderer = FilterGraphTools.AddFilterFromClsid(this.graphBuilder,
      new Guid("{FA10746C-9B63-4B6C-BC49-FC300EA5F256}"), "EVR");
    if (evrRenderer != null)
    {
      FileLogger.Log("PlayMovieInWindow: Add EVR");
      SetupEvrDisplay();
      //#if DEBUG
      if (ps.PublishGraph)
        rot = new DsROTEntry(this.graphBuilder);
      //#endif

      // Install a site that can veto unwanted filters during graph building.
      IObjectWithSite grfSite = graphBuilder as IObjectWithSite;
      if (grfSite != null)
        grfSite.SetSite(new FilterBlocker(filename));

      // Pre-add the user's preferred decoders for this file extension.
      // Each entry is "ext;filter1;filter2;..."; a filter may be given as a
      // CLSID (GUID pattern) or a friendly name.
      string fileExt = Path.GetExtension(filename).ToLower();
      if (ps.PreferredDecoders != null)
      {
        foreach (string pa in ps.PreferredDecoders)
        {
          string[] pvA = pa.Split(new char[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
          if (pvA[0].ToLower() == fileExt)
          {
            for (int i = 1; i < pvA.Length; i++)
            {
              string strFilter = pvA[i].Trim();
              IBaseFilter filter = null;
              try
              {
                if (Regex.IsMatch(strFilter, @"{?\w{8}-\w{4}-\w{4}-\w{4}-\w{12}}?"))
                  filter = FilterGraphTools.AddFilterFromClsid(graphBuilder, new Guid(strFilter), strFilter);
                else
                  filter = FilterGraphTools.AddFilterByName(graphBuilder, FilterCategory.LegacyAmFilterCategory, strFilter);
                if (filter != null)
                {
                  FileLogger.Log("Added {0} to the graph", strFilter);
                }
                else FileLogger.Log("{0} not added to the graph", strFilter);
              }
              finally
              {
                // The graph holds its own reference; drop ours immediately.
                if (filter != null)
                  Marshal.ReleaseComObject(filter);
                filter = null;
              }
            }
          }
        }
      }

      // Have the graph builder construct its the appropriate graph automatically
      //hr = this.graphBuilder.RenderFile(filename, null);

      if (ps.UseCustomAudioRenderer)
      {
        m_audioRendererClsid = new Guid(ps.CustomAudioRender);
      }
      audioRenderer = FilterGraphTools.AddFilterFromClsid(graphBuilder, m_audioRendererClsid, "Audio Renderer");

      //IAVSyncClock wtf = audioRenderer as IAVSyncClock;
      //double cap;
      //hr = wtf.GetBias(out cap);
      //IMPAudioSettings arSett = audioRenderer as IMPAudioSettings;
      //if (arSett != null)
      //{
      //   ... superseded by the IMPAudioRendererConfig block below ...
      //}

      // Configure the MP Audio Renderer, when that is the renderer in use.
      IMPAudioRendererConfig arSett = audioRenderer as IMPAudioRendererConfig;
      if (arSett != null)
      {
        // NOTE(review): these four reads (and their hr values) are unused —
        // presumably kept for debugging; confirm before removing.
        int ac3Mode;
        hr = arSett.GetInt(MPARSetting.AC3_ENCODING, out ac3Mode);
        int sc;
        hr = arSett.GetInt(MPARSetting.SPEAKER_CONFIG, out sc);
        int sm;
        hr = arSett.GetInt(MPARSetting.WASAPI_MODE, out sm);
        bool em;
        hr = arSett.GetBool(MPARSetting.WASAPI_EVENT_DRIVEN, out em);
        /*DeviceDefinition[] */
        IntPtr dc;
        //int count;
        //hr = arSett.GetAvailableAudioDevices(out dc, out count);
        //DsError.ThrowExceptionForHR(hr);
        ////DeviceDefinition[] dd = new DeviceDefinition[count];
        //AudioDeviceDefinition dd = (AudioDeviceDefinition)Marshal.PtrToStructure(dc, typeof(AudioDeviceDefinition));
        //if (dc != null)
        //  Marshal.ReleaseComObject(dc);
        hr = arSett.SetString(MPARSetting.SETTING_AUDIO_DEVICE, ps.AudioPlaybackDevice);
        //arSett.SetSpeakerMatchOutput(true);
        arSett.SetBool(MPARSetting.WASAPI_EVENT_DRIVEN, true);
        arSett.SetInt(MPARSetting.USE_FILTERS, (int)MPARUseFilters.ALL);
        arSett.SetBool(MPARSetting.ALLOW_BITSTREAMING, true);
        arSett.SetInt(MPARSetting.AC3_ENCODING, (int)AC3Encoding.DISABLED);
        arSett.SetBool(MPARSetting.ENABLE_TIME_STRETCHING, false);
      }

      //try
      //{
      // Let DirectShow pick a source filter; fall back to the WM ASF Reader
      // (needed e.g. for streamed WM content).
      hr = graphBuilder.AddSourceFilter(filename, "Source", out sourceFilter);
      if (hr < 0)
      {
        //if it doesn't work before failing try to load it with the WMV reader
        sourceFilter = (IBaseFilter)new WMAsfReader();
        hr = graphBuilder.AddFilter(sourceFilter, "WM/ASF Reader");
        DsError.ThrowExceptionForHR(hr);
        hr = ((IFileSourceFilter)sourceFilter).Load(filename, null);
        DsError.ThrowExceptionForHR(hr);
        wmReader = sourceFilter as WindowsMediaLib.IWMReaderAdvanced2;
      }

      // Render every unconnected output pin of the source filter.
      IPin outPin = DsFindPin.ByConnectionStatus(sourceFilter, PinConnectedStatus.Unconnected, 0);
      while (outPin != null)
      {
        try
        {
          hr = graphBuilder.Render(outPin);
          DsError.ThrowExceptionForHR(hr);
        }
        finally
        {
          if (outPin != null)
            Marshal.ReleaseComObject(outPin);
          outPin = null;
        }
        outPin = DsFindPin.ByConnectionStatus(sourceFilter, PinConnectedStatus.Unconnected, 0);
      }

      if (ps.MultiChannelWMA)
      {
        // Enable multichannel output on the WMA decoder DMO. The property can
        // only be changed while the output pin is disconnected, so the whole
        // downstream chain is removed, the flag set, then the pin re-rendered.
        FileLogger.Log("Set multichannel mode for WMA");
        IBaseFilter wmaDec = FilterGraphTools.FindFilterByName(graphBuilder, "WMAudio Decoder DMO");
        if (wmaDec != null)
        {
          try
          {
            //http://msdn.microsoft.com/en-us/library/aa390550(VS.85).aspx
            IPropertyBag bag = wmaDec as IPropertyBag;
            if (bag != null)
            {
              object pVar;
              hr = bag.Read("_HIRESOUTPUT", out pVar, null);
              DsError.ThrowExceptionForHR(hr);
              bool bVar = (bool)pVar;
              FileLogger.Log("_HIRESOUTPUT = {0}", bVar);
              if (!bVar)
              {
                IPin wmaOut = DsFindPin.ByDirection(wmaDec, PinDirection.Output, 0);
                IPin cPin = null;
                try
                {
                  hr = wmaOut.ConnectedTo(out cPin);
                  DsError.ThrowExceptionForHR(hr);
                  if (cPin != null) //cpin should never be null at this point, but lets be safe
                  {
                    hr = wmaOut.Disconnect();
                    DsError.ThrowExceptionForHR(hr);

                    // Walk the downstream chain, remembering each filter's
                    // CLSID (except the DirectSound renderer) and removing it.
                    List<Guid> oldFilters = new List<Guid>();
                    IBaseFilter oFilt = FilterGraphTools.GetFilterFromPin(cPin);
                    try
                    {
                      while (oFilt != null)
                      {
                        IBaseFilter cFilter = null;
                        try
                        {
                          Guid clsid;
                          hr = oFilt.GetClassID(out clsid);
                          DsError.ThrowExceptionForHR(hr);
                          if (clsid != DSOUND_RENDERER)
                          {
                            oldFilters.Add(clsid);
                            cFilter = FilterGraphTools.GetConnectedFilter(oFilt, PinDirection.Output, 0);
                          }
                          hr = graphBuilder.RemoveFilter(oFilt);
                          DsError.ThrowExceptionForHR(hr);
                        }
                        finally
                        {
                          if (oFilt != null)
                            Marshal.ReleaseComObject(oFilt);
                          oFilt = null;
                        }
                        oFilt = cFilter;
                      }
                    }
                    finally
                    {
                      if (oFilt != null)
                        Marshal.ReleaseComObject(oFilt);
                      oFilt = null;
                    }

                    // Re-add the remembered filters so Render below can reuse them.
                    foreach (Guid addFilt in oldFilters)
                    {
                      IBaseFilter addMe = FilterGraphTools.AddFilterFromClsid(graphBuilder, addFilt, addFilt.ToString());
                      if (addMe != null)
                        Marshal.ReleaseComObject(addMe);
                    }
                  }
                  pVar = true;
                  hr = bag.Write("_HIRESOUTPUT", ref pVar);
                  DsError.ThrowExceptionForHR(hr);
                  hr = graphBuilder.Render(wmaOut);
                  DsError.ThrowExceptionForHR(hr);
                }
                finally
                {
                  if (wmaOut != null)
                    Marshal.ReleaseComObject(wmaOut);
                  if (cPin != null)
                    Marshal.ReleaseComObject(cPin);
                }
              }
            }
          }
          catch (Exception ex)
          {
            // Best-effort: playback continues in stereo on failure.
            FileLogger.Log("Error setting multichannel mode for WMA: {0}", ex.Message);
          }
          finally
          {
            while (Marshal.ReleaseComObject(wmaDec) > 0) ;
          }
        }
      }
      //}
      //finally
      //{
      //  if (sourceFilter != null)
      //    Marshal.ReleaseComObject(sourceFilter);
      //}

      if (ps.DXVAWMV)
      {
        // NOTE(review): the actual property write is commented out, so this
        // branch currently only probes for the property store.
        FileLogger.Log("Set DXVA for WMV");
        IBaseFilter wmvDec = FilterGraphTools.FindFilterByName(graphBuilder, "WMVideo Decoder DMO");
        if (wmvDec != null)
        {
          try
          {
            MediaFoundation.Misc.IPropertyStore config = wmvDec as MediaFoundation.Misc.IPropertyStore;
            if (config != null)
            {
              MediaFoundation.Misc.PropVariant pv = new MediaFoundation.Misc.PropVariant();
              //config.GetValue(MediaFoundation.Misc.WMVConst.MFPKEY_DXVA_ENABLED, pv);
            }
          }
          catch (Exception ex)
          {
            FileLogger.Log("Error setting DXVA mode for WMV: {0}", ex.Message);
          }
          finally
          {
            while (Marshal.ReleaseComObject(wmvDec) > 0) ;
          }
        }
      }

      SetEvrVideoMode();

      // QueryInterface for DirectShow interfaces
      this.mediaControl = (IMediaControl)this.graphBuilder;
      this.mediaEventEx = (IMediaEventEx)this.graphBuilder;
      this.mediaSeeking = (IMediaSeeking)this.graphBuilder;
      this.mediaPosition = (IMediaPosition)this.graphBuilder;

      // Query for video interfaces, which may not be relevant for audio files
      //this.videoWindow = this.graphBuilder as IVideoWindow;
      //this.basicVideo = this.graphBuilder as IBasicVideo;

      // Query for audio interfaces, which may not be relevant for video-only files
      this.basicAudio = this.graphBuilder as IBasicAudio;

      // Is this an audio-only file (no video component)?
      CheckVisibility();

      // Have the graph signal event via window callbacks for performance
      hr = this.mediaEventEx.SetNotifyWindow(this.Handle, WM.GRAPH_NOTIFY, IntPtr.Zero);
      DsError.ThrowExceptionForHR(hr);

      if (!this.isAudioOnly)
      {
        // Setup the video window
        //hr = this.videoWindow.put_Owner(this.Handle);
        //DsError.ThrowExceptionForHR(hr);
        //this.evrDisplay.SetVideoWindow(this.Handle);
        //hr = this.videoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipSiblings | WindowStyle.ClipChildren);
        //DsError.ThrowExceptionForHR(hr);
        hr = InitVideoWindow(); //1, 1);
        DsError.ThrowExceptionForHR(hr);
        GetFrameStepInterface();
      }
      else
      {
        // Initialize the default player size and enable playback menu items
        hr = InitPlayerWindow();
        DsError.ThrowExceptionForHR(hr);
        EnablePlaybackMenu(true, MediaType.Audio);
      }

      // Complete window initialization
      //CheckSizeMenu(menuFileSizeNormal);
      //this.isFullScreen = false;
      this.currentPlaybackRate = 1.0;
      UpdateMainTitle();
      this.Activate();

      //pre-roll the graph
      hr = this.mediaControl.Pause();
      DsError.ThrowExceptionForHR(hr);

      // When streaming through the WM reader, wait until buffering completes.
      if (wmReader != null)
      {
        WindowsMediaLib.PlayMode pMode;
        hr = wmReader.GetPlayMode(out pMode);
        DsError.ThrowExceptionForHR(hr);
        if (pMode == WindowsMediaLib.PlayMode.Streaming)
        {
          int pdwPercent = 0;
          long pcnsBuffering;
          while (pdwPercent < 100)
          {
            hr = wmReader.GetBufferProgress(out pdwPercent, out pcnsBuffering);
            DsError.ThrowExceptionForHR(hr);
            if (pdwPercent >= 100) break;
            // NOTE(review): sleepFor is computed but unused; the loop always
            // sleeps a fixed 100ms — confirm before cleaning up.
            int sleepFor = Convert.ToInt32(pcnsBuffering / 1000);
            Thread.Sleep(100);
          }
        }
      }

      // Run the graph to play the media file
      hr = this.mediaControl.Run();
      DsError.ThrowExceptionForHR(hr);

      // Watch the commercial-skip metadata file (XML or EDL flavor) next to
      // the recording and (re)load it when it changes.
      if (commWatcher != null)
        commWatcher.Dispose();
      string commPath = string.Empty;
      if (ps.UseDtbXml)
      {
        commWatcher = new FileSystemWatcher(Commercials.XmlDirectory, Commercials.GetXmlFilename(filename));
        commPath = Path.Combine(Commercials.XmlDirectory, Commercials.GetXmlFilename(filename));
      }
      else
      {
        commWatcher = new FileSystemWatcher(Path.GetDirectoryName(filename), Commercials.GetEdlFilename(filename));
        commPath = Path.Combine(Path.GetDirectoryName(filename), Commercials.GetEdlFilename(filename));
      }
      ReadComm(commPath);
      commWatcher.Changed += new FileSystemEventHandler(commWatcher_Changed);
      commWatcher.Created += new FileSystemEventHandler(commWatcher_Changed);
      //commWatcher.NotifyFilter = NotifyFilters.LastWrite | NotifyFilters.Size;
      commWatcher.EnableRaisingEvents = true;

      MoveToBookmark();

      this.currentState = PlayState.Running;
      if (isFullScreen)
        tmMouseMove.Enabled = true;
    }
    else
    {
      //MessageBox.Show("EVR cannot be loaded on this PC");
      using (EPDialog ed = new EPDialog())
        ed.ShowDialog("Error", "The Enhanced Video Renderer cannot be loaded", 20, this);
    }
  }
  finally
  {
    //if (wmReader != null)
    //  Marshal.ReleaseComObject(wmReader);
    if (sourceFilter != null)
      while (Marshal.ReleaseComObject(sourceFilter) > 0) ;
  }
}