Code Example #1
        public static void ConnectFilters(IGraphBuilder graphBuilder, IPin sourcePin, IPin destinationPin,
                                          bool useIntelligentConnect)
        {
            int hr = 0;

            if (graphBuilder == null)
            {
                throw new ArgumentNullException("graphBuilder");
            }

            if (sourcePin == null)
            {
                throw new ArgumentNullException("sourcePin");
            }

            if (destinationPin == null)
            {
                throw new ArgumentNullException("destinationPin");
            }

            if (useIntelligentConnect)
            {
                hr = graphBuilder.Connect(sourcePin, destinationPin);
                DsError.ThrowExceptionForHR(hr);
            }
            else
            {
                hr = graphBuilder.ConnectDirect(sourcePin, destinationPin, null);
                DsError.ThrowExceptionForHR(hr);
            }
        }
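
Usage note (a sketch added for this write-up, not part of the original example): assuming the DirectShowLib helpers DsFindPin and PinDirection plus System.Runtime.InteropServices.Marshal, as used by the other examples on this page, and that both filters are already in the graph, a caller might look like the following; the method and variable names are placeholders.

        // Sketch only: connect the first output pin of `source` to the first input pin
        // of `sink`, letting Intelligent Connect insert any intermediate filters needed.
        public static void ConnectFirstPins(IGraphBuilder graphBuilder, IBaseFilter source, IBaseFilter sink)
        {
            IPin sourceOut = DsFindPin.ByDirection(source, PinDirection.Output, 0);
            IPin sinkIn    = DsFindPin.ByDirection(sink, PinDirection.Input, 0);

            try
            {
                ConnectFilters(graphBuilder, sourceOut, sinkIn, true);
            }
            finally
            {
                if (sourceOut != null) Marshal.ReleaseComObject(sourceOut);
                if (sinkIn != null) Marshal.ReleaseComObject(sinkIn);
            }
        }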
Code Example #2
        private void Play()
        {
            int    r;
            String selected_device_name = cbDevices.SelectedItem.ToString();

            if (selected_device_name.Length <= 0)
            {
                return;
            }

            IBaseFilter input = ((Device)cbDevices.SelectedItem).Filter;
            VideoOutPinConfiguration pin_config = (VideoOutPinConfiguration)cbFormat.SelectedItem;

            if (input == null || pin_config == null || !pin_config.ApplyConfiguration())
            {
                return;
            }

            r = graph_builder.AddFilter(input, selected_device_name); // use the selected item's name; SelectedText is usually empty for a drop-down
            r = graph_builder.AddFilter((IBaseFilter)grabber, "grabber");
            IPin input_out  = pin_config.Pin;
            IPin grabber_in = GetFirstPin((IBaseFilter)grabber, PinDirection.Input);

            if (input_out != null && grabber_in != null)
            {
                r = graph_builder.Connect(input_out, grabber_in);
            }
            r = media_control.Run();

            active_config = pin_config;
            bPlay.Text    = "Stop";
        }
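
The Play() method above discards every HRESULT it stores in r. A hedged sketch of the same connection steps with error checking (it assumes DirectShowLib's DsError helper, which several other examples on this page already use) could look like this:

            // Sketch: the same calls as above, but each failed HRESULT is turned into an exception.
            int hr = graph_builder.AddFilter(input, selected_device_name);
            DsError.ThrowExceptionForHR(hr);

            hr = graph_builder.AddFilter((IBaseFilter)grabber, "grabber");
            DsError.ThrowExceptionForHR(hr);

            hr = graph_builder.Connect(input_out, grabber_in);
            DsError.ThrowExceptionForHR(hr);

            hr = media_control.Run();
            DsError.ThrowExceptionForHR(hr);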
Code Example #3
        public void RenderAudio(IGraphBuilder pGraphBuilder, IBaseFilter splitter)
        {
            var pPin = DsUtils.GetPin(splitter, PinDirection.Output, new[] { MediaType.Audio });

            if (pPin != null)
            {
                _streamSelect = splitter as IAMStreamSelect;
                if (_streamSelect != null && BuildSoundRenderer(pGraphBuilder))
                {
                    var pInputPin = DsUtils.GetPin(_directSoundBaseFilter, PinDirection.Input);
                    var hr        = pGraphBuilder.Connect(pPin, pInputPin);
                    Marshal.ReleaseComObject(pInputPin);
                    if (hr == DsHlp.S_OK || hr == DsHlp.VFW_S_PARTIAL_RENDER)
                    {
                        _audioStreams.AddRange(_streamSelect.GetSelectableStreams().Where(s => s.MajorType == MediaType.Audio));
                    }
                    else
                    {
                        pGraphBuilder.RemoveFilter(_directSoundBaseFilter);
                        Marshal.FinalReleaseComObject(_directSoundBaseFilter);
                        _directSoundBaseFilter = null;
                        _basicAudio            = null;
                    }
                }

                Marshal.ReleaseComObject(pPin);
            }
        }
Code Example #4
File: Util.cs  Project: avblocks/avblocks-samples
        public static int ConnectFilters(IGraphBuilder graph, IPin pinOut, IBaseFilter dest)
        {
            if ((graph == null) || (pinOut == null) || (dest == null))
            {
                return(WinAPI.E_FAIL);
            }

            IPin pinIn = null;

            try
            {
                // Find an input pin on the downstream filter.
                int hr = GetUnconnectedPin(dest, PinDirection.Input, out pinIn);
                DsError.ThrowExceptionForHR(hr);

                // Try to connect them.
                hr = graph.Connect(pinOut, pinIn);
                DsError.ThrowExceptionForHR(hr);

                return(0);
            }
            catch (COMException)
            {
            }
            finally
            {
                Util.ReleaseComObject(ref pinIn);
            }

            return(WinAPI.E_FAIL);
        }
Code Example #5
        public void RenderAudio(IGraphBuilder pGraphBuilder, IBaseFilter splitter)
        {
            IPin pPin;

            var nSkip = 0;

            while ((pPin = DsUtils.GetPin(splitter, PinDirection.Output, false, nSkip)) != null)
            {
                if (DsUtils.IsMediaTypeSupported(pPin, MediaType.Audio) == 0)
                {
                    // this unconnected pin supports audio type!
                    // let's render it!
                    if (BuildSoundRenderer(pGraphBuilder))
                    {
                        var pInputPin = DsUtils.GetPin(_directSoundBaseFilters.Last(), PinDirection.Input);
                        var hr        = pGraphBuilder.Connect(pPin, pInputPin);
                        Marshal.ReleaseComObject(pInputPin);
                        if (hr == DsHlp.S_OK || hr == DsHlp.VFW_S_PARTIAL_RENDER)
                        {
                            if (_directSoundBaseFilters.Count == 8)
                            {
                                Marshal.ReleaseComObject(pPin);
                                break; // out of while cycle
                            }
                        }
                        else
                        {
                            var pBaseFilter = _directSoundBaseFilters.Last();
                            pGraphBuilder.RemoveFilter(pBaseFilter);
                            Marshal.ReleaseComObject(pBaseFilter);

                            _basicAudioInterfaces.RemoveAt(_basicAudioInterfaces.Count - 1);
                            _directSoundBaseFilters.RemoveAt(_directSoundBaseFilters.Count - 1);

                            nSkip++;
                        }
                    }
                    else
                    {
                        // could not create/add DirectSound filter
                        Marshal.ReleaseComObject(pPin);
                        break; // out of while cycle
                    }
                }
                else
                {
                    nSkip++;
                }
                Marshal.ReleaseComObject(pPin);
            } // end of while

            _currentAudioStream = 0;
            _audioStreamsCount  = _basicAudioInterfaces.Count;
            const int lVolume = -10000;

            for (var i = 1; i < _audioStreamsCount; i++)
            {
                _basicAudioInterfaces[i].put_Volume(lVolume);
            }
        }
Code Example #6
        /// <summary>
        /// Connect the upstream filter's output pin to the downstream filter
        /// </summary>
        /// <param name="graph"></param>
        /// <param name="pOut"></param>
        /// <param name="dest"></param>
        public static void ConnectFilters(IGraphBuilder graph, IPin pOut, IBaseFilter dest)
        {
            if (pOut == null)
            {
                throw new Exception("Unable to find any output pin");
            }

            IPin pIn = null;

            // Get the downstream filter's input pin
            GetUnconnectedPin(dest, out pIn, PinDirection.Input);

            try
            {
                // Connect the pins
                if (graph.Connect(pOut, pIn) < 0)
                {
                    throw new Exception("Unable to connect selected pins");
                }
            }
            finally
            {
                // Release the input pin
                Marshal.ReleaseComObject(pIn);
            }
        }
Code Example #7
    public static void ConnectFilter(IGraphBuilder graph, IBaseFilter out_flt, int out_no, IBaseFilter in_flt,
                                     int in_no)
    {
        var out_pin = FindPin(out_flt, out_no, PIN_DIRECTION.PINDIR_OUTPUT);
        var inp_pin = FindPin(in_flt, in_no, PIN_DIRECTION.PINDIR_INPUT);

        graph.Connect(out_pin, inp_pin);
    }
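
The helper above ignores the HRESULT returned by Connect and never releases the pins it obtained. A stricter variant is sketched below; it assumes FindPin is the example's own helper and returns new IPin references, and it uses System.Runtime.InteropServices.Marshal.

    // Sketch: like ConnectFilter above, but a failed connect throws and the
    // pin references returned by FindPin are released afterwards.
    public static void ConnectFilterChecked(IGraphBuilder graph, IBaseFilter out_flt, int out_no, IBaseFilter in_flt,
                                            int in_no)
    {
        var out_pin = FindPin(out_flt, out_no, PIN_DIRECTION.PINDIR_OUTPUT);
        var inp_pin = FindPin(in_flt, in_no, PIN_DIRECTION.PINDIR_INPUT);

        try
        {
            int hr = graph.Connect(out_pin, inp_pin);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
        }
        finally
        {
            Marshal.ReleaseComObject(out_pin);
            Marshal.ReleaseComObject(inp_pin);
        }
    }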
Code Example #8
        public virtual void AddCompressor(FilterInfo fiCompressor)
        {
            RemoveCompressor();

            compressor = (Compressor)Filter.CreateFilter(fiCompressor);
            iGB.AddFilter(compressor.BaseFilter, compressor.FriendlyName);
            compressor.AddedToGraph(fgm); // Chooses input pin

            try
            {
                iGB.Connect(source.OutputPin, compressor.InputPin);
            }
            catch (COMException)
            {
                RemoveCompressor();
                throw;
            }
        }
Code Example #9
        /// <summary>
        /// Creates the "File Splitter" (demuxer) filter and adds it to the graph.
        /// </summary>
        /// <param name="graph">The graph.</param>
        /// <param name="filtersConfig">The configuration for this file.</param>
        /// <param name="sourceOutputPin">The source filter's output pin.</param>
        /// <param name="parserOutputAudioPin">The audio output pin.</param>
        /// <param name="parserOutputVideoPin">The video output pin.</param>
        /// <returns>The File Splitter filter.</returns>
        internal static IBaseFilter CreateSplitterFilter(IGraphBuilder graph, ExtensionFiltersSource filtersConfig, IPin sourceOutputPin,
                                                         out IPin parserOutputAudioPin, out IPin parserOutputVideoPin)
        {
            Type        filterType   = null;
            IBaseFilter parserFilter = null;

            FilterSource splitterSource = filtersConfig.Splitter;

            if (splitterSource.SourceType != FilterSourceTypeEnum.External)
            {
                throw new InvalidOperationException("The Splitter Filter Source cannot be Automatic");
            }

            CreateFilter(splitterSource.ExternalCLSID, splitterSource.Name, ref filterType, ref parserFilter);

            IPin parserInputPin = DsFindPin.ByDirection(parserFilter, PinDirection.Input, 0);

            int hr = graph.AddFilter(parserFilter, splitterSource.Name);

            DsError.ThrowExceptionForHR(hr);

            // Connecting the file source and the splitter
            hr = graph.Connect(sourceOutputPin, parserInputPin);
            DsError.ThrowExceptionForHR(hr);

            SafeRelease(sourceOutputPin);
            SafeRelease(parserInputPin);

            // Output pins

            parserOutputAudioPin = DsFindPin.ByDirection(parserFilter, PinDirection.Output, 0);
            parserOutputVideoPin = DsFindPin.ByDirection(parserFilter, PinDirection.Output, 1);

            // Only video or audio
            if (parserOutputVideoPin == null)
            {
                parserOutputVideoPin = parserOutputAudioPin;
                parserOutputAudioPin = null;
            }

            parserOutputVideoPin.EnumMediaTypes(out IEnumMediaTypes enumMediaTypes);
            AMMediaType[] mts = { null };
            while (enumMediaTypes.Next(1, mts, IntPtr.Zero) == 0)
            {
                if (mts[0].majorType != MediaType.Video)
                {
                    // Invert in case it's not the appropriate media type
                    IPin temp = parserOutputAudioPin;
                    parserOutputAudioPin = parserOutputVideoPin;
                    parserOutputVideoPin = temp;
                    break;
                }
            }

            return(parserFilter);
        }
Code Example #10
        public virtual void AddCompressor(FilterInfo fiCompressor)
        {
            RemoveCompressor();

            compressor = (Compressor)Filter.CreateFilter(fiCompressor);
            iGB.AddFilter(compressor.BaseFilter, compressor.FriendlyName);
            compressor.AddedToGraph(fgm); // Chooses input pin
            Dictionary <string, Object> args = new Dictionary <string, object>();

            args.Add("SourceFilter", source);
            compressor.PreConnectConfig(args);

            try
            {
                iGB.Connect(source.OutputPin, compressor.InputPin);
            }
            catch (COMException)
            {
                RemoveCompressor();
                throw;
            }
        }
Code Example #11
File: Converter.cs  Project: nickmayer/mediabrowser
        /// <summary>Connects two filters in a graph together.</summary>
        /// <param name="graph">The graph on which the filters exist.</param>
        /// <param name="source">The source filter.</param>
        /// <param name="outPinName">The name of the output pin on the source filter.</param>
        /// <param name="destination">The destination filter.</param>
        /// <param name="inPinName">The name of the input pin on the destination filter.</param>
        protected void Connect(IGraphBuilder graph, IBaseFilter source, string outPinName,
                               IBaseFilter destination, string inPinName)
        {
            IPin outPin = source.FindPin(outPinName);

            DisposalCleanup.Add(outPin);

            IPin inPin = destination.FindPin(inPinName);

            DisposalCleanup.Add(inPin);

            graph.Connect(outPin, inPin);
        }
Code Example #12
File: Utility.cs  Project: mrLunatic/WebCam
        /// <summary>
        /// Using a GraphBuilder, connect an output pin to the destination filter.
        /// </summary>
        /// <param name="pGraph">Specifies the graph builder.</param>
        /// <param name="pOut">Specifies the output pin to connect.</param>
        /// <param name="pDst">Specifies the destination filter.</param>
        /// <returns>A value of 0 is returned on success, or an error value on failure.</returns>
        public static int ConnectFilters(IGraphBuilder pGraph, IPin pOut, IBaseFilter pDst)
        {
            IPin pIn = null;

            int hr = FindUnconnectedPin(pDst, PinDirection.Input, out pIn);

            if (hr == 0)
            {
                hr = pGraph.Connect(pOut, pIn);
                Marshal.ReleaseComObject(pIn);
            }

            return(hr);
        }
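
Because this helper returns the HRESULT rather than throwing, callers are expected to check the result. A minimal usage sketch (placeholder names; Marshal comes from System.Runtime.InteropServices):

        // Sketch: connect a capture output pin to a renderer filter, turning a
        // failed HRESULT into a COMException at the call site.
        public static void ConnectOrThrow(IGraphBuilder graph, IPin captureOut, IBaseFilter renderer)
        {
            int hr = ConnectFilters(graph, captureOut, renderer);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
        }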
Code Example #13
        private static void ConnectSubpictureToSubpictureFilter(IGraphBuilder pGraphBuilder, IPin subpictureOutPin, IBaseFilter subpictureFilter)
        {
            var subpictureInputPin = GetInputPin(subpictureFilter);

            try
            {
                var hr = pGraphBuilder.Connect(subpictureOutPin, subpictureInputPin);
                hr.ThrowExceptionForHR(GraphBuilderError.CantRenderSubpicture);
            }
            finally
            {
                Marshal.ReleaseComObject(subpictureInputPin);
            }
        }
Code Example #14
        void ConnectPins(IBaseFilter upFilter, string upName, IBaseFilter downFilter, string downName)
        {
            int          hr;
            IPin         pin1, pin2;
            PinDirection PinDirThis;

            if (upName == "CapturePin")
            {
                pin1 = captureDevOutPin;
            }
            else
            {
                hr = upFilter.FindPin(upName, out pin1);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
                hr = pin1.QueryDirection(out PinDirThis);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
                if (PinDirThis != PinDirection.Output)
                {
                    throw new Exception("Wrong upstream pin");
                }
            }
            //pin1 = GetPin(upFilter, PinDirection.Output);
            hr = downFilter.FindPin(downName, out pin2);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
            hr = pin2.QueryDirection(out PinDirThis);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
            if (PinDirThis != PinDirection.Input)
            {
                throw new Exception("Wrong downstream pin");
            }
            //pin2 = GetPin(downFilter, PinDirection.Input);
            hr = graphBuilder.Connect(pin1, pin2);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
        }
Code Example #15
        private bool TryConnectToAny(IPin sourcePin, IBaseFilter destinationFilter)
        {
            IEnumPins pinEnum;
            int       hr = destinationFilter.EnumPins(out pinEnum);

            DsError.ThrowExceptionForHR(hr);
            IPin[] pins = { null };
            while (pinEnum.Next(pins.Length, pins, IntPtr.Zero) == 0)
            {
                int err = m_graphBuilder.Connect(sourcePin, pins[0]);
                Marshal.ReleaseComObject(pins[0]); // release our pin reference whether or not the connection succeeded
                if (err == 0)
                {
                    Marshal.ReleaseComObject(pinEnum);
                    return(true);
                }
            }
            Marshal.ReleaseComObject(pinEnum);
            return(false);
        }
Code Example #16
        public void RenderVideo(IGraphBuilder pGraphBuilder, IRenderer renderer)
        {
            InsureSourceOutPin();

            var videoRendererInputPin = renderer.GetInputPin();

            if (videoRendererInputPin != null)
            {
                var hr = pGraphBuilder.Connect(_sourceOutPin, videoRendererInputPin);
                Marshal.ReleaseComObject(videoRendererInputPin);

                // If hr > 0 (partial success), the video stream has been rendered but some (audio) streams remain unrendered.

                hr.ThrowExceptionForHR(GraphBuilderError.CantRenderFile);
            }
            else
            {
                throw new FilterGraphBuilderException(GraphBuilderError.CantRenderFile);
            }
        }
Code Example #17
        public override void AddedToGraph(FilgraphManager fgm)
        {
            IGraphBuilder gb = (IGraphBuilder)fgm;

            //Add the Blackmagic Decoder filter and connect it.
            try {
                bfDecoder = Filter.CreateBaseFilterByName("Blackmagic Design Decoder (DMO)");
                gb.AddFilter(bfDecoder, "Blackmagic Design Decoder (DMO)");
                IPin decoderInput;
                bfDecoder.FindPin("in0", out decoderInput);
                bfDecoder.FindPin("out0", out decoderOutput);
                captureOutput = GetPin(filter, _PinDirection.PINDIR_OUTPUT, Pin.PIN_CATEGORY_CAPTURE, Guid.Empty, false, 0);
                gb.Connect(captureOutput, decoderInput);
            }
            catch (Exception ex) {
                throw new ApplicationException("Failed to add the Blackmagic Decoder filter to the graph", ex);
            }

            base.AddedToGraph(fgm);
        }
Code Example #18
        /// <summary>
        /// Camera control and video quality control
        /// </summary>
        public static void Sample14()
        {
            string __FUNCTION__ = MethodBase.GetCurrentMethod().Name;

            Console.WriteLine(__FUNCTION__);

            IGraphBuilder         graph         = null;
            ICaptureGraphBuilder2 builder       = null;
            IBaseFilter           videoSource   = null;
            IBaseFilter           videoGrabber  = null;
            IBaseFilter           videoRenderer = null;
            var videoGrabberCB = new CxSampleGrabberCB();

            try
            {
                #region Create the graph builder:
                {
                    // Required:
                    graph = (IGraphBuilder)Axi.CoCreateInstance(GUID.CLSID_FilterGraph);
                    if (graph == null)
                    {
                        throw new System.IO.IOException("Failed to create a GraphBuilder.");
                    }

                    // Optional: not required for this sample, but it lets the filter connection below be written more concisely.
                    builder = (ICaptureGraphBuilder2)Axi.CoCreateInstance(GUID.CLSID_CaptureGraphBuilder2);
                    if (builder == null)
                    {
                        throw new System.IO.IOException("Failed to create a CaptureGraphBuilder2.");
                    }
                    builder.SetFiltergraph(graph);
                }
                #endregion

                #region Video input: create the source filter.
                {
                    videoSource = Axi.CreateFilter(GUID.CLSID_VideoInputDeviceCategory, null, 0);
                    if (videoSource == null)
                    {
                        throw new System.IO.IOException("Failed to create a videoSource.");
                    }
                    graph.AddFilter(videoSource, "VideoSource");

                    // Set the frame size.
                    // Note: this must be done before the pins are connected.
                    IPin pin = Axi.FindPin(videoSource, 0, PIN_DIRECTION.PINDIR_OUTPUT);
                    Axi.SetFormatSize(pin, 640, 480);
                }
                #endregion

                #region Video capture: create the sample grabber.
                {
                    videoGrabber = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_SampleGrabber);
                    if (videoGrabber == null)
                    {
                        throw new System.IO.IOException("Failed to create a videoGrabber.");
                    }
                    graph.AddFilter(videoGrabber, "videoGrabber");

                    // Configure the sample grabber filter's input format.
                    // SetMediaType specifies the required media type.
                    //   http://msdn.microsoft.com/ja-jp/library/cc369546.aspx
                    // * Not every member of the AM_MEDIA_TYPE structure needs to be set.
                    // * By default, the sample grabber has no preferred media type.
                    // * To make sure the sample grabber connects to the correct filter, call this method before building the filter graph.
                    // majortype: http://msdn.microsoft.com/ja-jp/library/cc370108.aspx
                    // subtype  : http://msdn.microsoft.com/ja-jp/library/cc371040.aspx
                    {
                        var grabber = (ISampleGrabber)videoGrabber;

                        var mt = new AM_MEDIA_TYPE();
                        mt.majortype  = new Guid(GUID.MEDIATYPE_Video);
                        mt.subtype    = new Guid(GUID.MEDIASUBTYPE_RGB24);
                        mt.formattype = new Guid(GUID.FORMAT_VideoInfo);
                        grabber.SetMediaType(mt);
                        grabber.SetBufferSamples(false);                                        // Disable sample copying.
                        grabber.SetOneShot(false);                                              // Disable one-shot mode.
                        //grabber.SetCallback(videoGrabberCB, 0);                               // 0: call the SampleCB method.
                        grabber.SetCallback(videoGrabberCB, 1);                                 // 1: call the BufferCB method.
                    }
                }
                #endregion

                #region Video output: create the renderer.
                {
                    videoRenderer = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_NullRenderer);
                    if (videoRenderer == null)
                    {
                        throw new System.IO.IOException("Failed to create a videoRenderer.");
                    }
                    graph.AddFilter(videoRenderer, "videoRenderer");
                }
                #endregion

                #region Connect the filters:
                if (builder != null)
                {
                    unsafe
                    {
                        var mediatype = new Guid(GUID.MEDIATYPE_Video);
                        var hr        = (HRESULT)builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype), videoSource, videoGrabber, videoRenderer);
                        if (hr < HRESULT.S_OK)
                        {
                            throw new CxDSException(hr);
                        }
                    }
                }
                else
                {
                    // Get the pins.
                    IPin videoSourceOutput  = Axi.FindPin(videoSource, 0, PIN_DIRECTION.PINDIR_OUTPUT);
                    IPin videoGrabberInput  = Axi.FindPin(videoGrabber, 0, PIN_DIRECTION.PINDIR_INPUT);
                    IPin videoGrabberOutput = Axi.FindPin(videoGrabber, 0, PIN_DIRECTION.PINDIR_OUTPUT);
                    IPin videoRendererInput = Axi.FindPin(videoRenderer, 0, PIN_DIRECTION.PINDIR_INPUT);

                    // Connect the pins.
                    graph.Connect(videoSourceOutput, videoGrabberInput);
                    graph.Connect(videoGrabberOutput, videoRendererInput);
                }
                #endregion

                // ------------------------------

                #region Camera control:
                Console.WriteLine("");
                Console.WriteLine("CameraControlProperty");
                {
                    #region Query the properties:
                    var watch = new Stopwatch();
                    watch.Start();
                    var ranges  = new Dictionary <CameraControlProperty, TxPropertyRange>();
                    var control = Axi.GetInterface <IAMCameraControl>(graph);
                    foreach (CameraControlProperty prop in Enum.GetValues(typeof(CameraControlProperty)))
                    {
                        HRESULT hr;
                        var     range = TxPropertyRange.Reset();

                        try
                        {
                            // Get the range.
                            hr = (HRESULT)control.GetRange(
                                prop,
                                ref range.Min,
                                ref range.Max,
                                ref range.SteppingDelta,
                                ref range.DefaultValue,
                                ref range.CapsFlag
                                );
                            if (hr < HRESULT.S_OK)
                            {
                                continue;
                            }

                            // Get the current value.
                            hr = (HRESULT)control.Get(
                                prop,
                                ref range.Value,
                                ref range.Flag
                                );
                            if (hr < HRESULT.S_OK)
                            {
                                continue;
                            }

                            range.IsSupported = true;
                        }
                        catch (System.Exception)
                        {
                        }
                        finally
                        {
                            ranges[prop] = range;
                        }
                    }
                    Console.WriteLine("Completed. {0:F3} msec", watch.Elapsed.TotalMilliseconds);
                    #endregion

                    #region Output for visual confirmation:
                    Console.WriteLine("--- {0,-22}: {1,5}, {2,5}, {3,6}, {4,4}, {5,5} ~ {6,5}",
                                      "Name", "Value", "Def", "Flag", "Step", "Lower", "Upper");

                    foreach (var item in ranges)
                    {
                        var name  = item.Key.ToString();
                        var range = item.Value;
                        if (range.IsSupported)
                        {
                            Console.WriteLine("[O] {0,-22}: {1,5}, {2,5}, 0x{3:X4}, {4,4}, {5,5} ~ {6,5}",
                                              name,
                                              range.Value,
                                              range.DefaultValue,
                                              range.CapsFlag,
                                              range.SteppingDelta,
                                              range.Min,
                                              range.Max
                                              );
                        }
                        else
                        {
                            Console.WriteLine("[-] {0,-22}:", name);
                        }
                    }
                    #endregion
                }
                #endregion

                #region Video quality control:
                Console.WriteLine("");
                Console.WriteLine("VideoProcAmpProperty");
                {
                    #region Query the properties:
                    var watch = new Stopwatch();
                    watch.Start();
                    var ranges  = new Dictionary <VideoProcAmpProperty, TxPropertyRange>();
                    var control = Axi.GetInterface <IAMVideoProcAmp>(graph);
                    foreach (VideoProcAmpProperty prop in Enum.GetValues(typeof(VideoProcAmpProperty)))
                    {
                        HRESULT hr;
                        var     range = TxPropertyRange.Reset();

                        try
                        {
                            // Range.
                            hr = (HRESULT)control.GetRange(
                                prop,
                                ref range.Min,
                                ref range.Max,
                                ref range.SteppingDelta,
                                ref range.DefaultValue,
                                ref range.CapsFlag
                                );
                            if (hr >= HRESULT.S_OK)
                            {
                                // Current value.
                                hr = (HRESULT)control.Get(
                                    prop,
                                    ref range.Value,
                                    ref range.Flag
                                    );
                                if (hr >= HRESULT.S_OK)
                                {
                                    range.IsSupported = true;
                                }
                            }
                        }
                        catch (System.Exception)
                        {
                        }

                        ranges[prop] = range;
                    }
                    Console.WriteLine("Completed. {0:F3} msec", watch.Elapsed.TotalMilliseconds);
                    #endregion

                    #region Output for visual confirmation:
                    Console.WriteLine("--- {0,-22}: {1,5}, {2,5}, {3,6}, {4,4}, {5,5} ~ {6,5}",
                                      "Name", "Value", "Def", "Flag", "Step", "Lower", "Upper");

                    foreach (var item in ranges)
                    {
                        var name  = item.Key.ToString();
                        var range = item.Value;
                        if (range.IsSupported)
                        {
                            Console.WriteLine("[O] {0,-22}: {1,5}, {2,5}, 0x{3:X4}, {4,4}, {5,5} ~ {6,5}",
                                              name,
                                              range.Value,
                                              range.DefaultValue,
                                              range.CapsFlag,
                                              range.SteppingDelta,
                                              range.Min,
                                              range.Max
                                              );
                        }
                        else
                        {
                            Console.WriteLine("[-] {0,-22}:", name);
                        }
                    }
                    #endregion
                }
                #endregion
            }
            catch (System.Exception ex)
            {
                Console.WriteLine("{0}", ex.StackTrace);
            }
            finally
            {
                #region Release:
                if (videoSource != null)
                {
                    Marshal.ReleaseComObject(videoSource);
                }
                videoSource = null;

                if (videoGrabber != null)
                {
                    Marshal.ReleaseComObject(videoGrabber);
                }
                videoGrabber = null;

                if (videoRenderer != null)
                {
                    Marshal.ReleaseComObject(videoRenderer);
                }
                videoRenderer = null;

                if (builder != null)
                {
                    Marshal.ReleaseComObject(builder);
                }
                builder = null;

                if (graph != null)
                {
                    Marshal.ReleaseComObject(graph);
                }
                graph = null;
                #endregion
            }
        }
Code Example #19
        /// <summary>
        /// Worker thread that captures the images
        /// </summary>
        private void RunWorker()
        {
            try
            {
                // Create the main graph
                _graph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;

                // Create the webcam source
                _sourceObject = FilterInfo.CreateFilter(_monikerString);

                // Create the grabber
                _grabber       = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
                _grabberObject = _grabber as IBaseFilter;

                // Add the source and grabber to the main graph
                _graph.AddFilter(_sourceObject, "source");
                _graph.AddFilter(_grabberObject, "grabber");

                using (AMMediaType mediaType = new AMMediaType())
                {
                    mediaType.MajorType = MediaTypes.Video;
                    mediaType.SubType   = MediaSubTypes.RGB32;
                    _grabber.SetMediaType(mediaType);

                    if (_graph.Connect(_sourceObject.GetPin(PinDirection.Output, 0), _grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
                    {
                        if (_grabber.GetConnectedMediaType(mediaType) == 0)
                        {
                            // During startup, this code can be too fast, so try at least 3 times
                            int  retryCount = 0;
                            bool succeeded  = false;
                            while ((retryCount < 3) && !succeeded)
                            {
                                // Tried again
                                retryCount++;

                                try
                                {
                                    // Retrieve the grabber information
                                    VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                                    _capGrabber.Width  = header.BmiHeader.Width;
                                    _capGrabber.Height = header.BmiHeader.Height;

                                    // Succeeded
                                    succeeded = true;
                                }
                                catch (Exception retryException)
                                {
                                    // Trace
                                    Trace.TraceInformation("Failed to retrieve the grabber information, tried {0} time(s)", retryCount);

                                    // Sleep
                                    Thread.Sleep(50);
                                }
                            }
                        }
                    }
                    _graph.Render(_grabberObject.GetPin(PinDirection.Output, 0));
                    _grabber.SetBufferSamples(false);
                    _grabber.SetOneShot(false);
                    _grabber.SetCallback(_capGrabber, 1);

                    // Get the video window
                    IVideoWindow wnd = (IVideoWindow)_graph;
                    wnd.put_AutoShow(false);
                    wnd = null;

                    // Create the control and run
                    _control = (IMediaControl)_graph;
                    _control.Run();

                    // Wait for the stop signal
                    while (!_stopSignal.WaitOne(0, true))
                    {
                        Thread.Sleep(10);
                    }

                    // Stop when ready
                    _control.StopWhenReady();
                }
            }
            catch (Exception ex)
            {
                // Trace
                Trace.WriteLine(ex);
            }
            finally
            {
                // Clean up
                Release();
            }
        }
Code Example #20
    /// <summary>
    /// helper function to connect 2 filters
    /// </summary>
    /// <param name="graphBuilder">The graph builder.</param>
    /// <param name="pinSource">The source pin.</param>
    /// <param name="destinationFilter">The destination filter.</param>
    /// <param name="destinationPinIndex">The index of the destination pin</param>
    /// <returns>true if the pins were connected; otherwise, false.</returns>
    public static bool ConnectFilter(IGraphBuilder graphBuilder, IPin pinSource, IBaseFilter destinationFilter,
                                     out int destinationPinIndex)
    {
      //Log.Log.WriteFile("analog: ConnectFilter()");
      Log.Log.WriteFile("analog:  PinSource:{0}", LogPinInfo(pinSource));
      destinationPinIndex = -1;
      for (int i = 0; i <= 10; ++i)
      {
        IPin pinIn = DsFindPin.ByDirection(destinationFilter, PinDirection.Input, i);
        if (pinIn == null)
          return false;
        destinationPinIndex++;
        IPin connectedToPin;
        if (pinIn.ConnectedTo(out connectedToPin) != 0)
          connectedToPin = null;

        if (connectedToPin == null)
        {
          Log.Log.WriteFile("analog:  pinDest {0}:{1}", i, LogPinInfo(pinIn));
          int hr = graphBuilder.Connect(pinSource, pinIn);
          if (hr == 0)
          {
            Log.Log.WriteFile("analog:  pins connected");
            Release.ComObject("pindest" + i, pinIn);
            return true;
          }
        }
        else
        {
          Release.ComObject("connectedToPin", connectedToPin);
        }
        Release.ComObject("pindest" + i, pinIn);
      }
      return false;
    }
Code Example #21
File: CxDSCamera.cs  Project: cogorou/DSLab
        /// <summary>
        /// Build the graph
        /// </summary>
        /// <param name="output_file">The output file</param>
        public virtual void Setup(string output_file)
        {
            this.Dispose();

            try
            {
                CxDSCameraParam param = this.Param;

                // Graph builder.
                // CoCreateInstance
                GraphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_FilterGraph)));

                #region Add filters.
                // Video input filter.
                IBaseFilter capture = CreateVideoCapture(param);
                if (capture == null)
                    throw new System.IO.IOException();
                this.GraphBuilder.AddFilter(capture, "CaptureFilter");
                IPin capture_out = DSLab.Axi.FindPin(capture, 0, PIN_DIRECTION.PINDIR_OUTPUT);
                this.CaptureFilter = capture;
                this.CaptureOutPin = capture_out;

                // Sample grabber.
                IBaseFilter grabber = (IBaseFilter)CreateSampleGrabber();
                if (grabber == null)
                    throw new System.IO.IOException();
                this.GraphBuilder.AddFilter(grabber, "SampleGrabber");
                this.SampleGrabber = (ISampleGrabber)grabber;
                #endregion

                #region Capture builder:
                {
                    int hr = 0;
                    CaptureBuilder = (ICaptureGraphBuilder2)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_CaptureGraphBuilder2)));
                    hr = CaptureBuilder.SetFiltergraph(GraphBuilder);

                    if (string.IsNullOrEmpty(output_file))
                    {
                        // Renderer.
                        IBaseFilter renderer = null;
                        renderer = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_NullRenderer)));
                        if (renderer == null)
                            throw new System.IO.IOException();
                        this.GraphBuilder.AddFilter(renderer, "Renderer");
                        this.Renderer = renderer;

            #if true
                        // Use ICaptureGraphBuilder2.RenderStream instead of IGraphBuilder.Connect.
                        // fig) [capture]-out->-in-[sample grabber]-out->-in-[null render]
                        hr = CaptureBuilder.RenderStream(new Guid(GUID.PIN_CATEGORY_CAPTURE), new Guid(GUID.MEDIATYPE_Video), capture, grabber, renderer);

            #else
                        // Get the pins.
                        IPin grabber_in = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_INPUT);
                        IPin grabber_out = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_OUTPUT);
                        IPin renderer_in = DSLab.Axi.FindPin(renderer, 0, PIN_DIRECTION.PINDIR_INPUT);

                        // Connect the pins.
                        GraphBuilder.Connect(capture_out, grabber_in);
                        GraphBuilder.Connect(grabber_out, renderer_in);

                        // Keep the pins.
                        //SampleGrabberInPin = grabber_in;
                        //SampleGrabberOutPin = grabber_out;
                        //RendererInPin = renderer_in;
            #endif
                    }
                    else
                    {
                        IBaseFilter mux = null;
                        IFileSinkFilter sync = null;
                        hr = CaptureBuilder.SetOutputFileName(new Guid(GUID.MEDIASUBTYPE_Avi), output_file, ref mux, ref sync);
                        hr = CaptureBuilder.RenderStream(new Guid(GUID.PIN_CATEGORY_CAPTURE), new Guid(GUID.MEDIATYPE_Video), capture, grabber, mux);
                        this.Mux = mux;
                        this.Sync = sync;
                    }
                }
                #endregion

                #region Store: frame size.
                VIDEOINFOHEADER vinfo = DSLab.Axi.GetVideoInfo(SampleGrabber);
                this.SampleGrabberCB.BitmapInfo = vinfo.bmiHeader;
                this.SampleGrabberCB.FrameSize = new Size(
                    System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biWidth),
                    System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biHeight)
                    );
                #endregion

                #region Store: device name.
                try
                {
                    if (string.IsNullOrEmpty(param.FilterInfo.Name) == false)
                    {
                        this.DeviceName = param.FilterInfo.Name;
                    }
                    else
                    {
                        int filter_index = param.FilterInfo.Index;
                        List<DSLab.CxDSFilterInfo> filters = DSLab.Axi.GetFilterList(DSLab.GUID.CLSID_VideoInputDeviceCategory);
                        if (0 <= filter_index && filter_index < filters.Count)
                        {
                            this.DeviceName = filters[filter_index].Name;
                        }
                    }
                }
                catch (System.Exception)
                {
                    this.DeviceName = "";
                }
                #endregion

                // DEBUG
            #if DEBUG
                DebugPrint(this.GraphBuilder);
            #endif
            }
            catch (Exception ex)
            {
                this.Dispose();
                throw new DSLab.CxDSException(ex);
            }
        }
Code Example #22
File: DvdPlayer.cs  Project: ORRNY66/GS
        /// <summary>
        /// Builds the DVD DirectShow graph
        /// </summary>
        private void BuildGraph()
        {
            try
            {
                FreeResources();

                int hr;

                /* Create our new graph */
                m_graph = (IGraphBuilder)new FilterGraphNoThread();

            #if DEBUG
                m_rot = new DsROTEntry(m_graph);
            #endif
                /* We are going to use the VMR9 for now.  The EVR does not
                 * seem to work with the interactive menus yet.  It should
                 * play Dvds fine otherwise */
                var rendererType = VideoRendererType.VideoMixingRenderer9;

                /* Creates and initializes a new renderer ready to render to WPF */
                m_renderer = CreateVideoRenderer(rendererType, m_graph, 2);

                /* Do some VMR9 specific stuff */
                if (rendererType == VideoRendererType.VideoMixingRenderer9)
                {
                    var mixer = m_renderer as IVMRMixerControl9;

                    if(mixer != null)
                    {
                        VMR9MixerPrefs dwPrefs;
                        mixer.GetMixingPrefs(out dwPrefs);
                        dwPrefs &= ~VMR9MixerPrefs.RenderTargetMask;
                        dwPrefs |= VMR9MixerPrefs.RenderTargetYUV;

                        /* Enable this line to prefer YUV */
                        //hr = mixer.SetMixingPrefs(dwPrefs);
                    }
                }

                /* Create a new DVD Navigator. */
                var dvdNav = (IBaseFilter)new DVDNavigator();

                /* The DVDControl2 interface lets us control DVD features */
                m_dvdControl = dvdNav as IDvdControl2;

                if (m_dvdControl == null)
                    throw new Exception("Could not QueryInterface the IDvdControl2 interface");

                /* QueryInterface the DVDInfo2 */
                m_dvdInfo = dvdNav as IDvdInfo2;

                /* If a Dvd directory has been set then use it, if not, let DShow find the Dvd */
                if (!string.IsNullOrEmpty(DvdDirectory))
                {
                    hr = m_dvdControl.SetDVDDirectory(DvdDirectory);
                    DsError.ThrowExceptionForHR(hr);
                }

                /* This gives us the DVD time in Hours-Minutes-Seconds-Frame time format, and other options */
                hr = m_dvdControl.SetOption(DvdOptionFlag.HMSFTimeCodeEvents, true);
                DsError.ThrowExceptionForHR(hr);

                /* If the graph stops, resume at the same point */
                m_dvdControl.SetOption(DvdOptionFlag.ResetOnStop, false);

                hr = m_graph.AddFilter(dvdNav, "DVD Navigator");
                DsError.ThrowExceptionForHR(hr);

                IPin dvdVideoPin = null;
                IPin dvdAudioPin = null;
                IPin dvdSubPicturePin = null;

                IPin dvdNavPin;
                int i = 0;

                /* Loop all the output pins on the DVD Navigator, trying to find which pins are which.
                 * We could more easily find the pins by name, but this is more fun...and more flexible
                 * if we ever want to use a 3rd party DVD navigator that used different pin names */
                while ((dvdNavPin = DsFindPin.ByDirection(dvdNav, PinDirection.Output, i)) != null)
                {
                    var mediaTypes = new AMMediaType[1];
                    IntPtr pFetched = IntPtr.Zero;

                    IEnumMediaTypes mediaTypeEnum;
                    dvdNavPin.EnumMediaTypes(out mediaTypeEnum);

                    /* Loop over each of the mediaTypes of each pin */
                    while (mediaTypeEnum.Next(1, mediaTypes, pFetched) == 0)
                    {
                        AMMediaType mediaType = mediaTypes[0];

                        /* This will be the video stream pin */
                        if (mediaType.subType == MediaSubType.Mpeg2Video)
                        {
                            /* Keep the ref and we'll work with it later */
                            dvdVideoPin = dvdNavPin;
                            break;
                        }

                        /* This will be the audio stream pin */
                        if (mediaType.subType == MediaSubType.DolbyAC3 ||
                           mediaType.subType == MediaSubType.Mpeg2Audio)
                        {
                            /* Keep the ref and we'll work with it later */
                            dvdAudioPin = dvdNavPin;
                            break;
                        }

                        /* This is the Dvd sub picture pin.  This generally
                         * shows overlays for Dvd menus and sometimes closed captions */
                        if (mediaType.subType == DVD_SUBPICTURE_TYPE)
                        {
                            /* Keep the ref and we'll work with it later */
                            dvdSubPicturePin = dvdNavPin;
                            break;
                        }
                    }

                    mediaTypeEnum.Reset();
                    Marshal.ReleaseComObject(mediaTypeEnum);
                    i++;
                }

                /* This is the windowed renderer.  This is *NEEDED* in order
                 * for interactive menus to work with the other VMR9 in renderless mode */
                var dummyRenderer = (IBaseFilter)new VideoMixingRenderer9();
                var dummyRendererConfig = (IVMRFilterConfig9)dummyRenderer;

                /* In order for this interactive menu trick to work, the VMR9
                 * must be set to Windowed.  We will make sure the window is hidden later on */
                hr = dummyRendererConfig.SetRenderingMode(VMR9Mode.Windowed);
                DsError.ThrowExceptionForHR(hr);

                hr = dummyRendererConfig.SetNumberOfStreams(1);
                DsError.ThrowExceptionForHR(hr);

                hr = m_graph.AddFilter(dummyRenderer, "Dummy Windowed");
                DsError.ThrowExceptionForHR(hr);

                if (dvdAudioPin != null)
                {
                    /* This should render out to the default audio device. We
                     * could modify this code here to go out any audio
                     * device, such as SPDIF or another sound card */
                    hr = m_graph.Render(dvdAudioPin);
                    DsError.ThrowExceptionForHR(hr);
                }

                /* Get the first input pin on our dummy renderer */
                m_dummyRendererPin = DsFindPin.ByConnectionStatus(dummyRenderer, /* Filter to search */
                                                                  PinConnectedStatus.Unconnected,
                                                                  0);

                /* Get an available pin on our real renderer */
                IPin rendererPin = DsFindPin.ByConnectionStatus(m_renderer, /* Filter to search */
                                                                PinConnectedStatus.Unconnected,
                                                                0); /* Pin index */

                /* Connect the pin to the renderer */
                hr = m_graph.Connect(dvdVideoPin, rendererPin);
                DsError.ThrowExceptionForHR(hr);

                /* Get the next available pin on our real renderer */
                rendererPin = DsFindPin.ByConnectionStatus(m_renderer, /* Filter to search */
                                                           PinConnectedStatus.Unconnected,
                                                           0); /* Pin index */

                /* Render the sub picture, which will connect
                 * the DVD navigator to the codec, not the renderer */
                hr = m_graph.Render(dvdSubPicturePin);
                DsError.ThrowExceptionForHR(hr);

                /* These are the subtypes most likely to be our dvd subpicture */
                var preferedSubpictureTypes = new[]{MediaSubType.ARGB4444,
                                                    MediaSubType.AI44,
                                                    MediaSubType.AYUV,
                                                    MediaSubType.ARGB32};
                IPin dvdSubPicturePinOut = null;

                /* Find what should be the subpicture pin out */
                foreach (var guidType in preferedSubpictureTypes)
                {
                    dvdSubPicturePinOut = FindPinInGraphByMediaType(guidType, /* GUID of the media type being searched for */
                                                                    PinDirection.Output,
                                                                    m_graph); /* Our current graph */
                    if (dvdSubPicturePinOut != null)
                        break;
                }

                if (dvdSubPicturePinOut == null)
                    throw new Exception("Could not find the sub picture pin out");

                /* Here we connect the DVD sub picture pin to the video renderer.
                 * This enables the overlays on Dvd menus and some closed
                 * captions to be rendered. */
                hr = m_graph.Connect(dvdSubPicturePinOut, rendererPin);
                DsError.ThrowExceptionForHR(hr);

                /* Search for the Line21 out in the graph */
                IPin line21Out = FindPinInGraphByMediaType(MediaType.AuxLine21Data,
                                                           PinDirection.Output,
                                                           m_graph);
                if (line21Out == null)
                    throw new Exception("Could not find the Line21 pin out");

                /* We connect our line21Out pin to the dummy renderer;
                 * this is what ultimately makes interactive DVDs work with
                 * VMR9 in renderless mode (for WPF) */
                hr = m_graph.Connect(line21Out, m_dummyRendererPin);
                DsError.ThrowExceptionForHR(hr);

                /* This is the dummy renderers Win32 window. */
                m_dummyRenderWindow = dummyRenderer as IVideoWindow;

                if (m_dummyRenderWindow == null)
                    throw new Exception("Could not QueryInterface for IVideoWindow");

                ConfigureDummyWindow();

                /* Setup our base classes with this filter graph */
                SetupFilterGraph(m_graph);

                /* Sets the NaturalVideoWidth/Height */
                SetNativePixelSizes(m_renderer);
            }
            catch (Exception ex)
            {
                FreeResources();
                InvokeMediaFailed(new MediaFailedEventArgs(ex.Message, ex));
                return;
            }

            InvokeMediaOpened();
        }
Code Example #23
File: VideoFramePusher.cs  Project: DeSciL/Ogama
    /// <summary>Build the capture graph for the sample grabber.</summary>
    private void SetupGraph(string FileName)
    {
      int hr;

      // Get the graphbuilder object
      this.graphBuilder = new FilterGraph() as IGraphBuilder;
      this.mediaControl = this.graphBuilder as IMediaControl;
      this.mediaSeeking = this.graphBuilder as IMediaSeeking;
      this.mediaEvent = this.graphBuilder as IMediaEvent;

      try
      {
        // Get the SampleGrabber interface
        this.sampleGrabber = new SampleGrabber() as ISampleGrabber;
        this.sampleGrabberFilter = sampleGrabber as IBaseFilter;

        ConfigureSampleGrabber(sampleGrabber);

        // Add the frame grabber to the graph
        hr = graphBuilder.AddFilter(sampleGrabberFilter, "Ds.NET Sample Grabber");
        DsError.ThrowExceptionForHR(hr);

        IBaseFilter aviSplitter = new AviSplitter() as IBaseFilter;

        // Add the aviSplitter to the graph
        hr = graphBuilder.AddFilter(aviSplitter, "Splitter");
        DsError.ThrowExceptionForHR(hr);

        // Have the graph builder construct its appropriate graph automatically
        hr = this.graphBuilder.RenderFile(FileName, null);
        DsError.ThrowExceptionForHR(hr);

#if DEBUG
        m_rot = new DsROTEntry(graphBuilder);
#endif

        // Remove the video renderer filter
        IBaseFilter defaultVideoRenderer = null;
        graphBuilder.FindFilterByName("Video Renderer", out defaultVideoRenderer);
        graphBuilder.RemoveFilter(defaultVideoRenderer);

        // Disconnect anything that is connected
        // to the output of the sample grabber
        IPin iPinSampleGrabberOut = DsFindPin.ByDirection(sampleGrabberFilter, PinDirection.Output, 0);
        IPin iPinVideoIn;
        hr = iPinSampleGrabberOut.ConnectedTo(out iPinVideoIn);

        if (hr == 0)
        {
          // Disconnect the sample grabber output from the attached filters
          hr = iPinVideoIn.Disconnect();
          DsError.ThrowExceptionForHR(hr);

          hr = iPinSampleGrabberOut.Disconnect();
          DsError.ThrowExceptionForHR(hr);
        }
        else
        {
          // Try other way round because automatic renderer could not build
          // graph including the sample grabber
          IPin iPinAVISplitterOut = DsFindPin.ByDirection(aviSplitter, PinDirection.Output, 0);
          IPin iPinAVISplitterIn;
          hr = iPinAVISplitterOut.ConnectedTo(out iPinAVISplitterIn);
          DsError.ThrowExceptionForHR(hr);

          hr = iPinAVISplitterOut.Disconnect();
          DsError.ThrowExceptionForHR(hr);

          hr = iPinAVISplitterIn.Disconnect();
          DsError.ThrowExceptionForHR(hr);

          // Connect the avi splitter output to sample grabber
          IPin iPinSampleGrabberIn = DsFindPin.ByDirection(sampleGrabberFilter, PinDirection.Input, 0);
          hr = graphBuilder.Connect(iPinAVISplitterOut, iPinSampleGrabberIn);
          DsError.ThrowExceptionForHR(hr);
        }

        // Add the null renderer to the graph
        nullrenderer = new NullRenderer() as IBaseFilter;
        hr = graphBuilder.AddFilter(nullrenderer, "Null renderer");
        DsError.ThrowExceptionForHR(hr);

        // Get the input pin of the null renderer
        IPin iPinNullRendererIn = DsFindPin.ByDirection(nullrenderer, PinDirection.Input, 0);

        // Connect the sample grabber to the null renderer
        hr = graphBuilder.Connect(iPinSampleGrabberOut, iPinNullRendererIn);
        DsError.ThrowExceptionForHR(hr);

        // Read and cache the image sizes
        SaveSizeInfo(sampleGrabber);

        this.GetFrameStepInterface();
      }
      finally
      {
      }
    }
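    /// <summary>
    /// A minimal sketch of what GetFrameStepInterface() might look like (hypothetical; the
    /// real helper is not shown in this excerpt). The filter graph manager itself exposes
    /// IVideoFrameStep, so a simple cast is usually enough; "frameStep" is an assumed field.
    /// </summary>
    private bool GetFrameStepInterface()
    {
      IVideoFrameStep step = this.graphBuilder as IVideoFrameStep;
      if (step == null)
      {
        return false;
      }

      // CanStep returns S_OK (0) when single-frame stepping is supported.
      if (step.CanStep(0, null) == 0)
      {
        this.frameStep = step; // assumed field, used later for frame-by-frame stepping
        return true;
      }

      return false;
    }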
コード例 #24
ファイル: CxDSMediaPlayer.cs プロジェクト: cogorou/DSLab
        /// <summary>
        /// Builds the graph.
        /// </summary>
        public virtual void Setup()
        {
            this.Dispose();

            try
            {
                // Graph.
                // CoCreateInstance
                GraphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_FilterGraph)));

                #region Add filters.
                // File source.
                IBaseFilter capture = null;
                GraphBuilder.AddSourceFilter(SourceFile, "CaptureFilter", ref capture);
                if (capture == null)
                    throw new System.IO.IOException();

            #if false
                // DMO wrapper filter.
                // https://msdn.microsoft.com/ja-jp/library/cc371140.aspx
                IBaseFilter dmo = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_DMOWrapperFilter)));
                if (dmo != null)
                {
                    //// Mpeg4 Decoder DMO
                    //// F371728A-6052-4D47-827C-D039335DFE0A
                    //// 4A69B442-28BE-4991-969C-B500ADF5D8A8
                    //// mpg4decd.dll [C:\Windows\System32, C:\Windows\SysWOW64]

                    var idmo = (IDMOWrapperFilter)dmo;
                    idmo.Init(new Guid("F371728A-6052-4D47-827C-D039335DFE0A"), new Guid("4A69B442-28BE-4991-969C-B500ADF5D8A8"));
                    idmo = null;

                    this.GraphBuilder.AddFilter(dmo, "Mpeg4 Decoder DMO");
                }
            #endif

            #if false
                // Avi Splitter
                IBaseFilter splitter = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_AVISplitter)));
                if (splitter == null)
                    throw new System.IO.IOException();
                this.GraphBuilder.AddFilter(splitter, "Avi Splitter");

                // Avi Decompressor
                IBaseFilter decompressor = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_AVIDec)));
                if (decompressor == null)
                    throw new System.IO.IOException();
                this.GraphBuilder.AddFilter(decompressor, "Avi Decompressor");
            #endif

                // Sample grabber.
                IBaseFilter grabber = (IBaseFilter)CreateSampleGrabber();
                if (grabber == null)
                    throw new System.IO.IOException();
                this.GraphBuilder.AddFilter(grabber, "SampleGrabber");

                // Renderer.
                IBaseFilter renderer = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_NullRenderer)));
                if (renderer == null)
                    throw new System.IO.IOException();
                this.GraphBuilder.AddFilter(renderer, "Renderer");
                #endregion

                #region Get pins.
                IPin capture_out = DSLab.Axi.FindPin(capture, 0, PIN_DIRECTION.PINDIR_OUTPUT);
                IPin grabber_in = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_INPUT);
                IPin grabber_out = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_OUTPUT);
                IPin renderer_in = DSLab.Axi.FindPin(renderer, 0, PIN_DIRECTION.PINDIR_INPUT);
                #endregion

                #region Connect pins.
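                // Connect() performs an intelligent connect: if the two pins cannot be
                // joined directly, the filter graph manager inserts whatever splitter and
                // decoder filters are needed between the source and the sample grabber.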
                GraphBuilder.Connect(capture_out, grabber_in);
                GraphBuilder.Connect(grabber_out, renderer_in);
                #endregion

                #region Store: interfaces.
                CaptureFilter = capture;
                CaptureOutPin = capture_out;
                SampleGrabber = (ISampleGrabber)grabber;
                SampleGrabberInPin = grabber_in;
                SampleGrabberOutPin = grabber_out;
                Renderer = renderer;
                RendererInPin = renderer_in;
                #endregion

                #region Store: frame size.
                VIDEOINFOHEADER vinfo = DSLab.Axi.GetVideoInfo(SampleGrabber);
                this.SampleGrabberCB.BitmapInfo = vinfo.bmiHeader;
                this.SampleGrabberCB.FrameSize = new Size(
                    System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biWidth),
                    System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biHeight)
                    );
                #endregion

                #region Extract interfaces:
                {
                    DSLab.IGraphBuilder graph = this.GraphBuilder;
                    DSLab.IEnumFilters filters = null;
                    DSLab.IBaseFilter filter = null;
                    int fetched = 0;

                    int hr = graph.EnumFilters(ref filters);
                    while (filters.Next(1, ref filter, ref fetched) == (int)DSLab.HRESULT.S_OK)
                    {
                        if (fetched == 0) break;

                        if (filter is DSLab.IMediaSeeking)
                        {
                            // Used for seek operations.
                            Seeking = (DSLab.IMediaSeeking)filter;
                        }
                        else
                        {
                            // Release the filter.
                            Marshal.ReleaseComObject(filter);
                            filter = null;
                        }
                    }

                    // Release.
                    Marshal.ReleaseComObject(filters);
                }
                #endregion

                // DEBUG
            #if DEBUG
                DebugPrint(this.GraphBuilder);
            #endif
            }
            catch (Exception ex)
            {
                throw new DSLab.CxDSException(ex);
            }
        }
コード例 #25
    public static IBaseFilter AddToGraph(IGraphBuilder graphBuilder)
    {
      IBaseFilter vob = null;

      DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.DirectVobSubAutoload, out vob);
      if (vob == null)
      {
        //Try the "normal" filter then.
        DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.DirectVobSubNormal, out vob);
      }

      //If the DirectVobSub filter has not been added to the graph (i.e. with EVR),
      //we add a bit more intelligence to determine whether subtitles are enabled,
      //and if subtitles are present for the video / movie we add the filter to the graph if necessary.
      if (vob == null)
      {
        Log.Info("VideoPlayerVMR9: No VobSub filter in the current graph");
        //The filter has not been added; let's check whether it should be added or not.
        //Add the filter to the graph.
        vob = DirectShowUtil.AddFilterToGraph(graphBuilder, "DirectVobSub");
        if (vob == null)
        {
          Log.Warn("VideoPlayerVMR9: DirectVobSub filter not found! You need to install VSFilter");
          return null;
        }
        else
        {
          Log.Debug("VideoPlayerVMR9: VobSub filter added to graph");
        }
      }
      else // VobSub already loaded
      {
        return vob;
      }

      // Check if Haali Media Splitter is in the graph.
      IBaseFilter hms = null;
      DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.HaaliGuid, out hms);
      if (hms == null)
        DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.MPCMatroska, out hms);
      if (hms == null)
        DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.MPCMatroskaSource, out hms);
      if (hms == null)
        DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.LAVFilter, out hms);
      if (hms == null)
        DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.LAVFilterSource, out hms);
      if (hms != null)
      {
        IPin pinSubTo = null;
        // It is. Connect its subtitle output pin (if any) to VobSub's subtitle input.
        pinSubTo = DirectShowUtil.FindPin(hms, PinDirection.Output, "Subtitle");
        if (pinSubTo == null)
        {
          while (true)
          {
            IPin freeSubtitle = DirectShowUtil.FindFirstFreePinSub(hms, PinDirection.Output, "");
            IPin freeVobSub = DirectShowUtil.FindFirstFreePin(vob, PinDirection.Input, "Input");
            if (freeSubtitle != null && freeVobSub != null)
            {
              Log.Debug("VideoPlayerVMR9: Connecting Matroska's subtitle output to VobSub's input.");
              graphBuilder.Connect(freeSubtitle, freeVobSub);
              DirectShowUtil.ReleaseComObject(freeSubtitle);
              freeSubtitle = null;
              DirectShowUtil.ReleaseComObject(freeVobSub);
              freeVobSub = null;
            }
            else
              break;
          }
        }

        DirectShowUtil.ReleaseComObject(hms);
        hms = null;
        if (pinSubTo != null)
        {
          Log.Debug("VideoPlayerVMR9: Connecting Haali's subtitle output to VobSub's input.");
          // Try to render pins
          IPin pinVobSubSub = DirectShowUtil.FindPin(vob, PinDirection.Input, "Input");
          // If pinSubTo is already connected (disconnect it)
          graphBuilder.Disconnect(pinSubTo);
          graphBuilder.Connect(pinSubTo, pinVobSubSub);
          DirectShowUtil.ReleaseComObject(pinSubTo);
          pinSubTo = null;
          DirectShowUtil.ReleaseComObject(pinVobSubSub);
          pinVobSubSub = null;
        }
      }

      // Now check if vobsub's video input is not connected.
      // Check only if vmr9 is connected (render was successful).
      VMR9Util Vmr9 = VMR9Util.g_vmr9;
      if (Vmr9.IsVMR9Connected)
      {
        Log.Debug("VideoPlayerVMR9: Connect VobSub's video pins");

        IPin pinVobSubVideoIn = DsFindPin.ByDirection(vob, PinDirection.Input, 0);
        IPin pinVobSubVideoOut = DsFindPin.ByDirection(vob, PinDirection.Output, 0);

        // This is the pin that we will connect to vobsub's input.
        IPin pinVideoTo = Vmr9.PinConnectedTo;
        IPin pinVideoFrom = null;
        pinVideoTo.ConnectedTo(out pinVideoFrom);
        pinVideoTo.Disconnect();
        pinVideoFrom.Disconnect();
        //Now make connection to VobSub
        int hr = graphBuilder.Connect(pinVideoTo, pinVobSubVideoIn);
        //hr = graphBuilder.Render(pinVideoFrom);
        if (hr != 0)
        {
          Log.Error("VideoPlayerVMR9: could not connect Vobsub's input video pin");
          return null;
        }
        hr = graphBuilder.Connect(pinVobSubVideoOut, pinVideoFrom);
        //hr = graphBuilder.Render(pinVobSubVideoOut);
        if (hr != 0)
        {
          Log.Error("VideoPlayerVMR9: could not connect Vobsub's output video pin");
          return null;
        }

        Log.Debug("VideoPlayerVMR9: Vobsub's video pins connected");
        DirectShowUtil.ReleaseComObject(pinVideoTo);
        pinVideoTo = null;
        DirectShowUtil.ReleaseComObject(pinVobSubVideoIn);
        pinVobSubVideoIn = null;
        DirectShowUtil.ReleaseComObject(pinVobSubVideoOut);
        pinVobSubVideoOut = null;
        DirectShowUtil.ReleaseComObject(pinVideoFrom);
        pinVideoFrom = null;
      }
      Vmr9 = null;
      return vob;
    }
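    // Hedged usage sketch (illustrative only): AddToGraph is typically called once the
    // video chain has been rendered, so DirectVobSub can be spliced into it.
    //
    //   IBaseFilter vobSub = AddToGraph(graphBuilder);
    //   if (vobSub != null)
    //   {
    //     // subtitles now flow through DirectVobSub; release the reference when done
    //     DirectShowUtil.ReleaseComObject(vobSub);
    //   }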
コード例 #26
ファイル: CapDevice.cs プロジェクト: evilmachina/theMachine
        void RunWorker()
        {
            try
            {

                graph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;

                sourceObject = FilterInfo.CreateFilter(deviceMoniker);

                grabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
                grabberObject = grabber as IBaseFilter;

                graph.AddFilter(sourceObject, "source");
                graph.AddFilter(grabberObject, "grabber");

                using (AMMediaType mediaType = new AMMediaType())
                {
                    mediaType.MajorType = MediaTypes.Video;
                    mediaType.SubType = MediaSubTypes.RGB32;
                    grabber.SetMediaType(mediaType);

                    if (graph.Connect(sourceObject.GetPin(PinDirection.Output, 0), grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
                    {
                        if (grabber.GetConnectedMediaType(mediaType) == 0)
                        {
                            VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                            capGrabber.Width = header.BmiHeader.Width;
                            capGrabber.Height = header.BmiHeader.Height;
                        }
                    }
                    graph.Render(grabberObject.GetPin(PinDirection.Output, 0));
                    grabber.SetBufferSamples(false);
                    grabber.SetOneShot(false);
                    grabber.SetCallback(capGrabber, 1);

                    IVideoWindow wnd = (IVideoWindow)graph;
                    wnd.put_AutoShow(false);
                    wnd = null;

                    control = (IMediaControl)graph;
                    control.Run();

                    while (!stopSignal.WaitOne(0, true))
                    {
                        Thread.Sleep(10);
                    }

                    control.StopWhenReady();
                }
            }
            catch (Exception ex)
            {
                System.Diagnostics.Debug.WriteLine(ex);
            }
            finally
            {
                graph = null;
                sourceObject = null;
                grabberObject = null;
                grabber = null;
                capGrabber = null;
                control = null;

            }
        }
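        // Hedged sketch (assumptions: "stopSignal" is a ManualResetEvent owned by this class
        // and "workerThread" is the Thread running RunWorker; neither is shown in this excerpt).
        // A Stop() method would typically signal the worker and wait for it to exit.
        public void Stop()
        {
            if (workerThread != null && workerThread.IsAlive)
            {
                stopSignal.Set();    // wakes the WaitOne(0, true) poll in RunWorker
                workerThread.Join();
                stopSignal.Reset();
            }
        }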
コード例 #27
        /// <summary>
        /// Worker thread that captures the images
        /// </summary>
        private void Init()
        {
            try
            {
                log.Trace("Start worker thread");
                // Create the main graph
                _graph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;

                // Create the webcam source
                _sourceObject = FilterInfo.CreateFilter(_monikerString);

                // Create the grabber
                _grabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
                _grabberObject = _grabber as IBaseFilter;

                // Add the source and grabber to the main graph
                _graph.AddFilter(_sourceObject, "source");
                _graph.AddFilter(_grabberObject, "grabber");

                using (AMMediaType mediaType = new AMMediaType())
                {
                    mediaType.MajorType = MediaTypes.Video;
                    mediaType.SubType = MediaSubTypes.RGB32;
                    _grabber.SetMediaType(mediaType);

                    if (_graph.Connect(_sourceObject.GetPin(PinDirection.Output, 0), _grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
                    {
                        if (_grabber.GetConnectedMediaType(mediaType) == 0)
                        {
                            // During startup, this code can be too fast, so try at least 3 times
                            int retryCount = 0;
                            bool succeeded = false;
                            while ((retryCount < 3) && !succeeded)
                            {
                                // Try again
                                retryCount++;

                                try
                                {
                                    // Retrieve the grabber information
                                    VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                                    _capGrabber.Width = header.BmiHeader.Width;
                                    _capGrabber.Height = header.BmiHeader.Height;

                                    // Succeeded
                                    succeeded = true;
                                }
                                catch
                                {
                                    // Trace
                                    log.InfoFormat("Failed to retrieve the grabber information, tried {0} time(s)", retryCount);

                                    // Sleep
                                    Thread.Sleep(50);
                                }
                            }
                        }
                    }
                    _graph.Render(_grabberObject.GetPin(PinDirection.Output, 0));
                    _grabber.SetBufferSamples(false);
                    _grabber.SetOneShot(false);
                    _grabber.SetCallback(_capGrabber, 1);
                    log.Trace("_grabber set up");

                    // Get the video window
                    IVideoWindow wnd = (IVideoWindow)_graph;
                    wnd.put_AutoShow(false);
                    wnd = null;

                    // Create the control and run
                    _control = (IMediaControl)_graph;
                    _control.Run();
                    log.Trace("control runs");

                    // Wait for the stop signal
                    //while (!_stopSignal.WaitOne(0, true))
                    //{
                    //    Thread.Sleep(10);
                    //}
                }
            }catch (Exception ex)
            {
                // Trace
                log.Debug(ex);
                Release();
            }
        }
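        // A hedged sketch of the callback object behind _capGrabber (hypothetical class and
        // members, assuming a DirectShowLib-style ISampleGrabberCB declaration; the real
        // implementation is not shown). SetCallback(_capGrabber, 1) above selects BufferCB,
        // so every decoded frame arrives as a raw RGB32 buffer.
        internal sealed class CapGrabberSketch : ISampleGrabberCB
        {
            public int Width { get; set; }   // set from the connected VideoInfoHeader
            public int Height { get; set; }

            public int SampleCB(double sampleTime, IMediaSample sample)
            {
                return 0; // unused when SetCallback(..., 1) is chosen
            }

            public int BufferCB(double sampleTime, IntPtr buffer, int bufferLen)
            {
                // For RGB32, bufferLen should equal Width * Height * 4 and rows are bottom-up.
                // Copy the pixels out here (e.g. into a WriteableBitmap) and return S_OK.
                return 0;
            }
        }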
コード例 #28
ファイル: Converter.cs プロジェクト: rickbassham/videobrowser
        /// <summary>Connects together two graph filters.</summary>
        /// <param name="graph">The graph on which the filters exist.</param>
        /// <param name="source">The source filter.</param>
        /// <param name="outPinName">The name of the output pin on the source filter.</param>
        /// <param name="destination">The destination filter.</param>
        /// <param name="inPinName">The name of the input pin on the destination filter.</param>
        protected void Connect(IGraphBuilder graph, IBaseFilter source, string outPinName, 
			IBaseFilter destination, string inPinName)
        {
            IPin outPin = source.FindPin(outPinName);
            DisposalCleanup.Add(outPin);

            IPin inPin = destination.FindPin(inPinName);
            DisposalCleanup.Add(inPin);

            graph.Connect(outPin, inPin);
        }
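        /// <summary>
        /// Hedged variant (illustrative only): Connect above ignores the value returned by the
        /// graph builder, so failures pass silently. A stricter helper, using the same assumed
        /// DisposalCleanup infrastructure and assuming Connect returns an HRESULT, would
        /// surface them immediately.
        /// </summary>
        protected void ConnectChecked(IGraphBuilder graph, IBaseFilter source, string outPinName,
			IBaseFilter destination, string inPinName)
        {
            IPin outPin = source.FindPin(outPinName);
            DisposalCleanup.Add(outPin);

            IPin inPin = destination.FindPin(inPinName);
            DisposalCleanup.Add(inPin);

            // Throw if the graph manager could not connect the pins, directly or via
            // intermediate filters.
            int hr = graph.Connect(outPin, inPin);
            Marshal.ThrowExceptionForHR(hr);
        }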
コード例 #29
    public bool Transcode(TranscodeInfo info, MediaPortal.Core.Transcoding.VideoFormat format,
                          MediaPortal.Core.Transcoding.Quality quality, Standard standard)
    {
      if (!Supports(format)) return false;
      string ext = System.IO.Path.GetExtension(info.file);
      if (ext.ToLower() != ".dvr-ms" && ext.ToLower() != ".sbe")
      {
        Log.Info("DVRMS2DIVX: wrong file format");
        return false;
      }

      //disable xvid status window while encoding
      /*  try
				{
					using (RegistryKey subkey = Registry.CurrentUser.OpenSubKey(@"Software\GNU\XviD", true))
					{
						if (subkey != null)
						{
							Int32 uivalue = 0;
							subkey.SetValue("display_status", (Int32)uivalue);
							subkey.SetValue("debug", (Int32)uivalue);
							subkey.SetValue("bitrate", (Int32)bitrate);

							uivalue = 1;
							subkey.SetValue("interlacing", (Int32)uivalue);
						}
					}
				}
				catch (Exception)
				{
				}*/
      //Type comtype = null;
      //object comobj = null;
      try
      {
        graphBuilder = (IGraphBuilder)new FilterGraph();

        _rotEntry = new DsROTEntry((IFilterGraph)graphBuilder);

        Log.Info("DVRMS2DIVX: add filesource");
        bufferSource = (IStreamBufferSource)new StreamBufferSource();

        IBaseFilter filter = (IBaseFilter)bufferSource;
        graphBuilder.AddFilter(filter, "SBE SOURCE");
        IFileSourceFilter fileSource = (IFileSourceFilter)bufferSource;
        Log.Info("DVRMS2DIVX: load file:{0}", info.file);
        int hr = fileSource.Load(info.file, null);


        /*string strDemuxerMoniker = @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{AFB6C280-2C41-11D3-8A60-0000F81E0E4A}";

				mpegDemuxer = Marshal.BindToMoniker(strDemuxerMoniker) as IBaseFilter;
				if (mpegDemuxer == null)
				{
						Log.Error("DVRMS2DIVX:FAILED:unable to add mpeg2 demuxer");
						Cleanup();
						return false;
				}
				hr = graphBuilder.AddFilter(mpegDemuxer, "MPEG-2 Demultiplexer");
				if (hr != 0)
				{
						Log.Error("DVRMS2DIVX:FAILED:Add mpeg2 demuxer to filtergraph :0x{0:X}", hr);
						Cleanup();
						return false;
				}*/

        //add mpeg2 audio/video codecs
        string strVideoCodecMoniker =
          @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{39F498AF-1A09-4275-B193-673B0BA3D478}";
        string strAudioCodec = "MPC - MPA Decoder Filter";
        Log.Info("DVRMS2DIVX: add MPV mpeg2 video decoder");
        Mpeg2VideoCodec = Marshal.BindToMoniker(strVideoCodecMoniker) as IBaseFilter;
        if (Mpeg2VideoCodec == null)
        {
          Log.Error("DVRMS2DIVX:FAILED:unable to add MPV mpeg2 video decoder");
          Cleanup();
          return false;
        }
        hr = graphBuilder.AddFilter(Mpeg2VideoCodec, "MPC - MPEG-2 Video Decoder (Gabest)");
        if (hr != 0)
        {
          Log.Error("DVRMS2DIVX:FAILED:Add MPV mpeg2 video decoder to filtergraph :0x{0:X}", hr);
          Cleanup();
          return false;
        }

        Log.Info("DVRMS2DIVX: add MPA mpeg2 audio codec:{0}", strAudioCodec);
        Mpeg2AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
        if (Mpeg2AudioCodec == null)
        {
          Log.Error("DVRMS2DIVX:FAILED:unable to add MPA mpeg2 audio codec");
          Cleanup();
          return false;
        }

        //connect output #0 of the streambuffer source to the mpeg2 audio codec's input pin
        //connect output #1 of the streambuffer source to the mpeg2 video codec's input pin
        Log.Info("DVRMS2DIVX: connect streambuffer source->mpeg audio/video decoders");
        IPin pinOut0, pinOut1;
        IPin pinIn0, pinIn1;
        pinOut0 = DsFindPin.ByDirection((IBaseFilter)bufferSource, PinDirection.Output, 0); //audio
        pinOut1 = DsFindPin.ByDirection((IBaseFilter)bufferSource, PinDirection.Output, 1); //video
        if (pinOut0 == null || pinOut1 == null)
        {
          Log.Error("DVRMS2DIVX:FAILED:unable to get pins of source");
          Cleanup();
          return false;
        }

        pinIn0 = DsFindPin.ByDirection(Mpeg2VideoCodec, PinDirection.Input, 0); //video
        pinIn1 = DsFindPin.ByDirection(Mpeg2AudioCodec, PinDirection.Input, 0); //audio
        if (pinIn0 == null || pinIn1 == null)
        {
          Log.Error("DVRMS2DIVX:FAILED:unable to get pins of mpeg2 video/audio codec");
          Cleanup();
          return false;
        }

        hr = graphBuilder.Connect(pinOut0, pinIn1);
        if (hr != 0)
        {
          Log.Error("DVRMS2DIVX:FAILED:unable to connect audio pins :0x{0:X}", hr);
          Cleanup();
          return false;
        }


        hr = graphBuilder.Connect(pinOut1, pinIn0);
        if (hr != 0)
        {
          Log.Error("DVRMS2DIVX:FAILED:unable to connect video pins :0x{0:X}", hr);
          Cleanup();
          return false;
        }
        if (!AddCodecs(graphBuilder, info)) return false;

        //				hr=(graphBuilder as IMediaFilter).SetSyncSource(null);
        //				if (hr!=0)
        //					Log.Error("DVRMS2DIVX:FAILED:to SetSyncSource :0x{0:X}",hr);
        mediaControl = graphBuilder as IMediaControl;
        mediaSeeking = bufferSource as IStreamBufferMediaSeeking;
        mediaEvt = graphBuilder as IMediaEventEx;
        mediaPos = graphBuilder as IMediaPosition;

        //get file duration
        Log.Info("DVRMS2DIVX: Get duration of movie");
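        // DirectShow reference time is measured in 100-nanosecond units, so 5 hours is
        // 5 * 60 * 60 seconds * 10,000,000 units per second. Seeking far past the end
        // clamps to the real end of the file, so GetCurrentPosition below yields the duration.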
        long lTime = 5 * 60 * 60;
        lTime *= 10000000;
        long pStop = 0;
        hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                       AMSeekingSeekingFlags.NoPositioning);
        if (hr == 0)
        {
          long lStreamPos;
          mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
          m_dDuration = lStreamPos;
          lTime = 0;
          mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                    AMSeekingSeekingFlags.NoPositioning);
        }
        double duration = m_dDuration / 10000000d;
        Log.Info("DVRMS2DIVX: movie duration:{0}", MediaPortal.Util.Utils.SecondsToHMSString((int)duration));

        //				hr=(graphBuilder as IMediaFilter).SetSyncSource(null);
        //				if (hr!=0)
        //					Log.Error("DVRMS2DIVX:FAILED:to SetSyncSource :0x{0:X}",hr);
        hr = mediaControl.Run();
        if (hr != 0)
        {
          Log.Error("DVRMS2DIVX:FAILED:unable to start graph :0x{0:X}", hr);
          Cleanup();
          return false;
        }
        int maxCount = 20;
        while (true)
        {
          long lCurrent;
          mediaSeeking.GetCurrentPosition(out lCurrent);
          double dpos = (double)lCurrent;
          dpos /= 10000000d;
          System.Threading.Thread.Sleep(100);
          if (dpos >= 2.0d) break;
          maxCount--;
          if (maxCount <= 0) break;
        }

        mediaControl.Stop();
        FilterState state;
        mediaControl.GetState(500, out state);
        GC.Collect();
        GC.Collect();
        GC.Collect();
        GC.WaitForPendingFinalizers();
        graphBuilder.RemoveFilter(aviMuxer);
        graphBuilder.RemoveFilter(divxCodec);
        graphBuilder.RemoveFilter(mp3Codec);
        graphBuilder.RemoveFilter((IBaseFilter)fileWriterFilter);
        if (!AddCodecs(graphBuilder, info)) return false;

        //				hr=(graphBuilder as IMediaFilter).SetSyncSource(null);
        //			if (hr!=0)
        //					Log.Error("DVRMS2DIVX:FAILED:to SetSyncSource :0x{0:X}",hr);

        Log.Info("DVRMS2DIVX: start transcoding");
        hr = mediaControl.Run();
        if (hr != 0)
        {
          Log.Error("DVRMS2DIVX:FAILED:unable to start graph :0x{0:X}", hr);
          Cleanup();
          return false;
        }
      }
      catch (Exception ex)
      {
        Log.Error("DVRMS2DIVX:Unable create graph: {0}", ex.Message);
        Cleanup();
        return false;
      }
      return true;
    }
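    // A minimal sketch of what Cleanup() might look like (hypothetical; the real method is
    // not shown in this excerpt): stop the graph and release the COM objects created above.
    private void Cleanup()
    {
      if (mediaControl != null)
      {
        mediaControl.Stop();
        mediaControl = null;
      }
      if (_rotEntry != null)
      {
        _rotEntry.Dispose();
        _rotEntry = null;
      }
      if (graphBuilder != null)
      {
        Marshal.ReleaseComObject(graphBuilder);
        graphBuilder = null;
      }
    }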
コード例 #30
ファイル: VideoStream.cs プロジェクト: ewin66/intvideosurv
        // Thread entry point
        public void WorkerThread()
        {
            bool failed = false;

            // grabber
            Grabber grabber = new Grabber(this);

            // objects
            object graphObj   = null;
            object sourceObj  = null;
            object grabberObj = null;

            // interfaces
            IGraphBuilder     graph       = null;
            IBaseFilter       sourceBase  = null;
            IBaseFilter       grabberBase = null;
            ISampleGrabber    sg          = null;
            IFileSourceFilter fileSource  = null;
            IMediaControl     mc          = null;
            IMediaEventEx     mediaEvent  = null;

            int code, param1, param2;

            while ((!failed) && (!stopEvent.WaitOne(0, true)))
            {
                try
                {
                    // Get type for filter graph
                    Type srvType = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                    if (srvType == null)
                    {
                        throw new ApplicationException("Failed creating filter graph");
                    }

                    // create filter graph
                    graphObj = Activator.CreateInstance(srvType);
                    graph    = (IGraphBuilder)graphObj;

                    // Get type for windows media source filter
                    srvType = Type.GetTypeFromCLSID(Clsid.WindowsMediaSource);
                    if (srvType == null)
                    {
                        throw new ApplicationException("Failed creating WM source");
                    }

                    // create windows media source filter
                    sourceObj  = Activator.CreateInstance(srvType);
                    sourceBase = (IBaseFilter)sourceObj;

                    // Get type for sample grabber
                    srvType = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                    if (srvType == null)
                    {
                        throw new ApplicationException("Failed creating sample grabber");
                    }

                    // create sample grabber
                    grabberObj  = Activator.CreateInstance(srvType);
                    sg          = (ISampleGrabber)grabberObj;
                    grabberBase = (IBaseFilter)grabberObj;

                    // add source filter to graph
                    graph.AddFilter(sourceBase, "source");
                    graph.AddFilter(grabberBase, "grabber");

                    // set media type
                    AMMediaType mt = new AMMediaType();
                    mt.majorType = MediaType.Video;
                    mt.subType   = MediaSubType.RGB24;
                    sg.SetMediaType(mt);

                    // load file
                    fileSource = (IFileSourceFilter)sourceObj;
                    fileSource.Load(this.source, null);

                    // connect pins
                    if (graph.Connect(DSTools.GetOutPin(sourceBase, 0), DSTools.GetInPin(grabberBase, 0)) < 0)
                    {
                        throw new ApplicationException("Failed connecting filters");
                    }

                    // get media type
                    if (sg.GetConnectedMediaType(mt) == 0)
                    {
                        VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));

                        grabber.Width  = vih.BmiHeader.Width;
                        grabber.Height = vih.BmiHeader.Height;
                        mt.Dispose();
                    }

                    // render
                    graph.Render(DSTools.GetOutPin(grabberBase, 0));

                    //
                    sg.SetBufferSamples(false);
                    sg.SetOneShot(false);
                    sg.SetCallback(grabber, 1);

                    // window
                    IVideoWindow win = (IVideoWindow)graphObj;
                    win.put_AutoShow(false);
                    win = null;

                    // get events interface
                    mediaEvent = (IMediaEventEx)graphObj;

                    // get media control
                    mc = (IMediaControl)graphObj;

                    // run
                    mc.Run();

                    while (!stopEvent.WaitOne(0, true))
                    {
                        Thread.Sleep(100);

                        // get an event
                        if (mediaEvent.GetEvent(out code, out param1, out param2, 0) == 0)
                        {
                            // release params
                            mediaEvent.FreeEventParams(code, param1, param2);

                            //
                            if (code == (int)EventCode.Complete)
                            {
                                break;
                            }
                        }
                    }

                    mc.StopWhenReady();
                }
                // catch any exceptions
                catch (Exception e)
                {
                    System.Diagnostics.Debug.WriteLine("----: " + e.Message);
                    failed = true;
                }
                // finalization block
                finally
                {
                    // release all objects
                    mediaEvent  = null;
                    mc          = null;
                    fileSource  = null;
                    graph       = null;
                    sourceBase  = null;
                    grabberBase = null;
                    sg          = null;

                    if (graphObj != null)
                    {
                        Marshal.ReleaseComObject(graphObj);
                        graphObj = null;
                    }
                    if (sourceObj != null)
                    {
                        Marshal.ReleaseComObject(sourceObj);
                        sourceObj = null;
                    }
                    if (grabberObj != null)
                    {
                        Marshal.ReleaseComObject(grabberObj);
                        grabberObj = null;
                    }
                }
            }
        }
コード例 #31
        /// <summary>
        /// Creates the DirectShow graph with LAVFilters
        /// </summary>
        void SetupLAVGraph()
        {
            //System.IO.FileStream fs = null;

            try
            {
                try
                {
                    /* Creates the GraphBuilder COM object */
                    _graph = new FilterGraphNoThread() as IGraphBuilder;

                    if (_graph == null)
                    {
                        throw new Exception("Could not create a graph");
                    }

                    //if (_graphLogLocation != null)
                    //{
                    //    fs = System.IO.File.Create(_graphLogLocation);
                    //    int r = _graph.SetLogFile(fs.Handle);
                    //}

                    //
                    // Creating FileSource filter
                    //
                    IBaseFilter sourceFilter = GraphHelper.CreateLAVSplitterSourceFilter(_graph, _sourceFilePath, out IPin parserVideoOutputPin, out IPin parserAudioOutputPin);

                    //
                    // Creating renderer
                    //
                    Type        videoRendererFilterType = null;
                    IBaseFilter videoRenderer           = null;
                    GraphHelper.CreateFilter(NULL_RENDERER, "Null Renderer", ref videoRendererFilterType, ref videoRenderer);
                    int hr = _graph.AddFilter(videoRenderer, "Null Renderer");
                    DsError.ThrowExceptionForHR(hr);

                    IPin videoRendererInputPin = DsFindPin.ByDirection(videoRenderer, PinDirection.Input, 0);

                    //
                    // Creating Video filter
                    //

                    // Connect the sample grabber
                    var sampleGrabberFilter = (IBaseFilter)_sampleGrabber;
                    hr = _graph.AddFilter(sampleGrabberFilter, "Sample Grabber");
                    DsError.ThrowExceptionForHR(hr);
                    IPin sampleGrabberInputPin = DsFindPin.ByDirection(sampleGrabberFilter, PinDirection.Input, 0);
                    IPin sampleGrabberOuputPin = DsFindPin.ByDirection(sampleGrabberFilter, PinDirection.Output, 0);

                    //Insert the Color Space Converter
                    Type        filterType          = null;
                    IBaseFilter colorSpaceConverter = null;
                    GraphHelper.CreateFilter(GraphHelper.CLSID_COLOR_SPACE_CONVERTER, GraphHelper.COLOR_SPACE_CONVERTER_FRIENDLYNAME, ref filterType, ref colorSpaceConverter);
                    hr = _graph.AddFilter(colorSpaceConverter, GraphHelper.COLOR_SPACE_CONVERTER_FRIENDLYNAME);
                    DsError.ThrowExceptionForHR(hr);
                    IPin colorSpaceConverterInputPin = DsFindPin.ByDirection(colorSpaceConverter, PinDirection.Input, 0);
                    GraphHelper.ConnectLAVSplitterAndRendererWithLAVDecoder(_graph, parserVideoOutputPin, colorSpaceConverterInputPin);
                    IPin colorSpaceConverterOutputPin = DsFindPin.ByDirection(colorSpaceConverter, PinDirection.Output, 0);
                    hr = _graph.ConnectDirect(colorSpaceConverterOutputPin, sampleGrabberInputPin, null);
                    DsError.ThrowExceptionForHR(hr);

                    hr = _graph.Connect(sampleGrabberOuputPin, videoRendererInputPin);
                    DsError.ThrowExceptionForHR(hr);

                    // Removes the clock to run the graph as fast as possible
                    ((IMediaFilter)_graph).SetSyncSource(null);

                    GraphHelper.SafeRelease(parserAudioOutputPin);
                    GraphHelper.SafeRelease(parserVideoOutputPin);
                    GraphHelper.SafeRelease(videoRendererInputPin);
                    GraphHelper.SafeRelease(sampleGrabberInputPin);
                    GraphHelper.SafeRelease(sampleGrabberOuputPin);
                    GraphHelper.SafeRelease(colorSpaceConverterInputPin);
                    GraphHelper.SafeRelease(colorSpaceConverterOutputPin);

                    _mediaSeeking = _graph as IMediaSeeking;
                    _mediaControl = _graph as IMediaControl;
                    _mediaEvent   = _graph as IMediaEventEx;

                    /* Attempt to set the time format */
                    hr = _mediaSeeking.SetTimeFormat(TimeFormat.MediaTime);
                    DsError.ThrowExceptionForHR(hr);
                }
                catch (Exception ex)
                {
                    this.TraceError(ex, ex.Message);

                    /* This exception usually happens if the media does
                     * not exist or could not be opened because the
                     * proper filters are not installed */
                    FreeResources();
                }
            }
            finally
            {
                //if (_graphLogLocation != null && fs != null)
                //    fs.Close();
            }
        }
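        // Hedged usage sketch (illustrative only): once SetupLAVGraph() has built the
        // clock-free graph, it can be run to completion through the cached interfaces;
        // frames arrive via the sample grabber as fast as decoding allows.
        void RunToCompletion()
        {
            int hr = _mediaControl.Run();
            DsError.ThrowExceptionForHR(hr);

            // Block until EC_COMPLETE is raised (no timeout).
            EventCode code;
            hr = _mediaEvent.WaitForCompletion(-1, out code);
            DsError.ThrowExceptionForHR(hr);

            _mediaControl.Stop();
        }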
コード例 #32
    public static void RemoveFromGraph(IGraphBuilder graphBuilder)
    {
      IBaseFilter vob = null;
      DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.DirectVobSubAutoload, out vob);
      if (vob == null)
      {
        //Try the "normal" filter then.
        DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.DirectVobSubNormal, out vob);
        if (vob == null)
          return;
      }

      Log.Info("VideoPlayerVMR9: DirectVobSub in graph, removing...");
      // Check where video inputs are connected
      IPin pinVideoIn = DsFindPin.ByDirection(vob, PinDirection.Input, 0);
      IPin pinVideoOut = DsFindPin.ByDirection(vob, PinDirection.Output, 0);

      //find the downstream pin that DirectVobSub's video output pin is connected to
      IPin pinVideoTo = null;
      pinVideoOut.ConnectedTo(out pinVideoTo);
      //find the upstream pin that DirectVobSub's video input pin is connected to
      IPin pinVideoFrom = null;
      pinVideoIn.ConnectedTo(out pinVideoFrom);

      int hr;

      if (pinVideoFrom != null)
      {
        hr = pinVideoFrom.Disconnect();
        if (hr != 0)
        {
          Log.Error("VideoPlayerVMR9: DirectVobSub failed disconnecting source pin");
        }
      }

      if (pinVideoTo != null)
      {
        hr = pinVideoTo.Disconnect();
        if (hr != 0)
        {
          Log.Error("VideoPlayerVMR9: DirectVobSub failed disconnecting destination pin");
        }
      }

      //remove the DirectVobSub filter from the graph
      graphBuilder.RemoveFilter(vob);
      DirectShowUtil.ReleaseComObject(vob);
      vob = null;

      //reconnect the source output pin to the vmr9/evr filter
      hr = graphBuilder.Connect(pinVideoFrom, pinVideoTo);
      //hr = graphBuilder.Render(pinVideoFrom);

      DirectShowUtil.ReleaseComObject(pinVideoFrom);
      pinVideoFrom = null;
      DirectShowUtil.ReleaseComObject(pinVideoTo);
      pinVideoTo = null;
      DirectShowUtil.ReleaseComObject(pinVideoOut);
      pinVideoOut = null;
      DirectShowUtil.ReleaseComObject(pinVideoIn);
      pinVideoIn = null;

      if (hr != 0)
        Log.Error("VideoPlayerVMR9: Could not connect video out to video renderer: {0}", hr);
      else
        Log.Debug("VideoPlayerVMR9: DirectVobSub graph rebuild finished");
    }
コード例 #33
    private bool AddCodecs(IGraphBuilder graphBuilder, TranscodeInfo info)
    {
      int hr;
      Log.Info("DVRMS2DIVX: add ffdshow (Divx) codec to graph");
      string monikerXVID = @"@device:sw:{33D9A760-90C8-11D0-BD43-00A0C911CE86}\ffdshow video encoder";
      divxCodec = Marshal.BindToMoniker(monikerXVID) as IBaseFilter;
      if (divxCodec == null)
      {
        Log.Error("DVRMS2DIVX:FAILED:Unable to create Divx MPEG-4 Codec");
        Cleanup();
        return false;
      }

      hr = graphBuilder.AddFilter(divxCodec, "ffdshow video encoder");
      if (hr != 0)
      {
        Log.Error("DVRMS2DIVX:FAILED:Add DivX MPEG-4 Codec to filtergraph :0x{0:X}", hr);
        Cleanup();
        return false;
      }


      Log.Info("DVRMS2DIVX: add MP3 (MPEG Layer-3) codec to graph");
      string monikerMPEG3 = @"@device:cm:{33D9A761-90C8-11D0-BD43-00A0C911CE86}\85MPEG Layer-3";
      mp3Codec = Marshal.BindToMoniker(monikerMPEG3) as IBaseFilter;
      if (mp3Codec == null)
      {
        Log.Error("DVRMS2DIVX:FAILED:Unable to create MPEG Layer-3 Codec");
        Cleanup();
        return false;
      }

      hr = graphBuilder.AddFilter(mp3Codec, "MPEG Layer-3");
      if (hr != 0)
      {
        Log.Error("DVRMS2DIVX:FAILED:Add MPEG Layer-3 to filtergraph :0x{0:X}", hr);
        Cleanup();
        return false;
      }

      //add filewriter 
      Log.Info("DVRMS2DIVX: add FileWriter to graph");
      string monikerFileWrite =
        @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{8596E5F0-0DA5-11D0-BD21-00A0C911CE86}";
      IBaseFilter fileWriterbase = Marshal.BindToMoniker(monikerFileWrite) as IBaseFilter;
      if (fileWriterbase == null)
      {
        Log.Error("DVRMS2DIVX:FAILED:Unable to create FileWriter");
        Cleanup();
        return false;
      }


      fileWriterFilter = fileWriterbase as IFileSinkFilter2;
      if (fileWriterFilter == null)
      {
        Log.Error("DVRMS2DIVX:FAILED:Add unable to get IFileSinkFilter for filewriter");
        Cleanup();
        return false;
      }

      hr = graphBuilder.AddFilter(fileWriterbase, "FileWriter");
      if (hr != 0)
      {
        Log.Error("DVRMS2DIVX:FAILED:Add FileWriter to filtergraph :0x{0:X}", hr);
        Cleanup();
        return false;
      }


      //set output filename
      //AMMediaType mt = new AMMediaType();
      string outputFileName = System.IO.Path.ChangeExtension(info.file, ".avi");
      Log.Info("DVRMS2DIVX: set output file to :{0}", outputFileName);
      hr = fileWriterFilter.SetFileName(outputFileName, null);
      if (hr != 0)
      {
        Log.Error("DVRMS2DIVX:FAILED:unable to set filename for filewriter :0x{0:X}", hr);
        Cleanup();
        return false;
      }

      // add avi muxer
      Log.Info("DVRMS2DIVX: add AVI Muxer to graph");
      string monikerAviMuxer =
        @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{E2510970-F137-11CE-8B67-00AA00A3F1A6}";
      aviMuxer = Marshal.BindToMoniker(monikerAviMuxer) as IBaseFilter;
      if (aviMuxer == null)
      {
        Log.Error("DVRMS2DIVX:FAILED:Unable to create AviMux");
        Cleanup();
        return false;
      }


      hr = graphBuilder.AddFilter(aviMuxer, "AviMux");
      if (hr != 0)
      {
        Log.Error("DVRMS2DIVX:FAILED:Add AviMux to filtergraph :0x{0:X}", hr);
        Cleanup();
        return false;
      }


      //connect output of mpeg2 video codec to the ffdshow (divx) encoder
      Log.Info("DVRMS2DIVX: connect mpeg2 video codec->divx codec");
      IPin pinOut, pinIn;
      pinIn = DsFindPin.ByDirection(divxCodec, PinDirection.Input, 0);
      if (pinIn == null)
      {
        Log.Error("DVRMS2DIVX:FAILED:cannot get input pin of divx codec:0x{0:X}", hr);
        Cleanup();
        return false;
      }
      pinOut = DsFindPin.ByDirection(Mpeg2VideoCodec, PinDirection.Output, 0);
      if (pinOut == null)
      {
        Log.Error("DVRMS2DIVX:FAILED:cannot get output pin of mpeg2 video codec :0x{0:X}", hr);
        Cleanup();
        return false;
      }

      hr = graphBuilder.Connect(pinOut, pinIn);
      if (hr != 0)
      {
        Log.Error("DVRMS2DIVX:FAILED:unable to connect mpeg2 video codec->divx:0x{0:X}", hr);
        Cleanup();
        return false;
      }

      //connect output of mpeg2 audio codec to mpeg3 codec
      Log.Info("DVRMS2DIVX: connect mpeg2 audio codec->mp3 codec");
      pinIn = DsFindPin.ByDirection(mp3Codec, PinDirection.Input, 0);
      if (pinIn == null)
      {
        Log.Error("DVRMS2DIVX:FAILED:cannot get input pin of mp3 codec:0x{0:X}", hr);
        Cleanup();
        return false;
      }
      pinOut = DsFindPin.ByDirection(Mpeg2AudioCodec, PinDirection.Output, 0);
      if (pinOut == null)
      {
        Log.Error("DVRMS2DIVX:FAILED:cannot get output pin of mpeg2 audio codec :0x{0:X}", hr);
        Cleanup();
        return false;
      }

      hr = graphBuilder.Connect(pinOut, pinIn);
      if (hr != 0)
      {
        Log.Error("DVRMS2DIVX:FAILED:unable to connect mpeg2 audio codec->mpeg3:0x{0:X}", hr);
        Cleanup();
        return false;
      }


      //connect output of mpeg3 codec to pin#0 of avimux
      Log.Info("DVRMS2DIVX: connect mp3 codec->avimux");
      pinOut = DsFindPin.ByDirection(mp3Codec, PinDirection.Output, 0);
      if (pinOut == null)
      {
        Log.Error("DVRMS2DIVX:FAILED:cannot get output pin of mp3 codec:0x{0:X}", hr);
        Cleanup();
        return false;
      }
      pinIn = DsFindPin.ByDirection(aviMuxer, PinDirection.Input, 0);
      if (pinIn == null)
      {
        Log.Error("DVRMS2DIVX:FAILED:cannot get input pin #0 of avimux :0x{0:X}", hr);
        Cleanup();
        return false;
      }

      hr = graphBuilder.Connect(pinOut, pinIn);
      if (hr != 0)
      {
        Log.Error("DVRMS2DIVX:FAILED:unable to connect mpeg3 codec->avimux:0x{0:X}", hr);
        Cleanup();
        return false;
      }

      //connect output of xvid codec to pin#1 of avimux
      Log.Info("DVRMS2DIVX: connect divx codec->avimux");
      pinOut = DsFindPin.ByDirection(divxCodec, PinDirection.Output, 0);
      if (pinOut == null)
      {
        Log.Error("DVRMS2DIVX:FAILED:cannot get output pin of divx codec:0x{0:X}", hr);
        Cleanup();
        return false;
      }
      pinIn = DsFindPin.ByDirection(aviMuxer, PinDirection.Input, 1);
      if (pinIn == null)
      {
        Log.Error("DVRMS2DIVX:FAILED:cannot get input pin #1 of avimux :0x{0:X}", hr);
        Cleanup();
        return false;
      }

      hr = graphBuilder.Connect(pinOut, pinIn);
      if (hr != 0)
      {
        Log.Error("DVRMS2DIVX:FAILED:unable to connect divx codec->avimux:0x{0:X}", hr);
        Cleanup();
        return false;
      }


      //connect avi mux out->filewriter in
      Log.Info("DVRMS2DIVX: connect avimux->filewriter");
      pinOut = DsFindPin.ByDirection(aviMuxer, PinDirection.Output, 0);
      if (pinOut == null)
      {
        Log.Error("DVRMS2DIVX:FAILED:cannot get output pin of avimux:0x{0:X}", hr);
        Cleanup();
        return false;
      }

      pinIn = DsFindPin.ByDirection(fileWriterbase, PinDirection.Input, 0);
      if (pinIn == null)
      {
        Log.Error("DVRMS2DIVX:FAILED:cannot get input pin of Filewriter :0x{0:X}", hr);
        Cleanup();
        return false;
      }
      hr = graphBuilder.Connect(pinOut, pinIn);
      if (hr != 0)
      {
        Log.Error("DVRMS2DIVX:FAILED:connect muxer->filewriter :0x{0:X}", hr);
        Cleanup();
        return false;
      }
      return true;
    }
コード例 #34
ファイル: VideoCapture.cs プロジェクト: culiniac/quavs
        /// <summary>
        /// Builds the capture graph for the sample grabber.
        /// </summary>
        /// <param name="strCapture">Name of the video capture device.</param>
        /// <param name="strCompressor">Name of the video compressor.</param>
        /// <param name="strFileName">Output file name.</param>
        /// <param name="iFrameRate">Capture frame rate.</param>
        /// <param name="iWidth">Frame width in pixels.</param>
        /// <param name="iHeight">Frame height in pixels.</param>
        /// <param name="owner">Handle of the window that will host the video.</param>
        /// <param name="record">if set to <c>true</c>, the stream is compressed and written to a file.</param>
        private void SetupGraph(string strCapture, string strCompressor, string strFileName, int iFrameRate, int iWidth, int iHeight, IntPtr owner, bool record)
        {
            ICaptureGraphBuilder2 captureGraphBuilder = null;
            ISampleGrabber sampGrabber = null;
            IBaseFilter theIPinTee = null;
            IBaseFilter mux = null;
            IFileSinkFilter sink = null;
            IBaseFilter captureDevice = null;
            IBaseFilter captureCompressor = null;
            IBaseFilter theRenderer = null;
            int hr = 0;

            try
            {
                //Create the filter for the selected video input
                captureDevice = CreateFilter(FilterCategory.VideoInputDevice, strCapture);

                //Create the filter for the selected video compressor
                captureCompressor = CreateFilter(FilterCategory.VideoCompressorCategory, strCompressor);

                //Create the Graph
                _graphBuilder = (IGraphBuilder)new FilterGraph();

                //Create the Capture Graph Builder
                captureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

                // Attach the filter graph to the capture graph
                hr = captureGraphBuilder.SetFiltergraph(this._graphBuilder);
                checkHR(hr, "Error attaching filter graph to capture graph");

                //Add the Video input device to the graph
                hr = _graphBuilder.AddFilter(captureDevice, "QUAVS input filter");
                checkHR(hr, "Error attaching video input");

                //set up the capture device
                SetConfigParms(captureGraphBuilder, captureDevice, iFrameRate, iWidth, iHeight);

                //Add a sample grabber
                sampGrabber = (ISampleGrabber)new SampleGrabber();
                ConfigureSampleGrabber(sampGrabber);
                hr = _graphBuilder.AddFilter((IBaseFilter)sampGrabber, "QUAVS SampleGrabber");
                checkHR(hr, "Error adding sample grabber");

                //connect capture device to SampleGrabber
                hr = _graphBuilder.Connect(GetPin(captureDevice, "Capture"), GetPin((IBaseFilter)sampGrabber, "Input"));
                checkHR(hr, "Error attaching sample grabber to capture pin");

                //Add Infinite Pin Tee
                theIPinTee = (IBaseFilter)new InfTee();
                hr = _graphBuilder.AddFilter(theIPinTee, "QUAVS Pin Tee");
                checkHR(hr, "Error adding infinite tee pin");

                //connect the SampleGrabber output to the infinite pin tee
                hr = _graphBuilder.Connect(GetPin((IBaseFilter)sampGrabber, "Output"), GetPin(theIPinTee, "Input"));
                checkHR(hr, "Error connecting sample grabber to tee");

                if (record)
                {
                    //Add the Video compressor filter to the graph
                    hr = _graphBuilder.AddFilter(captureCompressor, "QUAVS compressor filter");
                    checkHR(hr, "Error adding compressor filter");

                    //connect tee Output1 to the compressor
                    hr = _graphBuilder.Connect(GetPin(theIPinTee, "Output1"), GetPin(captureCompressor, "Input"));
                    checkHR(hr, "Error connecting tee to compressor");

                    //Create the file writer part of the graph. SetOutputFileName does this for us, and returns the mux and sink
                    hr = captureGraphBuilder.SetOutputFileName(MediaSubType.Avi, strFileName, out mux, out sink);
                    checkHR(hr, "Error adding mux filter or setting output file name");

                    //connect compressor to mux output
                    hr = _graphBuilder.Connect(GetPin(captureCompressor, "Output"), GetPin(mux, "Input 01"));
                    checkHR(hr, "Error connecting the compressor to mux");

                    // Get the default video renderer
                    theRenderer = new VideoRendererDefault() as IBaseFilter;
                    hr = _graphBuilder.AddFilter(theRenderer, "Renderer");
                    checkHR(hr, "Error adding screen renderer");

                    //connect tee Output2 to the screen renderer
                    hr = _graphBuilder.Connect(GetPin(theIPinTee, "Output2"), GetPin(theRenderer, "VMR Input0"));
                    checkHR(hr, "Error connecting screen renderer");
                }
                else
                {
                    // Get the default video renderer
                    theRenderer = new VideoRendererDefault() as IBaseFilter;
                    hr = _graphBuilder.AddFilter(theRenderer, "Renderer");
                    checkHR(hr, "Error adding screen renderer");

                    //connect tee Output1 to the screen renderer
                    hr = _graphBuilder.Connect(GetPin(theIPinTee, "Output1"), GetPin(theRenderer, "VMR Input0"));
                    checkHR(hr, "Error connecting screen renderer");
                }

                SaveSizeInfo(sampGrabber);
            #if DEBUG
                _rot = new DsROTEntry(_graphBuilder);
            #endif

                if (owner != IntPtr.Zero)
                {
                    //get the video window from the graph
                    IVideoWindow videoWindow = null;
                    videoWindow = (IVideoWindow)_graphBuilder;

                    //Set the owner of the videoWindow to a window handle (the Handle of any control - could be a form / button etc.)
                    hr = videoWindow.put_Owner(owner);
                    DsError.ThrowExceptionForHR(hr);

                    //Set the style of the video window
                    hr = videoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipChildren);
                    DsError.ThrowExceptionForHR(hr);

                    hr = videoWindow.SetWindowPosition(0, 0, iWidth, iHeight);
                    DsError.ThrowExceptionForHR(hr);

                    // Make the video window visible
                    hr = videoWindow.put_Visible(OABool.True);
                    DsError.ThrowExceptionForHR(hr);
                }

                //Create the media control for controlling the graph
                _mediaCtrl = (IMediaControl)this._graphBuilder;

            }
            catch (Exception e)
            {
                TraceException.WriteLine(e);
                //Trace.WriteLine(e.Message);
                CloseInterfaces();
            }
            finally
            {
                if (sink != null)
                {
                    Marshal.ReleaseComObject(sink);
                    sink = null;
                }
                if (mux != null)
                {
                    Marshal.ReleaseComObject(mux);
                    mux = null;
                }
                if (captureGraphBuilder != null)
                {
                    Marshal.ReleaseComObject(captureGraphBuilder);
                    captureGraphBuilder = null;
                }
                if (sampGrabber != null)
                {
                    Marshal.ReleaseComObject(sampGrabber);
                    sampGrabber = null;
                }
                if (theIPinTee != null)
                {
                    Marshal.ReleaseComObject(theIPinTee);
                    theIPinTee = null;
                }
                if (captureDevice != null)
                {
                    Marshal.ReleaseComObject(captureDevice);
                    captureDevice = null;
                }
                if (captureCompressor != null)
                {
                    Marshal.ReleaseComObject(captureCompressor);
                    captureCompressor = null;
                }
                if (theRenderer != null)
                {
                    Marshal.ReleaseComObject(theRenderer);
                    theRenderer = null;
                }
            }
        }
Code example #35
0
File: FileVideoSource.cs Project: Tob1112/405sentry
        /// <summary>
        /// Worker thread.
        /// </summary>
        ///
        private void WorkerThread( )
        {
            // grabber
            Grabber grabber = new Grabber(this);

            // objects
            object graphObject   = null;
            object sourceObject  = null;
            object grabberObject = null;

            // interfaces
            IGraphBuilder     graph         = null;
            IBaseFilter       sourceBase    = null;
            IBaseFilter       grabberBase   = null;
            ISampleGrabber    sampleGrabber = null;
            IMediaControl     mediaControl  = null;
            IFileSourceFilter fileSource    = null;

            try
            {
                // get type for filter graph
                Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating filter graph");
                }

                // create filter graph
                graphObject = Activator.CreateInstance(type);
                graph       = (IGraphBuilder)graphObject;

                // create source device's object
                type = Type.GetTypeFromCLSID(Clsid.AsyncReader);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating filter async reader");
                }

                sourceObject = Activator.CreateInstance(type);
                sourceBase   = (IBaseFilter)sourceObject;
                fileSource   = (IFileSourceFilter)sourceObject;

                fileSource.Load(fileName, null);

                // get type for sample grabber
                type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating sample grabber");
                }

                // create sample grabber
                grabberObject = Activator.CreateInstance(type);
                sampleGrabber = (ISampleGrabber)grabberObject;
                grabberBase   = (IBaseFilter)grabberObject;

                // add source and grabber filters to graph
                graph.AddFilter(sourceBase, "source");
                graph.AddFilter(grabberBase, "grabber");

                // set media type
                AMMediaType mediaType = new AMMediaType( );
                mediaType.MajorType = MediaType.Video;
                mediaType.SubType   = MediaSubType.RGB24;
                sampleGrabber.SetMediaType(mediaType);

                // connect pins
                if (graph.Connect(Tools.GetOutPin(sourceBase, 0), Tools.GetInPin(grabberBase, 0)) < 0)
                {
                    throw new ApplicationException("Failed connecting filters");
                }

                // get media type
                if (sampleGrabber.GetConnectedMediaType(mediaType) == 0)
                {
                    VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));

                    grabber.Width  = vih.BmiHeader.Width;
                    grabber.Height = vih.BmiHeader.Height;
                    mediaType.Dispose( );
                }

                // let's do rendering, if we don't need to prevent freezing
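                // (rendering connects the grabber's output pin, which pulls in a default video
                // renderer; put_AutoShow(false) below keeps that renderer's window hidden. When
                // preventFreezing is set, this rendering step is skipped entirely.)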
                if (!preventFreezing)
                {
                    // render pin
                    graph.Render(Tools.GetOutPin(grabberBase, 0));

                    // configure video window
                    IVideoWindow window = (IVideoWindow)graphObject;
                    window.put_AutoShow(false);
                    window = null;
                }

                // configure sample grabber
                sampleGrabber.SetBufferSamples(false);
                sampleGrabber.SetOneShot(false);
                sampleGrabber.SetCallback(grabber, 1);

                // get media control
                mediaControl = (IMediaControl)graphObject;

                // run
                mediaControl.Run( );

                while (!stopEvent.WaitOne(0, true))
                {
                    Thread.Sleep(100);
                }
                mediaControl.StopWhenReady( );
            }
            catch (Exception exception)
            {
                // provide information to clients
                if (VideoSourceError != null)
                {
                    VideoSourceError(this, new VideoSourceErrorEventArgs(exception.Message));
                }
            }
            finally
            {
                // release all objects
                graph         = null;
                sourceBase    = null;
                grabberBase   = null;
                sampleGrabber = null;
                mediaControl  = null;
                fileSource    = null;

                if (graphObject != null)
                {
                    Marshal.ReleaseComObject(graphObject);
                    graphObject = null;
                }
                if (sourceObject != null)
                {
                    Marshal.ReleaseComObject(sourceObject);
                    sourceObject = null;
                }
                if (grabberObject != null)
                {
                    Marshal.ReleaseComObject(grabberObject);
                    grabberObject = null;
                }
            }
        }
Code example #36
0
 public bool Transcode(TranscodeInfo info, VideoFormat format, Quality quality, Standard standard)
 {
   try
   {
     if (!Supports(format)) return false;
     string ext = System.IO.Path.GetExtension(info.file);
     if (ext.ToLowerInvariant() != ".dvr-ms" && ext.ToLowerInvariant() != ".sbe")
     {
       Log.Info("DVRMS2WMV: wrong file format");
       return false;
     }
     Log.Info("DVRMS2WMV: create graph");
     graphBuilder = (IGraphBuilder)new FilterGraph();
     _rotEntry = new DsROTEntry((IFilterGraph)graphBuilder);
     Log.Info("DVRMS2WMV: add streambuffersource");
     bufferSource = (IStreamBufferSource)new StreamBufferSource();
     IBaseFilter filter = (IBaseFilter)bufferSource;
     graphBuilder.AddFilter(filter, "SBE SOURCE");
     Log.Info("DVRMS2WMV: load file:{0}", info.file);
     IFileSourceFilter fileSource = (IFileSourceFilter)bufferSource;
     int hr = fileSource.Load(info.file, null);
     //add mpeg2 audio/video codecs
     string strVideoCodec = "";
     string strAudioCodec = "";
     using (MediaPortal.Profile.Settings xmlreader = new MediaPortal.Profile.MPSettings())
     {
       strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "MPC - MPEG-2 Video Decoder (Gabest)");
       strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "MPC - MPA Decoder Filter");
     }
     Log.Info("DVRMS2WMV: add mpeg2 video codec:{0}", strVideoCodec);
     Mpeg2VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
      if (Mpeg2VideoCodec == null)
      {
        Log.Error("DVRMS2WMV:FAILED:unable to add mpeg2 video codec to filtergraph");
       Cleanup();
       return false;
     }
     Log.Info("DVRMS2WMV: add mpeg2 audio codec:{0}", strAudioCodec);
     Mpeg2AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
     if (Mpeg2AudioCodec == null)
     {
       Log.Error("DVRMS2WMV:FAILED:unable to add mpeg2 audio codec");
       Cleanup();
       return false;
     }
     Log.Info("DVRMS2WMV: connect streambufer source->mpeg audio/video decoders");
     //connect output #0 of streambuffer source->mpeg2 audio codec pin 1
     //connect output #1 of streambuffer source->mpeg2 video codec pin 1
     IPin pinOut0, pinOut1;
     IPin pinIn0, pinIn1;
     pinOut0 = DsFindPin.ByDirection((IBaseFilter)bufferSource, PinDirection.Output, 0); //audio
     pinOut1 = DsFindPin.ByDirection((IBaseFilter)bufferSource, PinDirection.Output, 1); //video
     if (pinOut0 == null || pinOut1 == null)
     {
       Log.Error("DVRMS2WMV:FAILED:unable to get pins of source");
       Cleanup();
       return false;
     }
     pinIn0 = DsFindPin.ByDirection(Mpeg2VideoCodec, PinDirection.Input, 0); //video
     pinIn1 = DsFindPin.ByDirection(Mpeg2AudioCodec, PinDirection.Input, 0); //audio
     if (pinIn0 == null || pinIn1 == null)
     {
       Log.Error("DVRMS2WMV:FAILED:unable to get pins of mpeg2 video/audio codec");
       Cleanup();
       return false;
     }
     hr = graphBuilder.Connect(pinOut0, pinIn1);
     if (hr != 0)
     {
       Log.Error("DVRMS2WMV:FAILED:unable to connect audio pins :0x{0:X}", hr);
       Cleanup();
       return false;
     }
     hr = graphBuilder.Connect(pinOut1, pinIn0);
     if (hr != 0)
     {
       Log.Error("DVRMS2WMV:FAILED:unable to connect video pins :0x{0:X}", hr);
       Cleanup();
       return false;
     }
     string outputFilename = System.IO.Path.ChangeExtension(info.file, ".wmv");
     if (!AddWmAsfWriter(outputFilename, quality, standard)) return false;
     Log.Info("DVRMS2WMV: start pre-run");
     mediaControl = graphBuilder as IMediaControl;
     mediaSeeking = bufferSource as IStreamBufferMediaSeeking;
     mediaEvt = graphBuilder as IMediaEventEx;
     mediaPos = graphBuilder as IMediaPosition;
     //get file duration
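      //estimate the duration: seek far past the end (5 hours), read back the actual
      //position reached as the duration, then seek back to the start for the pre-run.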
     long lTime = 5 * 60 * 60;
     lTime *= 10000000;
     long pStop = 0;
     hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                    AMSeekingSeekingFlags.NoPositioning);
     if (hr == 0)
     {
       long lStreamPos;
       mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
       m_dDuration = lStreamPos;
       lTime = 0;
       mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                 AMSeekingSeekingFlags.NoPositioning);
     }
     double duration = m_dDuration / 10000000d;
     Log.Info("DVRMS2WMV: movie duration:{0}", Util.Utils.SecondsToHMSString((int)duration));
     hr = mediaControl.Run();
     if (hr != 0)
     {
       Log.Error("DVRMS2WMV:FAILED:unable to start graph :0x{0:X}", hr);
       Cleanup();
       return false;
     }
     int maxCount = 20;
     while (true)
     {
       long lCurrent;
       mediaSeeking.GetCurrentPosition(out lCurrent);
       double dpos = (double)lCurrent;
       dpos /= 10000000d;
       System.Threading.Thread.Sleep(100);
       if (dpos >= 2.0d) break;
       maxCount--;
       if (maxCount <= 0) break;
     }
     Log.Info("DVRMS2WMV: pre-run done");
     Log.Info("DVRMS2WMV: Get duration of movie");
     mediaControl.Stop();
     FilterState state;
     mediaControl.GetState(500, out state);
     GC.Collect();
     GC.Collect();
     GC.Collect();
     GC.WaitForPendingFinalizers();
     Log.Info("DVRMS2WMV: reconnect mpeg2 video codec->ASF WM Writer");
     graphBuilder.RemoveFilter(fileWriterbase);
     if (!AddWmAsfWriter(outputFilename, quality, standard)) return false;
     Log.Info("DVRMS2WMV: Start transcoding");
     hr = mediaControl.Run();
     if (hr != 0)
     {
       Log.Error("DVRMS2WMV:FAILED:unable to start graph :0x{0:X}", hr);
       Cleanup();
       return false;
     }
   }
   catch (Exception e)
   {
     // TODO: Handle exceptions.
     Log.Error("unable to transcode file:{0} message:{1}", info.file, e.Message);
     return false;
   }
   return true;
 }
Code example #37
0
    /// <summary> create the used COM components and get the interfaces. </summary>
    protected bool GetInterfaces()
    {
      VMR9Util.g_vmr9 = null;
      if (IsRadio == false)
      {
        Vmr9 = VMR9Util.g_vmr9 = new VMR9Util();

        // switch back to directx fullscreen mode
        Log.Info("RTSPPlayer: Enabling DX9 exclusive mode");
        GUIMessage msg = new GUIMessage(GUIMessage.MessageType.GUI_MSG_SWITCH_FULL_WINDOWED, 0, 0, 0, 1, 0, null);
        GUIWindowManager.SendMessage(msg);
      }
      //Type comtype = null;
      //object comobj = null;

      DsRect rect = new DsRect();
      rect.top = 0;
      rect.bottom = GUIGraphicsContext.form.Height;
      rect.left = 0;
      rect.right = GUIGraphicsContext.form.Width;


      try
      {
        graphBuilder = (IGraphBuilder)new FilterGraph();

        Log.Info("RTSPPlayer: add source filter");
        if (IsRadio == false)
        {
          bool AddVMR9 = VMR9Util.g_vmr9 != null && VMR9Util.g_vmr9.AddVMR9(graphBuilder);
          if (!AddVMR9)
          {
            Log.Error("RTSPPlayer:Failed to add VMR9 to graph");
            return false;
          }
          VMR9Util.g_vmr9.Enable(false);
        }

        _mpegDemux = (IBaseFilter)new MPEG2Demultiplexer();
        graphBuilder.AddFilter(_mpegDemux, "MPEG-2 Demultiplexer");

        _rtspSource = (IBaseFilter)new RtpSourceFilter();
        int hr = graphBuilder.AddFilter((IBaseFilter)_rtspSource, "RTSP Source Filter");
        if (hr != 0)
        {
          Log.Error("RTSPPlayer:unable to add RTSP source filter:{0:X}", hr);
          return false;
        }

        // add preferred video & audio codecs
        Log.Info("RTSPPlayer: add video/audio codecs");
        string strVideoCodec = "";
        string strAudioCodec = "";
        string strAudiorenderer = "";
        int intFilters = 0; // FlipGer: count custom filters
        string strFilters = ""; // FlipGer: collect custom filters
        string postProcessingFilterSection = "mytv";
        using (Settings xmlreader = new MPSettings())
        {
          if (_mediaType == g_Player.MediaType.Video)
          {
            strVideoCodec = xmlreader.GetValueAsString("movieplayer", "mpeg2videocodec", "");
            strAudioCodec = xmlreader.GetValueAsString("movieplayer", "mpeg2audiocodec", "");
            strAudiorenderer = xmlreader.GetValueAsString("movieplayer", "audiorenderer", "Default DirectSound Device");
            postProcessingFilterSection = "movieplayer";
          }
          else
          {
            strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "");
            strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "");
            strAudiorenderer = xmlreader.GetValueAsString("mytv", "audiorenderer", "Default DirectSound Device");
            postProcessingFilterSection = "mytv";
          }
          enableDvbSubtitles = xmlreader.GetValueAsBool("tvservice", "dvbsubtitles", false);
          // FlipGer: load infos for custom filters
          int intCount = 0;
          while (xmlreader.GetValueAsString(postProcessingFilterSection, "filter" + intCount.ToString(), "undefined") !=
                 "undefined")
          {
            if (xmlreader.GetValueAsBool(postProcessingFilterSection, "usefilter" + intCount.ToString(), false))
            {
              strFilters +=
                xmlreader.GetValueAsString(postProcessingFilterSection, "filter" + intCount.ToString(), "undefined") +
                ";";
              intFilters++;
            }
            intCount++;
          }
        }
        string extension = Path.GetExtension(m_strCurrentFile).ToLowerInvariant();
        if (IsRadio == false)
        {
          if (strVideoCodec.Length > 0)
          {
            DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
          }
        }
        if (strAudioCodec.Length > 0)
        {
          DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
        }

        if (enableDvbSubtitles == true)
        {
          try
          {
            _subtitleFilter = SubtitleRenderer.GetInstance().AddSubtitleFilter(graphBuilder);
            SubtitleRenderer.GetInstance().SetPlayer(this);
            dvbSubRenderer = SubtitleRenderer.GetInstance();
          }
          catch (Exception e)
          {
            Log.Error(e);
          }
        }

        Log.Debug("Is subtitle fitler null? {0}", (_subtitleFilter == null));
        // FlipGer: add custom filters to graph
        string[] arrFilters = strFilters.Split(';');
        for (int i = 0; i < intFilters; i++)
        {
          DirectShowUtil.AddFilterToGraph(graphBuilder, arrFilters[i]);
        }
        if (strAudiorenderer.Length > 0)
        {
          audioRendererFilter = DirectShowUtil.AddAudioRendererToGraph(graphBuilder, strAudiorenderer, false);
        }

        Log.Info("RTSPPlayer: load:{0}", m_strCurrentFile);
        IFileSourceFilter interfaceFile = (IFileSourceFilter)_rtspSource;
        if (interfaceFile == null)
        {
          Log.Error("RTSPPlayer:Failed to get IFileSourceFilter");
          return false;
        }

        //Log.Info("RTSPPlayer: open file:{0}",filename);
        hr = interfaceFile.Load(m_strCurrentFile, null);
        if (hr != 0)
        {
          Log.Error("RTSPPlayer:Failed to open file:{0} :0x{1:x}", m_strCurrentFile, hr);
          return false;
        }

        #region connect rtspsource->demux

        Log.Info("RTSPPlayer:connect rtspsource->mpeg2 demux");
        IPin pinTsOut = DsFindPin.ByDirection((IBaseFilter)_rtspSource, PinDirection.Output, 0);
        if (pinTsOut == null)
        {
          Log.Info("RTSPPlayer:failed to find output pin of tsfilesource");
          return false;
        }
        IPin pinDemuxIn = DsFindPin.ByDirection(_mpegDemux, PinDirection.Input, 0);
        if (pinDemuxIn == null)
        {
          Log.Info("RTSPPlayer:failed to find output pin of tsfilesource");
          return false;
        }

        hr = graphBuilder.Connect(pinTsOut, pinDemuxIn);
        if (hr != 0)
        {
          Log.Info("RTSPPlayer:failed to connect rtspsource->mpeg2 demux:{0:X}", hr);
          return false;
        }
        DirectShowUtil.ReleaseComObject(pinTsOut);
        DirectShowUtil.ReleaseComObject(pinDemuxIn);

        #endregion

        #region render demux output pins

        if (IsRadio)
        {
          Log.Info("RTSPPlayer:render audio demux outputs");
          IEnumPins enumPins;
          _mpegDemux.EnumPins(out enumPins);
          IPin[] pins = new IPin[2];
          int fetched = 0;
          while (enumPins.Next(1, pins, out fetched) == 0)
          {
            if (fetched != 1)
            {
              break;
            }
            PinDirection direction;
            pins[0].QueryDirection(out direction);
            if (direction == PinDirection.Input)
            {
              continue;
            }
            IEnumMediaTypes enumMediaTypes;
            pins[0].EnumMediaTypes(out enumMediaTypes);
            AMMediaType[] mediaTypes = new AMMediaType[20];
            int fetchedTypes;
            enumMediaTypes.Next(20, mediaTypes, out fetchedTypes);
            for (int i = 0; i < fetchedTypes; ++i)
            {
              if (mediaTypes[i].majorType == MediaType.Audio)
              {
                graphBuilder.Render(pins[0]);
                break;
              }
            }
          }
        }
        else
        {
          Log.Info("RTSPPlayer:render audio/video demux outputs");
          IEnumPins enumPins;
          _mpegDemux.EnumPins(out enumPins);
          IPin[] pins = new IPin[2];
          int fetched = 0;
          while (enumPins.Next(1, pins, out fetched) == 0)
          {
            if (fetched != 1)
            {
              break;
            }
            PinDirection direction;
            pins[0].QueryDirection(out direction);
            if (direction == PinDirection.Input)
            {
              continue;
            }
            graphBuilder.Render(pins[0]);
          }
        }

        #endregion

        // Connect DVB subtitle filter pins in the graph
        if (_mpegDemux != null && enableDvbSubtitles == true)
        {
          IMpeg2Demultiplexer demuxer = _mpegDemux as IMpeg2Demultiplexer;
          hr = demuxer.CreateOutputPin(GetTSMedia(), "Pcr", out _pinPcr);

          if (hr == 0)
          {
            Log.Info("RTSPPlayer:_pinPcr OK");

            IPin pDemuxerPcr = DsFindPin.ByName(_mpegDemux, "Pcr");
            IPin pSubtitlePcr = DsFindPin.ByName(_subtitleFilter, "Pcr");
            hr = graphBuilder.Connect(pDemuxerPcr, pSubtitlePcr);
          }
          else
          {
            Log.Info("RTSPPlayer:Failed to create _pinPcr in demuxer:{0:X}", hr);
          }

          hr = demuxer.CreateOutputPin(GetTSMedia(), "Subtitle", out _pinSubtitle);
          if (hr == 0)
          {
            Log.Info("RTSPPlayer:_pinSubtitle OK");

            IPin pDemuxerSubtitle = DsFindPin.ByName(_mpegDemux, "Subtitle");
            IPin pSubtitle = DsFindPin.ByName(_subtitleFilter, "In");
            hr = graphBuilder.Connect(pDemuxerSubtitle, pSubtitle);
          }
          else
          {
            Log.Info("RTSPPlayer:Failed to create _pinSubtitle in demuxer:{0:X}", hr);
          }

          hr = demuxer.CreateOutputPin(GetTSMedia(), "PMT", out _pinPMT);
          if (hr == 0)
          {
            Log.Info("RTSPPlayer:_pinPMT OK");

            IPin pDemuxerSubtitle = DsFindPin.ByName(_mpegDemux, "PMT");
            IPin pSubtitle = DsFindPin.ByName(_subtitleFilter, "PMT");
            hr = graphBuilder.Connect(pDemuxerSubtitle, pSubtitle);
          }
          else
          {
            Log.Info("RTSPPlayer:Failed to create _pinPMT in demuxer:{0:X}", hr);
          }
        }


        if (IsRadio == false)
        {
          if (!VMR9Util.g_vmr9.IsVMR9Connected)
          {
            //VMR9 is not supported, switch to overlay
            Log.Info("RTSPPlayer: vmr9 not connected");
            _mediaCtrl = null;
            Cleanup();
            return false;
          }
          VMR9Util.g_vmr9.SetDeinterlaceMode();
        }

        _mediaCtrl = (IMediaControl)graphBuilder;
        mediaEvt = (IMediaEventEx)graphBuilder;
        _mediaSeeking = (IMediaSeeking)graphBuilder;
        mediaPos = (IMediaPosition)graphBuilder;
        basicAudio = graphBuilder as IBasicAudio;
        //DirectShowUtil.SetARMode(graphBuilder,AspectRatioMode.Stretched);
        DirectShowUtil.EnableDeInterlace(graphBuilder);
        if (VMR9Util.g_vmr9 != null)
        {
          m_iVideoWidth = VMR9Util.g_vmr9.VideoWidth;
          m_iVideoHeight = VMR9Util.g_vmr9.VideoHeight;
        }
        if (audioRendererFilter != null)
        {
          Log.Info("RTSPPlayer9:set reference clock");
          IMediaFilter mp = graphBuilder as IMediaFilter;
          IReferenceClock clock = audioRendererFilter as IReferenceClock;
          hr = mp.SetSyncSource(null);
          hr = mp.SetSyncSource(clock);
          Log.Info("RTSPPlayer9:set reference clock:{0:X}", hr);
        }
        Log.Info("RTSPPlayer: graph build successfull");
        return true;
      }
      catch (Exception ex)
      {
        Error.SetError("Unable to play movie", "Unable build graph for VMR9");
        Log.Error("RTSPPlayer:exception while creating DShow graph {0} {1}", ex.Message, ex.StackTrace);
        CloseInterfaces();
        return false;
      }
    }
Code example #38
0
        public bool Transcode(TranscodeInfo info, MediaPortal.Core.Transcoding.VideoFormat format,
                              MediaPortal.Core.Transcoding.Quality quality, Standard standard)
        {
            if (!Supports(format))
            {
                return(false);
            }
            string ext = System.IO.Path.GetExtension(info.file);

            if (ext.ToLower() != ".ts" && ext.ToLower() != ".mpg")
            {
                Log.Info("TSReader2MP4: wrong file format");
                return(false);
            }
            try
            {
                graphBuilder = (IGraphBuilder) new FilterGraph();
                _rotEntry    = new DsROTEntry((IFilterGraph)graphBuilder);
                Log.Info("TSReader2MP4: add filesource");
                TsReader reader = new TsReader();
                tsreaderSource = (IBaseFilter)reader;
                IBaseFilter filter = (IBaseFilter)tsreaderSource;
                graphBuilder.AddFilter(filter, "TSReader Source");
                IFileSourceFilter fileSource = (IFileSourceFilter)tsreaderSource;
                Log.Info("TSReader2MP4: load file:{0}", info.file);
                int hr = fileSource.Load(info.file, null);
                //add audio/video codecs
                string strVideoCodec     = "";
                string strH264VideoCodec = "";
                string strAudioCodec     = "";
                string strAACAudioCodec  = "";
                using (MediaPortal.Profile.Settings xmlreader = new MediaPortal.Profile.MPSettings())
                {
                    strVideoCodec     = xmlreader.GetValueAsString("mytv", "videocodec", "");
                    strAudioCodec     = xmlreader.GetValueAsString("mytv", "audiocodec", "");
                    strAACAudioCodec  = xmlreader.GetValueAsString("mytv", "aacaudiocodec", "");
                    strH264VideoCodec = xmlreader.GetValueAsString("mytv", "h264videocodec", "");
                }
                //Find the type of decoder required for the output video & audio pins on TSReader.
                Log.Info("TSReader2MP4: find tsreader compatible audio/video decoders");
                IPin pinOut0, pinOut1;
                IPin pinIn0, pinIn1;
                pinOut0 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 0); //audio
                pinOut1 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 1); //video
                if (pinOut0 == null || pinOut1 == null)
                {
                    Log.Error("TSReader2MP4: FAILED: unable to get output pins of tsreader");
                    Cleanup();
                    return(false);
                }
                bool            usingAAC = false;
                IEnumMediaTypes enumMediaTypes;
                hr = pinOut0.EnumMediaTypes(out enumMediaTypes);
                while (true)
                {
                    AMMediaType[] mediaTypes = new AMMediaType[1];
                    int           typesFetched;
                    hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
                    if (hr != 0 || typesFetched == 0)
                    {
                        break;
                    }
                    if (mediaTypes[0].majorType == MediaType.Audio && mediaTypes[0].subType == MediaSubType.LATMAAC)
                    {
                        Log.Info("TSReader2MP4: found LATM AAC audio out pin on tsreader");
                        usingAAC = true;
                    }
                }
                bool usingH264 = false;
                hr = pinOut1.EnumMediaTypes(out enumMediaTypes);
                while (true)
                {
                    AMMediaType[] mediaTypes = new AMMediaType[1];
                    int           typesFetched;
                    hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
                    if (hr != 0 || typesFetched == 0)
                    {
                        break;
                    }
                    if (mediaTypes[0].majorType == MediaType.Video && mediaTypes[0].subType == AVC1)
                    {
                        Log.Info("TSReader2MP4: found H.264 video out pin on tsreader");
                        usingH264 = true;
                    }
                }
                //Add the type of decoder required for the output video & audio pins on TSReader.
                Log.Info("TSReader2MP4: add audio/video decoders to graph");
                if (usingH264 == false)
                {
                    Log.Info("TSReader2MP4: add mpeg2 video decoder:{0}", strVideoCodec);
                    VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
                    if (VideoCodec == null)
                    {
                        Log.Error("TSReader2MP4: unable to add mpeg2 video decoder");
                        Cleanup();
                        return(false);
                    }
                }
                else
                {
                    Log.Info("TSReader2MP4: add h264 video codec:{0}", strH264VideoCodec);
                    VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strH264VideoCodec);
                    if (VideoCodec == null)
                    {
                        Log.Error("TSReader2MP4: FAILED:unable to add h264 video codec");
                        Cleanup();
                        return(false);
                    }
                }
                if (usingAAC == false)
                {
                    Log.Info("TSReader2MP4: add mpeg2 audio codec:{0}", strAudioCodec);
                    AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
                    if (AudioCodec == null)
                    {
                        Log.Error("TSReader2MP4: FAILED:unable to add mpeg2 audio codec");
                        Cleanup();
                        return(false);
                    }
                }
                else
                {
                    Log.Info("TSReader2MP4: add aac audio codec:{0}", strAACAudioCodec);
                    AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAACAudioCodec);
                    if (AudioCodec == null)
                    {
                        Log.Error("TSReader2MP4: FAILED:unable to add aac audio codec");
                        Cleanup();
                        return(false);
                    }
                }
                Log.Info("TSReader2MP4: connect tsreader->audio/video decoders");
                //connect output #0 (audio) of tsreader->audio decoder input pin 0
                //connect output #1 (video) of tsreader->video decoder input pin 0
                pinIn0 = DsFindPin.ByDirection(AudioCodec, PinDirection.Input, 0); //audio
                pinIn1 = DsFindPin.ByDirection(VideoCodec, PinDirection.Input, 0); //video
                if (pinIn0 == null || pinIn1 == null)
                {
                    Log.Error("TSReader2MP4: FAILED: unable to get pins of video/audio codecs");
                    Cleanup();
                    return(false);
                }
                hr = graphBuilder.Connect(pinOut0, pinIn0);
                if (hr != 0)
                {
                    Log.Error("TSReader2MP4: FAILED: unable to connect audio pins :0x{0:X}", hr);
                    Cleanup();
                    return(false);
                }
                hr = graphBuilder.Connect(pinOut1, pinIn1);
                if (hr != 0)
                {
                    Log.Error("TSReader2MP4: FAILED: unable to connect video pins :0x{0:X}", hr);
                    Cleanup();
                    return(false);
                }
                //add encoders, muxer & filewriter
                if (!AddCodecs(graphBuilder, info))
                {
                    return(false);
                }
                //setup graph controls
                mediaControl = graphBuilder as IMediaControl;
                mediaSeeking = tsreaderSource as IMediaSeeking;
                mediaEvt     = graphBuilder as IMediaEventEx;
                mediaPos     = graphBuilder as IMediaPosition;
                //get file duration
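                 //estimate the duration: seek far past the end (5 hours), read back the actual
                 //position reached as the duration, then seek back to the start for the pre-run.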
                Log.Info("TSReader2MP4: Get duration of recording");
                long lTime = 5 * 60 * 60;
                lTime *= 10000000;
                long pStop = 0;
                hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                               AMSeekingSeekingFlags.NoPositioning);
                if (hr == 0)
                {
                    long lStreamPos;
                    mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
                    m_dDuration = lStreamPos;
                    lTime       = 0;
                    mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                              AMSeekingSeekingFlags.NoPositioning);
                }
                double duration = m_dDuration / 10000000d;
                Log.Info("TSReader2MP4: recording duration: {0}", MediaPortal.Util.Utils.SecondsToHMSString((int)duration));
                //run the graph to initialize the filters to be sure
                hr = mediaControl.Run();
                if (hr != 0)
                {
                    Log.Error("TSReader2MP4: FAILED: unable to start graph :0x{0:X}", hr);
                    Cleanup();
                    return(false);
                }
                int maxCount = 20;
                while (true)
                {
                    long lCurrent;
                    mediaSeeking.GetCurrentPosition(out lCurrent);
                    double dpos = (double)lCurrent;
                    dpos /= 10000000d;
                    System.Threading.Thread.Sleep(100);
                    if (dpos >= 2.0d)
                    {
                        break;
                    }
                    maxCount--;
                    if (maxCount <= 0)
                    {
                        break;
                    }
                }
                mediaControl.Stop();
                FilterState state;
                mediaControl.GetState(500, out state);
                GC.Collect();
                GC.Collect();
                GC.Collect();
                GC.WaitForPendingFinalizers();
                graphBuilder.RemoveFilter(mp4Muxer);
                graphBuilder.RemoveFilter(h264Encoder);
                graphBuilder.RemoveFilter(aacEncoder);
                graphBuilder.RemoveFilter((IBaseFilter)fileWriterFilter);
                if (!AddCodecs(graphBuilder, info))
                {
                    return(false);
                }
                //Set Encoder quality & Muxer settings
                if (!EncoderSet(graphBuilder, info))
                {
                    return(false);
                }
                //start transcoding - run the graph
                Log.Info("TSReader2MP4: start transcoding");
                 //setup flow control
                 //need to leverage CBasePin, CPullPin & IAsyncReader methods.
                 IAsyncReader synchVideo = null;
                 mediaSample = VideoCodec as IMediaSample;
                 //guard the read: both values above are null here, so the original unconditional
                 //call always threw a NullReferenceException and aborted the transcode.
                 if (synchVideo != null && mediaSample != null)
                 {
                     hr = synchVideo.SyncReadAligned(mediaSample);
                 }
                 //So we only parse decoder output when the encoders are ready.
                hr = mediaControl.Run();
                if (hr != 0)
                {
                    Log.Error("TSReader2MP4: FAILED:unable to start graph :0x{0:X}", hr);
                    Cleanup();
                    return(false);
                }
            }
            catch (Exception ex)
            {
                Log.Error("TSReader2MP4: Unable create graph: {0}", ex.Message);
                Cleanup();
                return(false);
            }
            return(true);
        }
Code example #39
0
File: CCDDevice.cs Project: KrisJanssen/SIS
        /// <summary>
        /// Worker thread that captures the images
        /// </summary>
        private void RunWorker()
        {
            try
            {
                // Create the main graph
                m_igrphbldGraph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;

                // Create the webcam source
                m_sourceObject = FilterInfo.CreateFilter(m_sMonikerString);

                // Create the grabber
                m_isplGrabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
                m_grabberObject = m_isplGrabber as IBaseFilter;

                // Add the source and grabber to the main graph
                m_igrphbldGraph.AddFilter(m_sourceObject, "source");
                m_igrphbldGraph.AddFilter(m_grabberObject, "grabber");

                using (AMMediaType mediaType = new AMMediaType())
                {
                    mediaType.MajorType = MediaTypes.Video;
                    mediaType.SubType = MediaSubTypes.RGB32;
                    m_isplGrabber.SetMediaType(mediaType);

                    if (m_igrphbldGraph.Connect(m_sourceObject.GetPin(PinDirection.Output, 0), m_grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
                    {
                        if (m_isplGrabber.GetConnectedMediaType(mediaType) == 0)
                        {
                            // During startup, this code can be too fast, so try at least 3 times
                            int retryCount = 0;
                            bool succeeded = false;
                            while ((retryCount < 3) && !succeeded)
                            {
                                // Tried again
                                retryCount++;

                                try
                                {
                                    // Retrieve the grabber information
                                    VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                                    m_grbrCapGrabber.Width = header.BmiHeader.Width;
                                    m_grbrCapGrabber.Height = header.BmiHeader.Height;

                                    // Succeeded
                                    succeeded = true;
                                }
                                catch (Exception retryException)
                                {
                                    // Trace
                                    Trace.TraceInformation("Failed to retrieve the grabber information, tried {0} time(s)", retryCount);

                                    // Sleep
                                    Thread.Sleep(50);
                                }
                            }
                        }
                    }
                    m_igrphbldGraph.Render(m_grabberObject.GetPin(PinDirection.Output, 0));
                    m_isplGrabber.SetBufferSamples(false);
                    m_isplGrabber.SetOneShot(false);
                    m_isplGrabber.SetCallback(m_grbrCapGrabber, 1);

                    // Get the video window
                    IVideoWindow wnd = (IVideoWindow)m_igrphbldGraph;
                    wnd.put_AutoShow(false);
                    wnd = null;

                    // Create the control and run
                    m_imedctrlControl = (IMediaControl)m_igrphbldGraph;
                    m_imedctrlControl.Run();

                    // Wait for the stop signal
                    while (!m_rstevStopSignal.WaitOne(0, true))
                    {
                        Thread.Sleep(10);
                    }

                    // Stop when ready
                    // _control.StopWhenReady();
                    m_imedctrlControl.Stop();

                    // Wait a bit... It apparently takes some time to stop IMediaControl
                    Thread.Sleep(1000);
                }
            }
            catch (Exception ex)
            {
                // Trace
                Trace.WriteLine(ex);
            }
            finally
            {
                // Clean up
                this.Release();
            }
        }
Code example #40
0
 public bool Transcode(TranscodeInfo info, VideoFormat format, Quality quality, Standard standard)
 {
     try
     {
         if (!Supports(format))
         {
             return(false);
         }
         string ext = System.IO.Path.GetExtension(info.file);
         if (ext.ToLower() != ".dvr-ms" && ext.ToLower() != ".sbe")
         {
             Log.Info("DVRMS2WMV: wrong file format");
             return(false);
         }
         Log.Info("DVRMS2WMV: create graph");
         graphBuilder = (IGraphBuilder) new FilterGraph();
         _rotEntry    = new DsROTEntry((IFilterGraph)graphBuilder);
         Log.Info("DVRMS2WMV: add streambuffersource");
         bufferSource = (IStreamBufferSource) new StreamBufferSource();
         IBaseFilter filter = (IBaseFilter)bufferSource;
         graphBuilder.AddFilter(filter, "SBE SOURCE");
         Log.Info("DVRMS2WMV: load file:{0}", info.file);
         IFileSourceFilter fileSource = (IFileSourceFilter)bufferSource;
         int hr = fileSource.Load(info.file, null);
         //add mpeg2 audio/video codecs
         string strVideoCodec = "";
         string strAudioCodec = "";
         using (MediaPortal.Profile.Settings xmlreader = new MediaPortal.Profile.MPSettings())
         {
             strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "MPC - MPEG-2 Video Decoder (Gabest)");
             strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "MPC - MPA Decoder Filter");
         }
         Log.Info("DVRMS2WMV: add mpeg2 video codec:{0}", strVideoCodec);
         Mpeg2VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
          if (Mpeg2VideoCodec == null)
          {
              Log.Error("DVRMS2WMV:FAILED:unable to add mpeg2 video codec to filtergraph");
             Cleanup();
             return(false);
         }
         Log.Info("DVRMS2WMV: add mpeg2 audio codec:{0}", strAudioCodec);
         Mpeg2AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
         if (Mpeg2AudioCodec == null)
         {
             Log.Error("DVRMS2WMV:FAILED:unable to add mpeg2 audio codec");
             Cleanup();
             return(false);
         }
         Log.Info("DVRMS2WMV: connect streambufer source->mpeg audio/video decoders");
         //connect output #0 of streambuffer source->mpeg2 audio codec pin 1
         //connect output #1 of streambuffer source->mpeg2 video codec pin 1
         IPin pinOut0, pinOut1;
         IPin pinIn0, pinIn1;
         pinOut0 = DsFindPin.ByDirection((IBaseFilter)bufferSource, PinDirection.Output, 0); //audio
         pinOut1 = DsFindPin.ByDirection((IBaseFilter)bufferSource, PinDirection.Output, 1); //video
         if (pinOut0 == null || pinOut1 == null)
         {
             Log.Error("DVRMS2WMV:FAILED:unable to get pins of source");
             Cleanup();
             return(false);
         }
         pinIn0 = DsFindPin.ByDirection(Mpeg2VideoCodec, PinDirection.Input, 0); //video
         pinIn1 = DsFindPin.ByDirection(Mpeg2AudioCodec, PinDirection.Input, 0); //audio
         if (pinIn0 == null || pinIn1 == null)
         {
             Log.Error("DVRMS2WMV:FAILED:unable to get pins of mpeg2 video/audio codec");
             Cleanup();
             return(false);
         }
         hr = graphBuilder.Connect(pinOut0, pinIn1);
         if (hr != 0)
         {
             Log.Error("DVRMS2WMV:FAILED:unable to connect audio pins :0x{0:X}", hr);
             Cleanup();
             return(false);
         }
         hr = graphBuilder.Connect(pinOut1, pinIn0);
         if (hr != 0)
         {
             Log.Error("DVRMS2WMV:FAILED:unable to connect video pins :0x{0:X}", hr);
             Cleanup();
             return(false);
         }
         string outputFilename = System.IO.Path.ChangeExtension(info.file, ".wmv");
         if (!AddWmAsfWriter(outputFilename, quality, standard))
         {
             return(false);
         }
         Log.Info("DVRMS2WMV: start pre-run");
         mediaControl = graphBuilder as IMediaControl;
         mediaSeeking = bufferSource as IStreamBufferMediaSeeking;
         mediaEvt     = graphBuilder as IMediaEventEx;
         mediaPos     = graphBuilder as IMediaPosition;
         //get file duration
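          //estimate the duration: seek far past the end (5 hours), read back the actual
          //position reached as the duration, then seek back to the start for the pre-run.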
         long lTime = 5 * 60 * 60;
         lTime *= 10000000;
         long pStop = 0;
         hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                        AMSeekingSeekingFlags.NoPositioning);
         if (hr == 0)
         {
             long lStreamPos;
             mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
             m_dDuration = lStreamPos;
             lTime       = 0;
             mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                       AMSeekingSeekingFlags.NoPositioning);
         }
         double duration = m_dDuration / 10000000d;
         Log.Info("DVRMS2WMV: movie duration:{0}", Util.Utils.SecondsToHMSString((int)duration));
         hr = mediaControl.Run();
         if (hr != 0)
         {
             Log.Error("DVRMS2WMV:FAILED:unable to start graph :0x{0:X}", hr);
             Cleanup();
             return(false);
         }
         int maxCount = 20;
         while (true)
         {
             long lCurrent;
             mediaSeeking.GetCurrentPosition(out lCurrent);
             double dpos = (double)lCurrent;
             dpos /= 10000000d;
             System.Threading.Thread.Sleep(100);
             if (dpos >= 2.0d)
             {
                 break;
             }
             maxCount--;
             if (maxCount <= 0)
             {
                 break;
             }
         }
         Log.Info("DVRMS2WMV: pre-run done");
         Log.Info("DVRMS2WMV: Get duration of movie");
         mediaControl.Stop();
         FilterState state;
         mediaControl.GetState(500, out state);
         GC.Collect();
         GC.Collect();
         GC.Collect();
         GC.WaitForPendingFinalizers();
         Log.Info("DVRMS2WMV: reconnect mpeg2 video codec->ASF WM Writer");
         graphBuilder.RemoveFilter(fileWriterbase);
         if (!AddWmAsfWriter(outputFilename, quality, standard))
         {
             return(false);
         }
         Log.Info("DVRMS2WMV: Start transcoding");
         hr = mediaControl.Run();
         if (hr != 0)
         {
             Log.Error("DVRMS2WMV:FAILED:unable to start graph :0x{0:X}", hr);
             Cleanup();
             return(false);
         }
     }
     catch (Exception e)
     {
         // TODO: Handle exceptions.
         Log.Error("unable to transcode file:{0} message:{1}", info.file, e.Message);
         return(false);
     }
     return(true);
 }
Code example #41
0
        // Thread entry point
        public void WorkerThread()
        {
            // grabber
            Grabber grabber = new Grabber(this);

            // objects
            object graphObj   = null;
            object sourceObj  = null;
            object grabberObj = null;

            // interfaces
            IGraphBuilder  graph       = null;
            IBaseFilter    sourceBase  = null;
            IBaseFilter    grabberBase = null;
            ISampleGrabber sg          = null;
            IMediaControl  mc          = null;

            try
            {
                // Get type for filter graph
                Type srvType = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (srvType == null)
                {
                    throw new ApplicationException("Failed creating filter graph");
                }

                // create filter graph
                graphObj = Activator.CreateInstance(srvType);
                graph    = (IGraphBuilder)graphObj;

                // ----
                UCOMIBindCtx bindCtx = null;
                UCOMIMoniker moniker = null;
                int          n       = 0;

                // create bind context
                if (Win32.CreateBindCtx(0, out bindCtx) == 0)
                {
                    // convert moniker's string to a moniker
                    if (Win32.MkParseDisplayName(bindCtx, source, ref n, out moniker) == 0)
                    {
                        // get device base filter
                        Guid filterId = typeof(IBaseFilter).GUID;
                        moniker.BindToObject(null, null, ref filterId, out sourceObj);

                        Marshal.ReleaseComObject(moniker);
                        moniker = null;
                    }
                    Marshal.ReleaseComObject(bindCtx);
                    bindCtx = null;
                }
                // ----

                if (sourceObj == null)
                {
                    throw new ApplicationException("Failed creating device object for moniker");
                }

                sourceBase = (IBaseFilter)sourceObj;

                // Get type for sample grabber
                srvType = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (srvType == null)
                {
                    throw new ApplicationException("Failed creating sample grabber");
                }

                // create sample grabber
                grabberObj  = Activator.CreateInstance(srvType);
                sg          = (ISampleGrabber)grabberObj;
                grabberBase = (IBaseFilter)grabberObj;

                // add source filter to graph
                graph.AddFilter(sourceBase, "source");
                graph.AddFilter(grabberBase, "grabber");

                // set media type
                AMMediaType mt = new AMMediaType();
                mt.majorType = MediaType.Video;
                mt.subType   = MediaSubType.RGB24;
                sg.SetMediaType(mt);

                // connect pins
                if (graph.Connect(DSTools.GetOutPin(sourceBase, 0), DSTools.GetInPin(grabberBase, 0)) < 0)
                {
                    throw new ApplicationException("Failed connecting filters");
                }

                // get media type
                if (sg.GetConnectedMediaType(mt) == 0)
                {
                    VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));

                    grabber.Width  = vih.BmiHeader.Width;
                    grabber.Height = vih.BmiHeader.Height;
                    mt.Dispose();
                }

                // render
                graph.Render(DSTools.GetOutPin(grabberBase, 0));

                //
                sg.SetBufferSamples(false);
                sg.SetOneShot(false);
                sg.SetCallback(grabber, 1);

                // window
                IVideoWindow win = (IVideoWindow)graphObj;
                win.put_AutoShow(false);
                win = null;


                // get media control
                mc = (IMediaControl)graphObj;

                // run
                mc.Run();

                while (!stopEvent.WaitOne(0, true))
                {
                    Thread.Sleep(100);
                }
                mc.StopWhenReady();
            }
            // catch any exceptions
            catch (Exception e)
            {
                System.Diagnostics.Debug.WriteLine("----: " + e.Message);
            }
            // finalization block
            finally
            {
                // release all objects
                mc          = null;
                graph       = null;
                sourceBase  = null;
                grabberBase = null;
                sg          = null;

                if (graphObj != null)
                {
                    Marshal.ReleaseComObject(graphObj);
                    graphObj = null;
                }
                if (sourceObj != null)
                {
                    Marshal.ReleaseComObject(sourceObj);
                    sourceObj = null;
                }
                if (grabberObj != null)
                {
                    Marshal.ReleaseComObject(grabberObj);
                    grabberObj = null;
                }
            }
        }
Code example #42
0
File: Form1.cs Project: possan/randomjunk
        private void Form1_Load(object sender, EventArgs e)
        {
            m_PictureReady = new ManualResetEvent(false);

            //  PlayMovieInWindow( "c:\\temp\\beckscen2.avi" );
            int hr;

            this.graphBuilder = (IGraphBuilder)new FilterGraph();
            //            this.graphBuilder.AddFilter(

            // Get the SampleGrabber interface
            ISampleGrabber sampGrabber = new SampleGrabber() as ISampleGrabber;

            IBaseFilter ibfRenderer = null;
            IBaseFilter capFilter = null;
            IPin iPinInFilter = null;
            IPin iPinOutFilter = null;
            IPin iPinInDest = null;

            // Add the video source
            hr = graphBuilder.AddSourceFilter(Filename, "Ds.NET FileFilter", out capFilter);
            DsError.ThrowExceptionForHR(hr);

            // Hopefully this will be the video pin
            IPin iPinOutSource = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0);

            IBaseFilter baseGrabFlt = sampGrabber as IBaseFilter;
            ConfigureSampleGrabber(sampGrabber);

            iPinInFilter = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0);
            iPinOutFilter = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Output, 0);

            // Add the frame grabber to the graph
            hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");
            DsError.ThrowExceptionForHR(hr);

            hr = graphBuilder.Connect(iPinOutSource, iPinInFilter);
            DsError.ThrowExceptionForHR(hr);

            // Get the default video renderer
            ibfRenderer = (IBaseFilter)new VideoRendererDefault();

            // Add it to the graph
            hr = graphBuilder.AddFilter(ibfRenderer, "Ds.NET VideoRendererDefault");
            DsError.ThrowExceptionForHR(hr);
            iPinInDest = DsFindPin.ByDirection(ibfRenderer, PinDirection.Input, 0);

            // Connect the graph.  Many other filters automatically get added here
            hr = graphBuilder.Connect(iPinOutFilter, iPinInDest);
            DsError.ThrowExceptionForHR(hr);

            this.mediaControl = (IMediaControl)this.graphBuilder;
            this.mediaEventEx = (IMediaEventEx)this.graphBuilder;
            this.mediaSeeking = (IMediaSeeking)this.graphBuilder;
            this.mediaPosition = (IMediaPosition)this.graphBuilder;

            this.videoWindow = this.graphBuilder as IVideoWindow;
            this.basicVideo = this.graphBuilder as IBasicVideo;
            this.basicAudio = this.graphBuilder as IBasicAudio;

            hr = this.videoWindow.put_Owner(panel1.Handle);
            DsError.ThrowExceptionForHR(hr);

            hr = this.videoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipSiblings | WindowStyle.ClipChildren);
            DsError.ThrowExceptionForHR(hr);

            if (this.basicVideo == null)
                return;

            int lHeight, lWidth;
            hr = this.basicVideo.GetVideoSize(out lWidth, out lHeight);

            Console.WriteLine("video: %d x %d\n", lWidth, lHeight);

            m_videoWidth = lWidth;
            m_videoHeight = lHeight;
            SaveSizeInfo(sampGrabber);
            newbitmap = new Bitmap(lWidth, lHeight, PixelFormat.Format24bppRgb);
            origbitmap = new Bitmap(lWidth, lHeight, PixelFormat.Format24bppRgb);
            m_ImageChanged = true;
            pictureBox1.Width = lWidth;
            pictureBox1.Height = lHeight;
            pictureBox2.Width = lWidth;
            pictureBox2.Height = lHeight;
            pictureBox2.Top = pictureBox1.Top + lHeight + 4;

            duration = 0.0f;
            this.mediaPosition.get_Duration(out duration);

            m_ipBuffer = Marshal.AllocCoTaskMem(Math.Abs(m_stride) * m_videoHeight);

            //             this.ClientSize = new Size(lWidth, lHeight);
            Application.DoEvents();

            hr = this.videoWindow.SetWindowPosition(0, 0, panel1.Width, panel1.Height);

            this.mediaControl.Run();
            timer1.Enabled = true;

            //            buildCaptureGRaph( this.de ( (capDevices[iDeviceNum], iWidth, iHeight, iBPP, hControl);

            //            buildCaptureaph();
        }
Code example #43
0
    /// <summary>
    /// helper function to connect 2 filters
    /// </summary>
    /// <param name="graphBuilder">graph builder interface</param>
    /// <param name="pinSource">souce pin</param>
    /// <param name="filterDest">destination filter</param>
    /// <param name="destPinIndex">input pin index</param>
    public static bool ConnectPin(IGraphBuilder graphBuilder, IPin pinSource, IBaseFilter filterDest, int destPinIndex)
    {
      IPin pin;
      pinSource.ConnectedTo(out pin);
      if (pin != null)
      {
        Release.ComObject("Connect Pin", pin);
        return false;
      }
      IPin pinDest = DsFindPin.ByDirection(filterDest, PinDirection.Input, destPinIndex);
      if (pinDest == null)
        return false;

      int hr = graphBuilder.Connect(pinSource, pinDest);
      if (hr != 0)
      {
        Release.ComObject("Connect Pin", pinDest);
        return false;
      }
      Release.ComObject("Connect Pin", pinDest);
      return true;
    }
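
A rough usage sketch for the helper above (the filter and graph variables are placeholders): locate the source filter's first output pin, try to connect it to the destination filter's first input pin, and release the pin afterwards.

      IPin sourceOut = DsFindPin.ByDirection(sourceFilter, PinDirection.Output, 0);
      if (sourceOut != null)
      {
        bool connected = ConnectPin(graphBuilder, sourceOut, destinationFilter, 0);
        Release.ComObject("source out", sourceOut);
        if (!connected)
        {
          // fall back to rendering the pin or report the failure
        }
      }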
コード例 #44
ファイル: TSReader2MP4.cs プロジェクト: arangas/MediaPortal-1
 public bool Transcode(TranscodeInfo info, MediaPortal.Core.Transcoding.VideoFormat format,
                       MediaPortal.Core.Transcoding.Quality quality, Standard standard)
 {
   if (!Supports(format)) return false;
   string ext = System.IO.Path.GetExtension(info.file);
   if (ext.ToLowerInvariant() != ".ts" && ext.ToLowerInvariant() != ".mpg")
   {
     Log.Info("TSReader2MP4: wrong file format");
     return false;
   }
   try
   {
     graphBuilder = (IGraphBuilder)new FilterGraph();
     _rotEntry = new DsROTEntry((IFilterGraph)graphBuilder);
     Log.Info("TSReader2MP4: add filesource");
     TsReader reader = new TsReader();
     tsreaderSource = (IBaseFilter)reader;
     IBaseFilter filter = (IBaseFilter)tsreaderSource;
     graphBuilder.AddFilter(filter, "TSReader Source");
     IFileSourceFilter fileSource = (IFileSourceFilter)tsreaderSource;
     Log.Info("TSReader2MP4: load file:{0}", info.file);
     int hr = fileSource.Load(info.file, null);
     //add audio/video codecs
     string strVideoCodec = "";
     string strH264VideoCodec = "";
     string strAudioCodec = "";
     string strAACAudioCodec = "";
     using (MediaPortal.Profile.Settings xmlreader = new MediaPortal.Profile.MPSettings())
     {
       strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "");
       strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "");
       strAACAudioCodec = xmlreader.GetValueAsString("mytv", "aacaudiocodec", "");
       strH264VideoCodec = xmlreader.GetValueAsString("mytv", "h264videocodec", "");
     }
     //Find the type of decoder required for the output video & audio pins on TSReader.
     Log.Info("TSReader2MP4: find tsreader compatible audio/video decoders");
     IPin pinOut0, pinOut1;
     IPin pinIn0, pinIn1;
     pinOut0 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 0); //audio
     pinOut1 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 1); //video
     if (pinOut0 == null || pinOut1 == null)
     {
       Log.Error("TSReader2MP4: FAILED: unable to get output pins of tsreader");
       Cleanup();
       return false;
     }
     bool usingAAC = false;
     IEnumMediaTypes enumMediaTypes;
     hr = pinOut0.EnumMediaTypes(out enumMediaTypes);
     while (true)
     {
       AMMediaType[] mediaTypes = new AMMediaType[1];
       int typesFetched;
       hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
       if (hr != 0 || typesFetched == 0) break;
       if (mediaTypes[0].majorType == MediaType.Audio && mediaTypes[0].subType == MediaSubType.LATMAAC)
       {
         Log.Info("TSReader2MP4: found LATM AAC audio out pin on tsreader");
         usingAAC = true;
       }
     }
     bool usingH264 = false;
     hr = pinOut1.EnumMediaTypes(out enumMediaTypes);
     while (true)
     {
       AMMediaType[] mediaTypes = new AMMediaType[1];
       int typesFetched;
       hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
       if (hr != 0 || typesFetched == 0) break;
       if (mediaTypes[0].majorType == MediaType.Video && mediaTypes[0].subType == AVC1)
       {
         Log.Info("TSReader2MP4: found H.264 video out pin on tsreader");
         usingH264 = true;
       }
     }
     //Add the type of decoder required for the output video & audio pins on TSReader.
     Log.Info("TSReader2MP4: add audio/video decoders to graph");
     if (usingH264 == false)
     {
       Log.Info("TSReader2MP4: add mpeg2 video decoder:{0}", strVideoCodec);
       VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
       if (VideoCodec == null)
       {
         Log.Error("TSReader2MP4: unable to add mpeg2 video decoder");
         Cleanup();
         return false;
       }
     }
     else
     {
       Log.Info("TSReader2MP4: add h264 video codec:{0}", strH264VideoCodec);
       VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strH264VideoCodec);
       if (VideoCodec == null)
       {
         Log.Error("TSReader2MP4: FAILED:unable to add h264 video codec");
         Cleanup();
         return false;
       }
     }
     if (usingAAC == false)
     {
       Log.Info("TSReader2MP4: add mpeg2 audio codec:{0}", strAudioCodec);
       AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
       if (AudioCodec == null)
       {
         Log.Error("TSReader2MP4: FAILED:unable to add mpeg2 audio codec");
         Cleanup();
         return false;
       }
     }
     else
     {
       Log.Info("TSReader2MP4: add aac audio codec:{0}", strAACAudioCodec);
       AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAACAudioCodec);
       if (AudioCodec == null)
       {
         Log.Error("TSReader2MP4: FAILED:unable to add aac audio codec");
         Cleanup();
         return false;
       }
     }
     Log.Info("TSReader2MP4: connect tsreader->audio/video decoders");
     //connect output #0 (audio) of tsreader->audio decoder input pin 0
     //connect output #1 (video) of tsreader->video decoder input pin 0
     pinIn0 = DsFindPin.ByDirection(AudioCodec, PinDirection.Input, 0); //audio
     pinIn1 = DsFindPin.ByDirection(VideoCodec, PinDirection.Input, 0); //video
     if (pinIn0 == null || pinIn1 == null)
     {
       Log.Error("TSReader2MP4: FAILED: unable to get pins of video/audio codecs");
       Cleanup();
       return false;
     }
     hr = graphBuilder.Connect(pinOut0, pinIn0);
     if (hr != 0)
     {
       Log.Error("TSReader2MP4: FAILED: unable to connect audio pins :0x{0:X}", hr);
       Cleanup();
       return false;
     }
     hr = graphBuilder.Connect(pinOut1, pinIn1);
     if (hr != 0)
     {
       Log.Error("TSReader2MP4: FAILED: unable to connect video pins :0x{0:X}", hr);
       Cleanup();
       return false;
     }
     //add encoders, muxer & filewriter
     if (!AddCodecs(graphBuilder, info)) return false;
     //setup graph controls
     mediaControl = graphBuilder as IMediaControl;
     mediaSeeking = tsreaderSource as IMediaSeeking;
     mediaEvt = graphBuilder as IMediaEventEx;
     mediaPos = graphBuilder as IMediaPosition;
     //get file duration
     Log.Info("TSReader2MP4: Get duration of recording");
     long lTime = 5 * 60 * 60;
     lTime *= 10000000;
     long pStop = 0;
     hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                    AMSeekingSeekingFlags.NoPositioning);
     if (hr == 0)
     {
       long lStreamPos;
       mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
       m_dDuration = lStreamPos;
       lTime = 0;
       mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                 AMSeekingSeekingFlags.NoPositioning);
     }
     double duration = m_dDuration / 10000000d;
     Log.Info("TSReader2MP4: recording duration: {0}", MediaPortal.Util.Utils.SecondsToHMSString((int)duration));
     //run the graph to initialize the filters to be sure
     hr = mediaControl.Run();
     if (hr != 0)
     {
       Log.Error("TSReader2MP4: FAILED: unable to start graph :0x{0:X}", hr);
       Cleanup();
       return false;
     }
     int maxCount = 20;
     while (true)
     {
       long lCurrent;
       mediaSeeking.GetCurrentPosition(out lCurrent);
       double dpos = (double)lCurrent;
       dpos /= 10000000d;
       System.Threading.Thread.Sleep(100);
       if (dpos >= 2.0d) break;
       maxCount--;
       if (maxCount <= 0) break;
     }
     mediaControl.Stop();
     FilterState state;
     mediaControl.GetState(500, out state);
     GC.Collect();
     GC.Collect();
     GC.Collect();
     GC.WaitForPendingFinalizers();
     graphBuilder.RemoveFilter(mp4Muxer);
     graphBuilder.RemoveFilter(h264Encoder);
     graphBuilder.RemoveFilter(aacEncoder);
     graphBuilder.RemoveFilter((IBaseFilter)fileWriterFilter);
     if (!AddCodecs(graphBuilder, info)) return false;
     //Set Encoder quality & Muxer settings
     if (!EncoderSet(graphBuilder, info)) return false;
     //start transcoding - run the graph
     Log.Info("TSReader2MP4: start transcoding");
      //setup flow control
      //need to leverage CBasePin, CPullPin & IAsyncReader methods
      //so that decoder output is only parsed when the encoders are ready.
      IAsyncReader synchVideo = null;
      mediaSample = VideoCodec as IMediaSample;
      //synchVideo is never assigned and a decoder filter is not an IMediaSample,
      //so only attempt the aligned read when both interfaces are actually available.
      if (synchVideo != null && mediaSample != null)
      {
        hr = synchVideo.SyncReadAligned(mediaSample);
      }
     hr = mediaControl.Run();
     if (hr != 0)
     {
       Log.Error("TSReader2MP4: FAILED:unable to start graph :0x{0:X}", hr);
       Cleanup();
       return false;
     }
   }
   catch (Exception ex)
   {
     Log.Error("TSReader2MP4: Unable create graph: {0}", ex.Message);
     Cleanup();
     return false;
   }
   return true;
 }
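
The duration probe above works in DirectShow REFERENCE_TIME units (100-nanosecond ticks), which is why 5 hours is written as 5 * 60 * 60 seconds multiplied by 10,000,000 and why the measured position is divided by 10000000d. A small conversion sketch for clarity (helper names are illustrative):

   // REFERENCE_TIME counts 100-nanosecond ticks.
   const long TicksPerSecond = 10000000;

   static double RefTimeToSeconds(long refTime)
   {
     return refTime / (double)TicksPerSecond;
   }

   static long SecondsToRefTime(double seconds)
   {
     return (long)(seconds * TicksPerSecond);
   }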
コード例 #45
ファイル: RTSPPlayer.cs プロジェクト: usermonk/MediaPortal-1
        /// <summary> create the used COM components and get the interfaces. </summary>
        protected bool GetInterfaces()
        {
            VMR9Util.g_vmr9 = null;
            if (IsRadio == false)
            {
                Vmr9 = VMR9Util.g_vmr9 = new VMR9Util();

                // switch back to directx fullscreen mode
                Log.Info("RTSPPlayer: Enabling DX9 exclusive mode");
                GUIMessage msg = new GUIMessage(GUIMessage.MessageType.GUI_MSG_SWITCH_FULL_WINDOWED, 0, 0, 0, 1, 0, null);
                GUIWindowManager.SendMessage(msg);
            }
            //Type comtype = null;
            //object comobj = null;

            DsRect rect = new DsRect();

            rect.top    = 0;
            rect.bottom = GUIGraphicsContext.form.Height;
            rect.left   = 0;
            rect.right  = GUIGraphicsContext.form.Width;


            try
            {
                graphBuilder = (IGraphBuilder) new FilterGraph();

                Log.Info("RTSPPlayer: add source filter");
                if (IsRadio == false)
                {
                    bool AddVMR9 = VMR9Util.g_vmr9 != null && VMR9Util.g_vmr9.AddVMR9(graphBuilder);
                    if (!AddVMR9)
                    {
                        Log.Error("RTSPPlayer:Failed to add VMR9 to graph");
                        return(false);
                    }
                    VMR9Util.g_vmr9.Enable(false);
                }

                _mpegDemux = (IBaseFilter) new MPEG2Demultiplexer();
                graphBuilder.AddFilter(_mpegDemux, "MPEG-2 Demultiplexer");

                _rtspSource = (IBaseFilter) new RtpSourceFilter();
                int hr = graphBuilder.AddFilter((IBaseFilter)_rtspSource, "RTSP Source Filter");
                if (hr != 0)
                {
                    Log.Error("RTSPPlayer:unable to add RTSP source filter:{0:X}", hr);
                    return(false);
                }

                // add preferred video & audio codecs
                Log.Info("RTSPPlayer: add video/audio codecs");
                string strVideoCodec               = "";
                string strAudioCodec               = "";
                string strAudiorenderer            = "";
                int    intFilters                  = 0;  // FlipGer: count custom filters
                string strFilters                  = ""; // FlipGer: collect custom filters
                string postProcessingFilterSection = "mytv";
                using (Settings xmlreader = new MPSettings())
                {
                    if (_mediaType == g_Player.MediaType.Video)
                    {
                        strVideoCodec               = xmlreader.GetValueAsString("movieplayer", "mpeg2videocodec", "");
                        strAudioCodec               = xmlreader.GetValueAsString("movieplayer", "mpeg2audiocodec", "");
                        strAudiorenderer            = xmlreader.GetValueAsString("movieplayer", "audiorenderer", "Default DirectSound Device");
                        postProcessingFilterSection = "movieplayer";
                    }
                    else
                    {
                        strVideoCodec               = xmlreader.GetValueAsString("mytv", "videocodec", "");
                        strAudioCodec               = xmlreader.GetValueAsString("mytv", "audiocodec", "");
                        strAudiorenderer            = xmlreader.GetValueAsString("mytv", "audiorenderer", "Default DirectSound Device");
                        postProcessingFilterSection = "mytv";
                    }
                    enableDvbSubtitles = xmlreader.GetValueAsBool("tvservice", "dvbsubtitles", false);
                    // FlipGer: load infos for custom filters
                    int intCount = 0;
                    while (xmlreader.GetValueAsString(postProcessingFilterSection, "filter" + intCount.ToString(), "undefined") !=
                           "undefined")
                    {
                        if (xmlreader.GetValueAsBool(postProcessingFilterSection, "usefilter" + intCount.ToString(), false))
                        {
                            strFilters +=
                                xmlreader.GetValueAsString(postProcessingFilterSection, "filter" + intCount.ToString(), "undefined") +
                                ";";
                            intFilters++;
                        }
                        intCount++;
                    }
                }
                string extension = Path.GetExtension(m_strCurrentFile).ToLowerInvariant();
                if (IsRadio == false)
                {
                    if (strVideoCodec.Length > 0)
                    {
                        DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
                    }
                }
                if (strAudioCodec.Length > 0)
                {
                    DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
                }

                if (enableDvbSubtitles == true)
                {
                    try
                    {
                        _subtitleFilter = SubtitleRenderer.GetInstance().AddSubtitleFilter(graphBuilder);
                        SubtitleRenderer.GetInstance().SetPlayer(this);
                        dvbSubRenderer = SubtitleRenderer.GetInstance();
                    }
                    catch (Exception e)
                    {
                        Log.Error(e);
                    }
                }

                Log.Debug("Is subtitle fitler null? {0}", (_subtitleFilter == null));
                // FlipGer: add custom filters to graph
                string[] arrFilters = strFilters.Split(';');
                for (int i = 0; i < intFilters; i++)
                {
                    DirectShowUtil.AddFilterToGraph(graphBuilder, arrFilters[i]);
                }
                if (strAudiorenderer.Length > 0)
                {
                    audioRendererFilter = DirectShowUtil.AddAudioRendererToGraph(graphBuilder, strAudiorenderer, false);
                }

                Log.Info("RTSPPlayer: load:{0}", m_strCurrentFile);
                IFileSourceFilter interfaceFile = (IFileSourceFilter)_rtspSource;
                if (interfaceFile == null)
                {
                    Log.Error("RTSPPlayer:Failed to get IFileSourceFilter");
                    return(false);
                }

                //Log.Info("RTSPPlayer: open file:{0}",filename);
                hr = interfaceFile.Load(m_strCurrentFile, null);
                if (hr != 0)
                {
                    Log.Error("RTSPPlayer:Failed to open file:{0} :0x{1:x}", m_strCurrentFile, hr);
                    return(false);
                }

                #region connect rtspsource->demux

                Log.Info("RTSPPlayer:connect rtspsource->mpeg2 demux");
                IPin pinTsOut = DsFindPin.ByDirection((IBaseFilter)_rtspSource, PinDirection.Output, 0);
                if (pinTsOut == null)
                {
                    Log.Info("RTSPPlayer:failed to find output pin of tsfilesource");
                    return(false);
                }
                IPin pinDemuxIn = DsFindPin.ByDirection(_mpegDemux, PinDirection.Input, 0);
                if (pinDemuxIn == null)
                {
                    Log.Info("RTSPPlayer:failed to find output pin of tsfilesource");
                    return(false);
                }

                hr = graphBuilder.Connect(pinTsOut, pinDemuxIn);
                if (hr != 0)
                {
                    Log.Info("RTSPPlayer:failed to connect rtspsource->mpeg2 demux:{0:X}", hr);
                    return(false);
                }
                DirectShowUtil.ReleaseComObject(pinTsOut);
                DirectShowUtil.ReleaseComObject(pinDemuxIn);

                #endregion

                #region render demux output pins

                if (IsRadio)
                {
                    Log.Info("RTSPPlayer:render audio demux outputs");
                    IEnumPins enumPins;
                    _mpegDemux.EnumPins(out enumPins);
                    IPin[] pins    = new IPin[2];
                    int    fetched = 0;
                    while (enumPins.Next(1, pins, out fetched) == 0)
                    {
                        if (fetched != 1)
                        {
                            break;
                        }
                        PinDirection direction;
                        pins[0].QueryDirection(out direction);
                        if (direction == PinDirection.Input)
                        {
                            continue;
                        }
                        IEnumMediaTypes enumMediaTypes;
                        pins[0].EnumMediaTypes(out enumMediaTypes);
                        AMMediaType[] mediaTypes = new AMMediaType[20];
                        int           fetchedTypes;
                        enumMediaTypes.Next(20, mediaTypes, out fetchedTypes);
                        for (int i = 0; i < fetchedTypes; ++i)
                        {
                            if (mediaTypes[i].majorType == MediaType.Audio)
                            {
                                graphBuilder.Render(pins[0]);
                                break;
                            }
                        }
                    }
                }
                else
                {
                    Log.Info("RTSPPlayer:render audio/video demux outputs");
                    IEnumPins enumPins;
                    _mpegDemux.EnumPins(out enumPins);
                    IPin[] pins    = new IPin[2];
                    int    fetched = 0;
                    while (enumPins.Next(1, pins, out fetched) == 0)
                    {
                        if (fetched != 1)
                        {
                            break;
                        }
                        PinDirection direction;
                        pins[0].QueryDirection(out direction);
                        if (direction == PinDirection.Input)
                        {
                            continue;
                        }
                        graphBuilder.Render(pins[0]);
                    }
                }

                #endregion

                // Connect DVB subtitle filter pins in the graph
                if (_mpegDemux != null && enableDvbSubtitles == true)
                {
                    IMpeg2Demultiplexer demuxer = _mpegDemux as IMpeg2Demultiplexer;
                    hr = demuxer.CreateOutputPin(GetTSMedia(), "Pcr", out _pinPcr);

                    if (hr == 0)
                    {
                        Log.Info("RTSPPlayer:_pinPcr OK");

                        IPin pDemuxerPcr  = DsFindPin.ByName(_mpegDemux, "Pcr");
                        IPin pSubtitlePcr = DsFindPin.ByName(_subtitleFilter, "Pcr");
                        hr = graphBuilder.Connect(pDemuxerPcr, pSubtitlePcr);
                    }
                    else
                    {
                        Log.Info("RTSPPlayer:Failed to create _pinPcr in demuxer:{0:X}", hr);
                    }

                    hr = demuxer.CreateOutputPin(GetTSMedia(), "Subtitle", out _pinSubtitle);
                    if (hr == 0)
                    {
                        Log.Info("RTSPPlayer:_pinSubtitle OK");

                        IPin pDemuxerSubtitle = DsFindPin.ByName(_mpegDemux, "Subtitle");
                        IPin pSubtitle        = DsFindPin.ByName(_subtitleFilter, "In");
                        hr = graphBuilder.Connect(pDemuxerSubtitle, pSubtitle);
                    }
                    else
                    {
                        Log.Info("RTSPPlayer:Failed to create _pinSubtitle in demuxer:{0:X}", hr);
                    }

                    hr = demuxer.CreateOutputPin(GetTSMedia(), "PMT", out _pinPMT);
                    if (hr == 0)
                    {
                        Log.Info("RTSPPlayer:_pinPMT OK");

                        IPin pDemuxerSubtitle = DsFindPin.ByName(_mpegDemux, "PMT");
                        IPin pSubtitle        = DsFindPin.ByName(_subtitleFilter, "PMT");
                        hr = graphBuilder.Connect(pDemuxerSubtitle, pSubtitle);
                    }
                    else
                    {
                        Log.Info("RTSPPlayer:Failed to create _pinPMT in demuxer:{0:X}", hr);
                    }
                }


                if (IsRadio == false)
                {
                    if (!VMR9Util.g_vmr9.IsVMR9Connected)
                    {
                        //VMR9 is not supported, switch to overlay
                        Log.Info("RTSPPlayer: vmr9 not connected");
                        _mediaCtrl = null;
                        Cleanup();
                        return(false);
                    }
                    VMR9Util.g_vmr9.SetDeinterlaceMode();
                }

                _mediaCtrl    = (IMediaControl)graphBuilder;
                mediaEvt      = (IMediaEventEx)graphBuilder;
                _mediaSeeking = (IMediaSeeking)graphBuilder;
                mediaPos      = (IMediaPosition)graphBuilder;
                basicAudio    = graphBuilder as IBasicAudio;
                //DirectShowUtil.SetARMode(graphBuilder,AspectRatioMode.Stretched);
                DirectShowUtil.EnableDeInterlace(graphBuilder);
                if (VMR9Util.g_vmr9 != null)
                {
                    m_iVideoWidth  = VMR9Util.g_vmr9.VideoWidth;
                    m_iVideoHeight = VMR9Util.g_vmr9.VideoHeight;
                }
                if (audioRendererFilter != null)
                {
                    Log.Info("RTSPPlayer9:set reference clock");
                    IMediaFilter    mp    = graphBuilder as IMediaFilter;
                    IReferenceClock clock = audioRendererFilter as IReferenceClock;
                    hr = mp.SetSyncSource(null);
                    hr = mp.SetSyncSource(clock);
                    Log.Info("RTSPPlayer9:set reference clock:{0:X}", hr);
                }
                Log.Info("RTSPPlayer: graph build successfull");
                return(true);
            }
            catch (Exception ex)
            {
                Error.SetError("Unable to play movie", "Unable build graph for VMR9");
                Log.Error("RTSPPlayer:exception while creating DShow graph {0} {1}", ex.Message, ex.StackTrace);
                CloseInterfaces();
                return(false);
            }
        }
コード例 #46
    /// <summary>
    /// Connects two filters to one another
    /// </summary>
    /// <param name="graphBuilder">current graph reference</param>
    /// <param name="sourceFilter">source filter</param>
    /// <param name="destinationFilter">destination filetr</param>
    /// <param name="deviceName">filter name</param>
    /// <returns></returns>
    public static bool ConnectFilter(IGraphBuilder graphBuilder, IBaseFilter sourceFilter, IBaseFilter destinationFilter,
                                     string deviceName)
    {
      //Log.Log.WriteFile("analog: ConnectFilter()");
      IPin pinIn = DsFindPin.ByDirection(destinationFilter, PinDirection.Input, 0);
      Log.Log.WriteFile("analog:  PinDest:{0}", LogPinInfo(pinIn));
      for (int i = 0; i <= 10; ++i)
      {
        IPin pinOut = DsFindPin.ByDirection(sourceFilter, PinDirection.Output, i);
        if (pinOut == null)
          return false;
        Log.Log.WriteFile("analog:  pinSource {0}:{1}", i, LogPinInfo(pinOut));

        //Hauppauge hack - sorry, i attempted to do this right, but hauppauge drivers report incorrect values
        //and it takes a very long time to reject the audio to video connection - diehard2

        int hr = -1;
        IPin testPin = null;
        try
        {
          pinOut.ConnectedTo(out testPin);
        }
        catch (Exception ex)
        {
          Log.Log.WriteFile("Error while connecting a filter: ", ex);
        }
        if (testPin != null)
        {
          Release.ComObject("outPin", pinOut);
          Release.ComObject("testPin", testPin);
          Log.Log.WriteFile("analog: skipping pin");
          continue;
        }
        if (deviceName.Contains("Hauppauge") &&
            (LogPinInfo(pinOut).Contains("Audio") || LogPinInfo(pinIn).Contains("Audio")))
        {
          if (LogPinInfo(pinOut).Contains("Audio") && LogPinInfo(pinIn).Contains("Audio"))
            hr = graphBuilder.Connect(pinOut, pinIn);
        }
        else
          hr = graphBuilder.Connect(pinOut, pinIn);

        if (hr == 0)
        {
          Log.Log.WriteFile("analog:  pins connected");
          Release.ComObject("pinIn", pinIn);
          Release.ComObject("pinOut", pinOut);
          return true;
        }
        Release.ComObject("pinOut", pinOut);
        Release.ComObject("testPin", testPin);
      }
      Release.ComObject("pinIn", pinIn);
      return false;
    }
コード例 #47
        public static void ConnectFilters(IGraphBuilder graphBuilder, IPin sourcePin, IPin destinationPin,
                                          bool useIntelligentConnect)
        {
            int hr = 0;

            if (graphBuilder == null)
                throw new ArgumentNullException("graphBuilder");

            if (sourcePin == null)
                throw new ArgumentNullException("sourcePin");

            if (destinationPin == null)
                throw new ArgumentNullException("destinationPin");

            if (useIntelligentConnect)
            {
                hr = graphBuilder.Connect(sourcePin, destinationPin);
                DsError.ThrowExceptionForHR(hr);
            }
            else
            {
                hr = graphBuilder.ConnectDirect(sourcePin, destinationPin, null);
                DsError.ThrowExceptionForHR(hr);
            }
        }
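
The useIntelligentConnect flag above chooses between IGraphBuilder.Connect, which may insert intermediate transform filters to complete the connection, and ConnectDirect, which fails unless the two pins can agree on a media type by themselves. A hedged usage sketch (pin variables are placeholders; DsError.ThrowExceptionForHR raises an exception, typically a COMException, on failure):

            try
            {
                // Try a direct connection first; it keeps the graph minimal.
                ConnectFilters(graphBuilder, sourcePin, destinationPin, false);
            }
            catch (COMException)
            {
                // Fall back to intelligent connect and let the graph insert decoders.
                ConnectFilters(graphBuilder, sourcePin, destinationPin, true);
            }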
コード例 #48
 /// <summary>
 /// helper function to connect 2 filters
 /// </summary>
 /// <param name="graphBuilder">The graph builder.</param>
 /// <param name="sourceFilter">The source filter.</param>
 /// <param name="destinationFilter">The destination filter.</param>
 /// <returns></returns>
 public static bool ConnectFilter(IGraphBuilder graphBuilder, IBaseFilter sourceFilter, IBaseFilter destinationFilter)
 {
   //Log.Log.WriteFile("analog: ConnectFilter()");
   IPin pinIn = DsFindPin.ByDirection(destinationFilter, PinDirection.Input, 0);
   Log.Log.WriteFile("analog:  PinDest:{0}", LogPinInfo(pinIn));
   for (int i = 0; i <= 10; ++i)
   {
      IPin pinOut = DsFindPin.ByDirection(sourceFilter, PinDirection.Output, i);
      if (pinOut == null)
        return false;
      Log.Log.WriteFile("analog:  pinSource {0}:{1}", i, LogPinInfo(pinOut));
      int hr = graphBuilder.Connect(pinOut, pinIn);
     if (hr == 0)
     {
       Log.Log.WriteFile("analog:  pins connected");
       Release.ComObject("pinIn", pinIn);
       Release.ComObject("pinOut", pinOut);
       return true;
     }
     Release.ComObject("pinOut", pinOut);
   }
   Release.ComObject("pinIn", pinIn);
   return false;
 }
コード例 #49
ファイル: CxDSMediaPlayer.cs プロジェクト: jionyamakita/DSLab
        /// <summary>
        /// グラフの生成
        /// </summary>
        public virtual void Setup()
        {
            this.Dispose();

            try
            {
                // Filter graph (CoCreateInstance).
                GraphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_FilterGraph)));

                #region Add filters.
                // File source.
                IBaseFilter capture = null;
                GraphBuilder.AddSourceFilter(SourceFile, "CaptureFilter", ref capture);
                if (capture == null)
                {
                    throw new System.IO.IOException();
                }

#if false
                // DMO wrapper filter.
                // https://msdn.microsoft.com/ja-jp/library/cc371140.aspx
                IBaseFilter dmo = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_DMOWrapperFilter)));
                if (dmo != null)
                {
                    //// Mpeg4 Decoder DMO
                    //// F371728A-6052-4D47-827C-D039335DFE0A
                    //// 4A69B442-28BE-4991-969C-B500ADF5D8A8
                    //// mpg4decd.dll [C:\Windows\System32, C:\Windows\SysWOW64]

                    var idmo = (IDMOWrapperFilter)dmo;
                    idmo.Init(new Guid("F371728A-6052-4D47-827C-D039335DFE0A"), new Guid("4A69B442-28BE-4991-969C-B500ADF5D8A8"));
                    idmo = null;

                    this.GraphBuilder.AddFilter(dmo, "Mpeg4 Decoder DMO");
                }
#endif

#if false
                // Avi Splitter
                IBaseFilter splitter = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_AVISplitter)));
                if (splitter == null)
                {
                    throw new System.IO.IOException();
                }
                this.GraphBuilder.AddFilter(splitter, "Avi Splitter");

                // Avi Decompressor
                IBaseFilter decompressor = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_AVIDec)));
                if (decompressor == null)
                {
                    throw new System.IO.IOException();
                }
                this.GraphBuilder.AddFilter(decompressor, "Avi Decompressor");
#endif

                // Sample grabber.
                IBaseFilter grabber = (IBaseFilter)CreateSampleGrabber();
                if (grabber == null)
                {
                    throw new System.IO.IOException();
                }
                this.GraphBuilder.AddFilter(grabber, "SampleGrabber");

                // Renderer.
                IBaseFilter renderer = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_NullRenderer)));
                if (renderer == null)
                {
                    throw new System.IO.IOException();
                }
                this.GraphBuilder.AddFilter(renderer, "Renderer");
                #endregion

                #region Get pins.
                IPin capture_out = DSLab.Axi.FindPin(capture, 0, PIN_DIRECTION.PINDIR_OUTPUT);
                IPin grabber_in  = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_INPUT);
                IPin grabber_out = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_OUTPUT);
                IPin renderer_in = DSLab.Axi.FindPin(renderer, 0, PIN_DIRECTION.PINDIR_INPUT);
                #endregion

                #region Connect pins.
                GraphBuilder.Connect(capture_out, grabber_in);
                GraphBuilder.Connect(grabber_out, renderer_in);
                #endregion

                #region Store: interfaces.
                CaptureFilter       = capture;
                CaptureOutPin       = capture_out;
                SampleGrabber       = (ISampleGrabber)grabber;
                SampleGrabberInPin  = grabber_in;
                SampleGrabberOutPin = grabber_out;
                Renderer            = renderer;
                RendererInPin       = renderer_in;
                #endregion

                #region Store: frame size.
                VIDEOINFOHEADER vinfo = DSLab.Axi.GetVideoInfo(SampleGrabber);
                this.SampleGrabberCB.BitmapInfo = vinfo.bmiHeader;
                this.SampleGrabberCB.FrameSize  = new Size(
                    System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biWidth),
                    System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biHeight)
                    );
                #endregion

                #region Extract interfaces:
                {
                    DSLab.IGraphBuilder graph   = this.GraphBuilder;
                    DSLab.IEnumFilters  filters = null;
                    DSLab.IBaseFilter   filter  = null;
                    int fetched = 0;

                    int hr = graph.EnumFilters(ref filters);
                    while (filters.Next(1, ref filter, ref fetched) == (int)DSLab.HRESULT.S_OK)
                    {
                        if (fetched == 0)
                        {
                            break;
                        }

                        if (filter is DSLab.IMediaSeeking)
                        {
                            // Used for seek operations.
                            Seeking = (DSLab.IMediaSeeking)filter;
                        }
                        else
                        {
                            // Release the filter.
                            Marshal.ReleaseComObject(filter);
                            filter = null;
                        }
                    }

                    // Release the enumerator.
                    Marshal.ReleaseComObject(filters);
                }
                #endregion

                // DEBUG
#if DEBUG
                DebugPrint(this.GraphBuilder);
#endif
            }
            catch (Exception ex)
            {
                throw new DSLab.CxDSException(ex);
            }
        }
コード例 #50
 /// <summary>
 /// helper function to connect 2 filters
 /// </summary>
 /// <param name="graphBuilder">The graph builder.</param>
 /// <param name="sourceFilter">The source filter.</param>
 /// <param name="pinDestination">The pin destination.</param>
 /// <param name="sourcePinIndex">The determined output index of the source filter</param>
 /// <returns></returns>
 public static bool ConnectFilter(IGraphBuilder graphBuilder, IBaseFilter sourceFilter, IPin pinDestination,
                                  out int sourcePinIndex)
 {
   //Log.Log.WriteFile("analog: ConnectFilter()");
   Log.Log.WriteFile("analog:  PinDest:{0}", LogPinInfo(pinDestination));
   sourcePinIndex = -1;
   for (int i = 0; i <= 10; ++i)
   {
     IPin pinOut = DsFindPin.ByDirection(sourceFilter, PinDirection.Output, i);
     if (pinOut == null)
       return false;
     Log.Log.WriteFile("analog:  pinSource {0}:{1}", i, LogPinInfo(pinOut));
     int hr = graphBuilder.Connect(pinOut, pinDestination);
     if (hr == 0)
     {
       Log.Log.WriteFile("analog:  pins connected");
       Release.ComObject("pindest" + i, pinOut);
       sourcePinIndex = i;
       return true;
     }
     Release.ComObject("pindest" + i, pinOut);
   }
   return false;
 }
コード例 #51
        private bool AddCodecs(IGraphBuilder graphBuilder, TranscodeInfo info)
        {
            //TODO: Add de-interlacing probably by filter
            int hr;

            Log.Info("TSReader2MP4: add h264 video encoder to graph");
            //Lead H264 Encoder (4.0)
            string monikerH264 = @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{E2B7DF52-38C5-11D5-91F6-00104BDB8FF9}";

            h264Encoder = Marshal.BindToMoniker(monikerH264) as IBaseFilter;
            if (h264Encoder == null)
            {
                Log.Error("TSReader2MP4: FAILED: Unable to create h264 video encoder");
                Cleanup();
                return(false);
            }
            hr = graphBuilder.AddFilter(h264Encoder, "h264 video encoder");
            if (hr != 0)
            {
                Log.Error("TSReader2MP4: FAILED: h264 video encoder to filtergraph :0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            Log.Info("TSReader2MP4: add aac audio encoder to graph");
            //Monograph AAC Encoder
            //string monikerAAC = @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{88F36DB6-D898-40B5-B409-466A0EECC26A}";
            //Lead AAC Encoder
            string monikerAAC = @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{E2B7DD70-38C5-11D5-91F6-00104BDB8FF9}";

            aacEncoder = Marshal.BindToMoniker(monikerAAC) as IBaseFilter;
            if (aacEncoder == null)
            {
                Log.Error("TSReader2MP4: FAILED: Unable to create aac audio encoder");
                Cleanup();
                return(false);
            }
            hr = graphBuilder.AddFilter(aacEncoder, "aac audio encoder");
            if (hr != 0)
            {
                Log.Error("TSReader2MP4: FAILED: Add aac audio encoder to filtergraph :0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            // dump filter ????
            //add filewriter
            Log.Info("TSReader2MP4: add FileWriter to graph");
            string monikerFileWrite =
                @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{8596E5F0-0DA5-11D0-BD21-00A0C911CE86}";
            IBaseFilter fileWriterbase = Marshal.BindToMoniker(monikerFileWrite) as IBaseFilter;

            if (fileWriterbase == null)
            {
                Log.Error("TSReader2MP4: FAILED: Unable to create FileWriter");
                Cleanup();
                return(false);
            }
            fileWriterFilter = fileWriterbase as IFileSinkFilter2;
            if (fileWriterFilter == null)
            {
                Log.Error("TSReader2MP4: FAILED: Add unable to get IFileSinkFilter for filewriter");
                Cleanup();
                return(false);
            }

            hr = graphBuilder.AddFilter(fileWriterbase, "FileWriter");
            if (hr != 0)
            {
                Log.Error("TSReader2MP4: FAILED: Add FileWriter to filtergraph :0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            //set output filename
            string outputFileName = System.IO.Path.ChangeExtension(info.file, ".mp4");

            Log.Info("TSReader2MP4: set output file to :{0}", outputFileName);
            hr = fileWriterFilter.SetFileName(outputFileName, null);
            if (hr != 0)
            {
                Log.Error("TSReader2MP4: FAILED: unable to set filename for filewriter :0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            // add mp4 muxer
            Log.Info("TSReader2MP4: add MP4 Muxer to graph");
            //Lead ISO Multiplexer
            string monikermp4Muxer =
                @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{990D1978-E48D-43AF-B12D-24A7456EC89F}";

            mp4Muxer = Marshal.BindToMoniker(monikermp4Muxer) as IBaseFilter;
            if (mp4Muxer == null)
            {
                Log.Error("TSReader2MP4: FAILED: Unable to create MP4Mux");
                Cleanup();
                return(false);
            }
            hr = graphBuilder.AddFilter(mp4Muxer, "MP4Mux");
            if (hr != 0)
            {
                Log.Error("TSReader2MP4: FAILED: Add MP4Mux to filtergraph :0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            //connect output of audio codec to aac encoder
            IPin pinOut, pinIn;

            Log.Info("TSReader2MP4: connect audio codec->aac encoder");
            pinIn = DsFindPin.ByDirection(aacEncoder, PinDirection.Input, 0);
            if (pinIn == null)
            {
                Log.Error("TSReader2MP4: FAILED: cannot get input pin of aac encoder:0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            pinOut = DsFindPin.ByDirection(AudioCodec, PinDirection.Output, 0);
            if (pinOut == null)
            {
                Log.Error("TSReader2MP4: FAILED: cannot get output pin of audio codec :0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            hr = graphBuilder.Connect(pinOut, pinIn);
            if (hr != 0)
            {
                Log.Error("TSReader2MP4: FAILED: unable to connect audio codec->aac encoder: 0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            //connect output of video codec to h264 encoder
            Log.Info("TSReader2MP4: connect video codec->h264 encoder");
            pinIn = DsFindPin.ByDirection(h264Encoder, PinDirection.Input, 0);
            if (pinIn == null)
            {
                Log.Error("TSReader2MP4: FAILED: cannot get input pin of h264 encoder:0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            pinOut = DsFindPin.ByDirection(VideoCodec, PinDirection.Output, 0);
            if (pinOut == null)
            {
                Log.Error("TSReader2MP4: FAILED: cannot get output pin of video codec :0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            hr = graphBuilder.Connect(pinOut, pinIn);
            if (hr != 0)
            {
                Log.Error("TSReader2MP4: FAILED: unable to connect video codec->h264 encoder :0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            //connect output of aac encoder to pin#0 of mp4mux
            Log.Info("TSReader2MP4: connect aac encoder->mp4mux");
            pinOut = DsFindPin.ByDirection(aacEncoder, PinDirection.Output, 0);
            if (pinOut == null)
            {
                Log.Error("TSReader2MP4: FAILED: cannot get input pin of aac encoder:0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            pinIn = DsFindPin.ByDirection(mp4Muxer, PinDirection.Input, 0);
            if (pinIn == null)
            {
                Log.Error("TSReader2MP4: FAILED: cannot get input pin#1 of mp4 muxer :0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            hr = graphBuilder.Connect(pinOut, pinIn);
            if (hr != 0)
            {
                Log.Error("TSReader2MP4: FAILED: unable to connect aac encoder->mp4mux: 0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            //connect output of h264 encoder to pin#1 of mp4mux
            Log.Info("TSReader2MP4: connect h264 encoder->mp4mux");
            pinOut = DsFindPin.ByDirection(h264Encoder, PinDirection.Output, 0);
            if (pinOut == null)
            {
                Log.Error("TSReader2MP4: FAILED: cannot get input pin of h264 encoder :0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            pinIn = DsFindPin.ByDirection(mp4Muxer, PinDirection.Input, 1);
            if (pinIn == null)
            {
                Log.Error("TSReader2MP4: FAILED: cannot get input#0 pin of mp4mux :0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            hr = graphBuilder.Connect(pinOut, pinIn);
            if (hr != 0)
            {
                Log.Error("TSReader2MP4: FAILED: unable to connect h264 encoder->mp4mux :0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            // dump filter??
            //connect mp4 muxer out->filewriter
            Log.Info("TSReader2MP4: connect mp4mux->filewriter");
            pinOut = DsFindPin.ByDirection(mp4Muxer, PinDirection.Output, 0);
            if (pinOut == null)
            {
                Log.Error("TSReader2MP4: FAILED: cannot get output pin of avimux:0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            pinIn = DsFindPin.ByDirection(fileWriterbase, PinDirection.Input, 0);
            if (pinIn == null)
            {
                Log.Error("TSReader2MP4: FAILED: cannot get input pin of filewriter :0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            hr = graphBuilder.Connect(pinOut, pinIn);
            if (hr != 0)
            {
                Log.Error("TSReader2MP4: FAILED: connect mp4 muxer->filewriter :0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            return(true);
        }
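
The @device:sw: strings used above are DirectShow device monikers: the first GUID is the filter category (083863F1-70DE-11D0-BD40-00A0C911CE86 is the legacy AM filter category) and the second GUID is the CLSID of the particular filter. A hedged sketch of loading an arbitrary software filter the same way; the second GUID below is a placeholder, not a real filter:

            // Format: @device:sw:{category CLSID}\{filter CLSID}
            string moniker = @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{00000000-0000-0000-0000-000000000000}"; // placeholder filter CLSID
            IBaseFilter someFilter = Marshal.BindToMoniker(moniker) as IBaseFilter;
            if (someFilter != null)
            {
                int hrAdd = graphBuilder.AddFilter(someFilter, "Some Filter");
                // check hrAdd and keep the reference so the filter can be removed/released later
            }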
コード例 #52
    public static void RemoveFromGraph(IGraphBuilder graphBuilder)
    {
      IBaseFilter vob = null;
      using (Settings xmlreader = new MPSettings())
      {
        string engineType = xmlreader.GetValueAsString("subtitles", "engine", "DirectVobSub");
        XySubFilter = engineType.Equals("XySubFilter");
      }

      if (!XySubFilter)
      {
        DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.DirectVobSubAutoload, out vob);
        if (vob == null)
        {
          //Try the "normal" filter then.
          DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.DirectVobSubNormal, out vob);
        }
      }

      if (vob == null)
      {
        DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.XySubFilterAutoload, out vob);
        if (vob != null)
        {
          //remove the XySubFilter filter from the graph
          graphBuilder.RemoveFilter(vob);
          DirectShowUtil.ReleaseComObject(vob);
          vob = null;
          return;
        }

        //Try the XySubFilter "normal" filter then.
        DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.XySubFilterNormal, out vob);
        if (vob != null)
        {
          //remove the XySubFilter filter from the graph
          graphBuilder.RemoveFilter(vob);
          DirectShowUtil.ReleaseComObject(vob);
          vob = null;
        }
        return;
      }

      Log.Info("VideoPlayerVMR9: DirectVobSub in graph, removing...");
      // Check where video inputs are connected
      IPin pinVideoIn = DsFindPin.ByDirection(vob, PinDirection.Input, 0);
      IPin pinVideoOut = DsFindPin.ByDirection(vob, PinDirection.Output, 0);

      //find directvobsub's video output pin source input pin
      IPin pinVideoTo = null;
      if (pinVideoOut != null)
      {
        pinVideoOut.ConnectedTo(out pinVideoTo);
      }

      //find directvobsub's video input pin source output pin
      IPin pinVideoFrom = null;
      if (pinVideoIn != null)
      {
        pinVideoIn.ConnectedTo(out pinVideoFrom);
      }

      int hr = 0;

      if (pinVideoFrom != null)
      {
        hr = pinVideoFrom.Disconnect();
        if (hr != 0)
        {
          Log.Error("VideoPlayerVMR9: DirectVobSub failed disconnecting source pin");
        }
      }

      if (pinVideoTo != null)
      {
        hr = pinVideoTo.Disconnect();
        if (hr != 0)
        {
          Log.Error("VideoPlayerVMR9: DirectVobSub failed disconnecting destination pin");
        }
      }

      //remove the DirectVobSub filter from the graph
      graphBuilder.RemoveFilter(vob);
      DirectShowUtil.ReleaseComObject(vob);
      vob = null;

      //reconnect the source output pin to the vmr9/evr filter
      if (pinVideoFrom != null)
      {
        if (pinVideoTo != null)
        {
          hr = graphBuilder.Connect(pinVideoFrom, pinVideoTo);
        }
        //hr = graphBuilder.Render(pinVideoFrom);
        DirectShowUtil.ReleaseComObject(pinVideoFrom);
        pinVideoFrom = null;
      }

      if (pinVideoTo != null)
      {
        DirectShowUtil.ReleaseComObject(pinVideoTo);
        pinVideoTo = null;
      }
      if (pinVideoOut != null)
      {
        DirectShowUtil.ReleaseComObject(pinVideoOut);
        pinVideoOut = null;
      }

      if (pinVideoIn != null)
      {
        DirectShowUtil.ReleaseComObject(pinVideoIn);
        pinVideoIn = null;
      }

      if (hr != 0)
        Log.Error("VideoPlayerVMR9: Could not connect video out to video renderer: {0}", hr);
      else
        Log.Debug("VideoPlayerVMR9: DirectVobSub graph rebuild finished");
    }
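
The disconnect/remove/reconnect sequence above is a general DirectShow idiom for taking a filter out of a graph while preserving the data path around it. A generic hedged sketch of the same idiom with plain DirectShowLib calls (helper name and variables are hypothetical):

    static void BypassFilter(IGraphBuilder graph, IBaseFilter filter)
    {
      IPin inPin = DsFindPin.ByDirection(filter, PinDirection.Input, 0);
      IPin outPin = DsFindPin.ByDirection(filter, PinDirection.Output, 0);
      IPin upstream = null, downstream = null;
      if (inPin != null) inPin.ConnectedTo(out upstream);
      if (outPin != null) outPin.ConnectedTo(out downstream);

      // Break both connections before removing the filter.
      if (upstream != null) upstream.Disconnect();
      if (downstream != null) downstream.Disconnect();
      graph.RemoveFilter(filter);

      // Re-join the neighbours (intelligent connect may insert replacements).
      if (upstream != null && downstream != null)
        graph.Connect(upstream, downstream);

      // Release the pin interfaces (e.g. Marshal.ReleaseComObject) when done.
    }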
コード例 #53
ファイル: FileVideoSource.cs プロジェクト: old8xp/AForge.NET
        /// <summary>
        /// Worker thread.
        /// </summary>
        ///
        private void WorkerThread( )
        {
            ReasonToFinishPlaying reasonToStop = ReasonToFinishPlaying.StoppedByUser;

            // grabber
            Grabber grabber = new Grabber(this);

            // objects
            object graphObject   = null;
            object grabberObject = null;

            // interfaces
            IGraphBuilder  graph         = null;
            IBaseFilter    sourceBase    = null;
            IBaseFilter    grabberBase   = null;
            ISampleGrabber sampleGrabber = null;
            IMediaControl  mediaControl  = null;

            IMediaEventEx mediaEvent = null;

            try
            {
                // get type for filter graph
                Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating filter graph");
                }

                // create filter graph
                graphObject = Activator.CreateInstance(type);
                graph       = (IGraphBuilder)graphObject;

                // create source device's object
                graph.AddSourceFilter(fileName, "source", out sourceBase);
                if (sourceBase == null)
                {
                    throw new ApplicationException("Failed creating source filter");
                }

                // get type for sample grabber
                type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating sample grabber");
                }

                // create sample grabber
                grabberObject = Activator.CreateInstance(type);
                sampleGrabber = (ISampleGrabber)grabberObject;
                grabberBase   = (IBaseFilter)grabberObject;

                // add grabber filters to graph
                graph.AddFilter(grabberBase, "grabber");

                // set media type
                AMMediaType mediaType = new AMMediaType( );
                mediaType.MajorType = MediaType.Video;
                mediaType.SubType   = MediaSubType.RGB24;
                sampleGrabber.SetMediaType(mediaType);

                // connect pins
                int pinToTry = 0;

                IPin inPin  = Tools.GetInPin(grabberBase, 0);
                IPin outPin = null;

                // find output pin acceptable by sample grabber
                while (true)
                {
                    outPin = Tools.GetOutPin(sourceBase, pinToTry);

                    if (outPin == null)
                    {
                        Marshal.ReleaseComObject(inPin);
                        throw new ApplicationException("Did not find acceptable output video pin in the given source");
                    }

                    if (graph.Connect(outPin, inPin) < 0)
                    {
                        Marshal.ReleaseComObject(outPin);
                        outPin = null;
                        pinToTry++;
                    }
                    else
                    {
                        break;
                    }
                }

                Marshal.ReleaseComObject(outPin);
                Marshal.ReleaseComObject(inPin);

                // get media type
                if (sampleGrabber.GetConnectedMediaType(mediaType) == 0)
                {
                    VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));

                    grabber.Width  = vih.BmiHeader.Width;
                    grabber.Height = vih.BmiHeader.Height;
                    mediaType.Dispose( );
                }

                // let's do rendering, if we don't need to prevent freezing
                if (!preventFreezing)
                {
                    // render pin
                    graph.Render(Tools.GetOutPin(grabberBase, 0));

                    // configure video window
                    IVideoWindow window = (IVideoWindow)graphObject;
                    window.put_AutoShow(false);
                    window = null;
                }

                // configure sample grabber
                sampleGrabber.SetBufferSamples(false);
                sampleGrabber.SetOneShot(false);
                sampleGrabber.SetCallback(grabber, 1);

                // disable clock, if someone requested it
                if (!referenceClockEnabled)
                {
                    IMediaFilter mediaFilter = (IMediaFilter)graphObject;
                    mediaFilter.SetSyncSource(null);
                }

                // get media control
                mediaControl = (IMediaControl)graphObject;

                // get media events' interface
                mediaEvent = (IMediaEventEx)graphObject;
                IntPtr   p1, p2;
                DsEvCode code;

                // run
                mediaControl.Run( );

                do
                {
                    if (mediaEvent != null)
                    {
                        if (mediaEvent.GetEvent(out code, out p1, out p2, 0) >= 0)
                        {
                            mediaEvent.FreeEventParams(code, p1, p2);

                            if (code == DsEvCode.Complete)
                            {
                                reasonToStop = ReasonToFinishPlaying.EndOfStreamReached;
                                break;
                            }
                        }
                    }
                }while (!stopEvent.WaitOne(100, false));

                mediaControl.Stop( );
            }
            catch (Exception exception)
            {
                // provide information to clients
                if (VideoSourceError != null)
                {
                    VideoSourceError(this, new VideoSourceErrorEventArgs(exception.Message));
                }
            }
            finally
            {
                // release all objects
                graph         = null;
                grabberBase   = null;
                sampleGrabber = null;
                mediaControl  = null;
                mediaEvent    = null;

                if (graphObject != null)
                {
                    Marshal.ReleaseComObject(graphObject);
                    graphObject = null;
                }
                if (sourceBase != null)
                {
                    Marshal.ReleaseComObject(sourceBase);
                    sourceBase = null;
                }
                if (grabberObject != null)
                {
                    Marshal.ReleaseComObject(grabberObject);
                    grabberObject = null;
                }
            }

            if (PlayingFinished != null)
            {
                PlayingFinished(this, reasonToStop);
            }
        }
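
The sampleGrabber.SetCallback(grabber, 1) call above selects callback mode 1 (BufferCB, which delivers a raw buffer pointer) rather than mode 0 (SampleCB, which delivers the whole IMediaSample). A minimal hedged sketch of such a callback class, written against the DirectShowLib-style ISampleGrabberCB declaration rather than AForge's internal Grabber type:

            class FrameCallback : ISampleGrabberCB
            {
                // Mode 0: receives the complete media sample.
                public int SampleCB(double sampleTime, IMediaSample sample)
                {
                    return 0;
                }

                // Mode 1: receives a pointer to the raw frame buffer (RGB24 here).
                public int BufferCB(double sampleTime, IntPtr buffer, int bufferLen)
                {
                    // copy bufferLen bytes from 'buffer' into a Bitmap or byte array here
                    return 0;
                }
            }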
コード例 #54
 public bool Transcode(TranscodeInfo info, VideoFormat format, Quality quality, Standard standard)
 {
   try
   {
     if (!Supports(format)) return false;
     string ext = System.IO.Path.GetExtension(info.file);
     if (ext.ToLower() != ".ts" && ext.ToLower() != ".mpg")
     {
       Log.Info("TSReader2WMV: wrong file format");
       return false;
     }
     Log.Info("TSReader2WMV: create graph");
     graphBuilder = (IGraphBuilder)new FilterGraph();
     _rotEntry = new DsROTEntry((IFilterGraph)graphBuilder);
     Log.Info("TSReader2WMV: add filesource");
     TsReader reader = new TsReader();
     tsreaderSource = (IBaseFilter)reader;
     //ITSReader ireader = (ITSReader)reader;
     //ireader.SetTsReaderCallback(this);
     //ireader.SetRequestAudioChangeCallback(this);
     IBaseFilter filter = (IBaseFilter)tsreaderSource;
     graphBuilder.AddFilter(filter, "TSReader Source");
     IFileSourceFilter fileSource = (IFileSourceFilter)tsreaderSource;
     Log.Info("TSReader2WMV: load file:{0}", info.file);
     int hr = fileSource.Load(info.file, null);
     //add audio/video codecs
     string strVideoCodec = "";
     string strH264VideoCodec = "";
     string strAudioCodec = "";
     string strAACAudioCodec = "";
     using (MediaPortal.Profile.Settings xmlreader = new MediaPortal.Profile.MPSettings())
     {
       strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "");
       strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "");
       strAACAudioCodec = xmlreader.GetValueAsString("mytv", "aacaudiocodec", "");
       strH264VideoCodec = xmlreader.GetValueAsString("mytv", "h264videocodec", "");
     }
     //Find the type of decoder required for the output video & audio pins on TSReader.
     Log.Info("TSReader2WMV: find tsreader compatible audio/video decoders");
     IPin pinOut0, pinOut1;
     IPin pinIn0, pinIn1;
     pinOut0 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 0); //audio
     pinOut1 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 1); //video
     if (pinOut0 == null || pinOut1 == null)
     {
       Log.Error("TSReader2WMV: FAILED: unable to get output pins of tsreader");
       Cleanup();
       return false;
     }
     bool usingAAC = false;
     IEnumMediaTypes enumMediaTypes;
     hr = pinOut0.EnumMediaTypes(out enumMediaTypes);
     while (true)
     {
       AMMediaType[] mediaTypes = new AMMediaType[1];
       int typesFetched;
       hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
       if (hr != 0 || typesFetched == 0) break;
       if (mediaTypes[0].majorType == MediaType.Audio && mediaTypes[0].subType == MediaSubType.LATMAAC)
       {
         Log.Info("TSReader2WMV: found LATM AAC audio out pin on tsreader");
         usingAAC = true;
       }
     }
     bool usingH264 = false;
     hr = pinOut1.EnumMediaTypes(out enumMediaTypes);
     while (true)
     {
       AMMediaType[] mediaTypes = new AMMediaType[1];
       int typesFetched;
       hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
       if (hr != 0 || typesFetched == 0) break;
       if (mediaTypes[0].majorType == MediaType.Video && mediaTypes[0].subType == AVC1)
       {
         Log.Info("TSReader2WMV: found H.264 video out pin on tsreader");
         usingH264 = true;
       }
     }
     //Add the type of decoder required for the output video & audio pins on TSReader.
     Log.Info("TSReader2WMV: add audio/video decoders to graph");
     if (usingH264 == false)
     {
       Log.Info("TSReader2WMV: add mpeg2 video decoder:{0}", strVideoCodec);
       VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
       if (VideoCodec == null)
       {
         Log.Error("TSReader2WMV: unable to add mpeg2 video decoder");
         Cleanup();
         return false;
       }
     }
     else
     {
       Log.Info("TSReader2WMV: add h264 video codec:{0}", strH264VideoCodec);
       VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strH264VideoCodec);
       if (VideoCodec == null)
       {
         Log.Error("TSReader2WMV: FAILED:unable to add h264 video codec");
         Cleanup();
         return false;
       }
     }
     if (usingAAC == false)
     {
       Log.Info("TSReader2WMV: add mpeg2 audio codec:{0}", strAudioCodec);
       AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
       if (AudioCodec == null)
       {
         Log.Error("TSReader2WMV: FAILED:unable to add mpeg2 audio codec");
         Cleanup();
         return false;
       }
     }
     else
     {
       Log.Info("TSReader2WMV: add aac audio codec:{0}", strAACAudioCodec);
       AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAACAudioCodec);
       if (AudioCodec == null)
       {
         Log.Error("TSReader2WMV: FAILED:unable to add aac audio codec");
         Cleanup();
         return false;
       }
     }
     Log.Info("TSReader2WMV: connect tsreader->audio/video decoders");
     //connect output #0 (audio) of tsreader->audio decoder input pin 0
     //connect output #1 (video) of tsreader->video decoder input pin 0
     pinIn0 = DsFindPin.ByDirection(AudioCodec, PinDirection.Input, 0); //audio
     pinIn1 = DsFindPin.ByDirection(VideoCodec, PinDirection.Input, 0); //video
     if (pinIn0 == null || pinIn1 == null)
     {
       Log.Error("TSReader2WMV: FAILED: unable to get pins of video/audio codecs");
       Cleanup();
       return false;
     }
     hr = graphBuilder.Connect(pinOut0, pinIn0);
     if (hr != 0)
     {
       Log.Error("TSReader2WMV: FAILED: unable to connect audio pins :0x{0:X}", hr);
       Cleanup();
       return false;
     }
     hr = graphBuilder.Connect(pinOut1, pinIn1);
     if (hr != 0)
     {
       Log.Error("TSReader2WMV: FAILED: unable to connect video pins :0x{0:X}", hr);
       Cleanup();
       return false;
     }
     string outputFilename = System.IO.Path.ChangeExtension(info.file, ".wmv");
     if (!AddWmAsfWriter(outputFilename, quality, standard)) return false;
     Log.Info("TSReader2WMV: start pre-run");
     mediaControl = graphBuilder as IMediaControl;
     mediaSeeking = tsreaderSource as IMediaSeeking;
     mediaEvt = graphBuilder as IMediaEventEx;
     mediaPos = graphBuilder as IMediaPosition;
      //get file duration: seek 5 hours (in 100-ns units) past the end so the clamped position reports the actual duration
      long lTime = 5 * 60 * 60;
      lTime *= 10000000;
     long pStop = 0;
     hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                    AMSeekingSeekingFlags.NoPositioning);
     if (hr == 0)
     {
       long lStreamPos;
       mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
       m_dDuration = lStreamPos;
       lTime = 0;
       mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                 AMSeekingSeekingFlags.NoPositioning);
     }
     double duration = m_dDuration / 10000000d;
     Log.Info("TSReader2WMV: movie duration:{0}", Util.Utils.SecondsToHMSString((int)duration));
     hr = mediaControl.Run();
     if (hr != 0)
     {
       Log.Error("TSReader2WMV: FAILED: unable to start graph :0x{0:X}", hr);
       Cleanup();
       return false;
     }
     int maxCount = 20;
     while (true)
     {
       long lCurrent;
       mediaSeeking.GetCurrentPosition(out lCurrent);
       double dpos = (double)lCurrent;
       dpos /= 10000000d;
       System.Threading.Thread.Sleep(100);
       if (dpos >= 2.0d) break;
       maxCount--;
       if (maxCount <= 0) break;
     }
     Log.Info("TSReader2WMV: pre-run done");
     Log.Info("TSReader2WMV: Get duration of movie");
     mediaControl.Stop();
     FilterState state;
     mediaControl.GetState(500, out state);
     GC.Collect();
     GC.Collect();
     GC.Collect();
     GC.WaitForPendingFinalizers();
     Log.Info("TSReader2WMV: reconnect mpeg2 video codec->ASF WM Writer");
     graphBuilder.RemoveFilter(fileWriterbase);
     if (!AddWmAsfWriter(outputFilename, quality, standard)) return false;
     Log.Info("TSReader2WMV: Start transcoding");
     hr = mediaControl.Run();
     if (hr != 0)
     {
       Log.Error("TSReader2WMV:FAILED:unable to start graph :0x{0:X}", hr);
       Cleanup();
       return false;
     }
   }
   catch (Exception e)
   {
     // TODO: Handle exceptions.
     Log.Error("unable to transcode file:{0} message:{1}", info.file, e.Message);
     return false;
   }
   return true;
 }
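Transcode probes the TsReader output pins with EnumMediaTypes to decide whether the audio pin carries LATM AAC and the video pin carries H.264, but it never frees the AMMediaType structures or releases the enumerator. A hedged sketch of the same probe with that cleanup added is shown below; the helper name PinOffersSubType is ours, and the Next signature mirrors the DirectShow wrapper used in the code above.

    // requires: using System; using System.Runtime.InteropServices; plus the DirectShow wrapper used above
    private static bool PinOffersSubType(IPin pin, Guid majorType, Guid subType)
    {
      IEnumMediaTypes enumTypes;
      int hr = pin.EnumMediaTypes(out enumTypes);
      if (hr != 0 || enumTypes == null) return false;

      bool found = false;
      try
      {
        AMMediaType[] types = new AMMediaType[1];
        int fetched;
        // Walk every media type the pin advertises, freeing each one after inspection.
        while (enumTypes.Next(1, types, out fetched) == 0 && fetched == 1)
        {
          if (types[0].majorType == majorType && types[0].subType == subType)
          {
            found = true;
          }
          DsUtils.FreeAMMediaType(types[0]);
          if (found) break;
        }
      }
      finally
      {
        Marshal.ReleaseComObject(enumTypes);
      }
      return found;
    }

    // Usage, e.g.: usingAAC = PinOffersSubType(pinOut0, MediaType.Audio, MediaSubType.LATMAAC);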
Code example #55
        private bool AddWmAsfWriter(string fileName, Quality quality, Standard standard)
        {
            //add asf file writer
            IPin pinOut0, pinOut1;
            IPin pinIn0, pinIn1;

            Log.Info("DVRMS2WMV: add WM ASF Writer to graph");
            string monikerAsfWriter =
                @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{7C23220E-55BB-11D3-8B16-00C04FB6BD3D}";

            fileWriterbase = Marshal.BindToMoniker(monikerAsfWriter) as IBaseFilter;
            if (fileWriterbase == null)
            {
                Log.Error("DVRMS2WMV:FAILED:Unable to create ASF WM Writer");
                Cleanup();
                return(false);
            }

            int hr = graphBuilder.AddFilter(fileWriterbase, "WM ASF Writer");

            if (hr != 0)
            {
                Log.Error("DVRMS2WMV:FAILED:Add ASF WM Writer to filtergraph :0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            IFileSinkFilter2 fileWriterFilter = fileWriterbase as IFileSinkFilter2;

            if (fileWriterFilter == null)
            {
                Log.Error("DVR2XVID:FAILED:Add unable to get IFileSinkFilter for filewriter");
                Cleanup();
                return(false);
            }
            hr = fileWriterFilter.SetFileName(fileName, null);
            hr = fileWriterFilter.SetMode(AMFileSinkFlags.OverWrite);
            Log.Info("DVRMS2WMV: connect audio/video codecs outputs -> ASF WM Writer");
            //connect output #0 of audiocodec->asf writer pin 0
            //connect output #0 of videocodec->asf writer pin 1
            pinOut0 = DsFindPin.ByDirection((IBaseFilter)Mpeg2AudioCodec, PinDirection.Output, 0);
            pinOut1 = DsFindPin.ByDirection((IBaseFilter)Mpeg2VideoCodec, PinDirection.Output, 0);
            if (pinOut0 == null || pinOut1 == null)
            {
                Log.Error("DVRMS2WMV:FAILED:unable to get outpins of video codec");
                Cleanup();
                return(false);
            }
            pinIn0 = DsFindPin.ByDirection(fileWriterbase, PinDirection.Input, 0);
            if (pinIn0 == null)
            {
                Log.Error("DVRMS2WMV:FAILED:unable to get pins of asf wm writer");
                Cleanup();
                return(false);
            }
            hr = graphBuilder.Connect(pinOut0, pinIn0);
            if (hr != 0)
            {
                Log.Error("DVRMS2WMV:FAILED:unable to connect audio pins :0x{0:X}", hr);
                Cleanup();
                return(false);
            }
            pinIn1 = DsFindPin.ByDirection(fileWriterbase, PinDirection.Input, 1);
            if (pinIn1 == null)
            {
                Log.Error("DVRMS2WMV:FAILED:unable to get pins of asf wm writer");
                Cleanup();
                return(false);
            }
            hr = graphBuilder.Connect(pinOut1, pinIn1);
            if (hr != 0)
            {
                Log.Error("DVRMS2WMV:FAILED:unable to connect video pins :0x{0:X}", hr);
                Cleanup();
                return(false);
            }

            IConfigAsfWriter   config          = fileWriterbase as IConfigAsfWriter;
            IWMProfileManager  profileManager  = null;
            IWMProfileManager2 profileManager2 = null;
            IWMProfile         profile         = null;

            hr = WMLib.WMCreateProfileManager(out profileManager);
            string strprofileType = "";

            switch (quality)
            {
            case Quality.HiDef:
                //hr = WMLib.WMCreateProfileManager(out profileManager);
                if (standard == Standard.Film)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPHiDef-FILM.prx");
                }
                if (standard == Standard.NTSC)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPHiDef-NTSC.prx");
                }
                if (standard == Standard.PAL)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPHiDef-PAL.prx");
                }
                Log.Info("DVRMS2WMV: set WMV HiDef quality profile {0}", strprofileType);
                break;

            case Quality.VeryHigh:
                if (standard == Standard.Film)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPVeryHigh-FILM.prx");
                }
                if (standard == Standard.NTSC)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPVeryHigh-NTSC.prx");
                }
                if (standard == Standard.PAL)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPVeryHigh-PAL.prx");
                }
                Log.Info("DVRMS2WMV: set WMV Very High quality profile {0}", strprofileType);
                break;

            case Quality.High:
                if (standard == Standard.Film)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPHigh-FILM.prx");
                }
                if (standard == Standard.NTSC)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPHigh-NTSC.prx");
                }
                if (standard == Standard.PAL)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPHigh-PAL.prx");
                }
                Log.Info("DVRMS2WMV: set WMV High quality profile {0}", strprofileType);
                break;

            case Quality.Medium:
                if (standard == Standard.Film)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPMedium-FILM.prx");
                }
                if (standard == Standard.NTSC)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPMedium-NTSC.prx");
                }
                if (standard == Standard.PAL)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPMedium-PAL.prx");
                }
                Log.Info("DVRMS2WMV: set WMV Medium quality profile {0}", strprofileType);
                break;

            case Quality.Low:
                if (standard == Standard.Film)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPLow-FILM.prx");
                }
                if (standard == Standard.NTSC)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPLow-NTSC.prx");
                }
                if (standard == Standard.PAL)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPLow-PAL.prx");
                }
                Log.Info("DVRMS2WMV: set WMV Low quality profile {0}", strprofileType);
                break;

            case Quality.Portable:
                if (standard == Standard.Film)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPPortable-FILM.prx");
                }
                if (standard == Standard.NTSC)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPPortable-NTSC.prx");
                }
                if (standard == Standard.PAL)
                {
                    strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPPortable-PAL.prx");
                }
                Log.Info("DVRMS2WMV: set WMV Portable quality profile {0}", strprofileType);
                break;

            case Quality.Custom:
                //load custom profile
                string customBitrate = "";
                //Adjust the parameters to suit the custom settings the user has selected.
                switch (bitrate)
                {
                case 0:
                    customBitrate = "100Kbs";
                    break;

                case 1:
                    customBitrate = "256Kbs";
                    break;

                case 2:
                    customBitrate = "384Kbs";
                    break;

                case 3:
                    customBitrate = "768Kbs";
                    break;

                case 4:
                    customBitrate = "1536Kbs";
                    break;

                case 5:
                    customBitrate = "3072Kbs";
                    break;

                case 6:
                    customBitrate = "5376Kbs";
                    break;
                }
                Log.Info("DVRMS2WMV: custom bitrate = {0}", customBitrate);
                //TODO: get fps values & frame size
                //TODO: adjust settings required
                //Call the SetCutomProfile method to load the custom profile, adjust its params from user settings and then save it.
                //SetCutomProfile(videoBitrate, audioBitrate, videoHeight, videoWidth, videoFps); //based on user inputs
                //We then reload it after as per other quality settings / profiles.
                strprofileType = Config.GetFile(Config.Dir.Base, @"Profiles\MPCustom.prx");
                Log.Info("DVRMS2WMV: set WMV Custom quality profile {0}", strprofileType);
                break;
            }
            //Loads profile from the above quality selection.
            using (StreamReader prx = new StreamReader(strprofileType))
            {
                String profileContents = prx.ReadToEnd();
                profileManager2 = profileManager as IWMProfileManager2;

                hr = profileManager2.LoadProfileByData(profileContents, out profile);
                if (hr != 0)
                {
                    Log.Info("DVRMS2WMV: get WMV profile - FAILED! {0}", hr);
                    Cleanup();
                    return(false);
                }
            }


            Log.Info("DVRMS2WMV: load profile - SUCCESS!");
            //configures the WM ASF Writer to the chosen profile
            hr = config.ConfigureFilterUsingProfile(profile);
            if (hr != 0)
            {
                Log.Info("DVRMS2WMV: configure profile - FAILED! {0}", hr);
                Cleanup();
                return(false);
            }
            Log.Info("DVRMS2WMV: configure profile - SUCCESS!");
            //TODO: Add DB recording information into WMV.

            //Release resources
            if (profile != null)
            {
                Marshal.ReleaseComObject(profile);
                profile = null;
            }
            if (profileManager != null)
            {
                Marshal.ReleaseComObject(profileManager);
                profileManager = null;
            }
            return(true);
        }
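The quality/standard switch in AddWmAsfWriter is effectively a lookup from (Quality, Standard) to a .prx profile path. A more compact sketch of the same mapping is shown below, assuming the Config, Quality and Standard types used above; the helper name GetProfilePath is ours, and unlike the original it always returns a path instead of leaving the string empty for an unrecognised standard (the Custom case still needs the bitrate handling shown above).

            private static string GetProfilePath(Quality quality, Standard standard)
            {
                string qualityName;
                switch (quality)
                {
                case Quality.HiDef:    qualityName = "HiDef";    break;
                case Quality.VeryHigh: qualityName = "VeryHigh"; break;
                case Quality.High:     qualityName = "High";     break;
                case Quality.Medium:   qualityName = "Medium";   break;
                case Quality.Low:      qualityName = "Low";      break;
                case Quality.Portable: qualityName = "Portable"; break;
                default:
                    // Quality.Custom (and anything unexpected) falls back to the custom profile.
                    return Config.GetFile(Config.Dir.Base, @"Profiles\MPCustom.prx");
                }
                // Standard.Film / Standard.NTSC / Standard.PAL map directly onto the file-name suffix.
                string suffix = standard.ToString().ToUpperInvariant();
                return Config.GetFile(Config.Dir.Base, string.Format(@"Profiles\MP{0}-{1}.prx", qualityName, suffix));
            }

            // Usage, e.g.: strprofileType = GetProfilePath(quality, standard);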
Code example #56
File: TSReader2MP4.cs Project: arangas/MediaPortal-1
    private bool AddCodecs(IGraphBuilder graphBuilder, TranscodeInfo info)
    {
      //TODO: Add de-interlacing probably by filter
      int hr;
      Log.Info("TSReader2MP4: add h264 video encoder to graph");
      //Lead H264 Encoder (4.0)
      string monikerH264 = @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{E2B7DF52-38C5-11D5-91F6-00104BDB8FF9}";
      h264Encoder = Marshal.BindToMoniker(monikerH264) as IBaseFilter;
      if (h264Encoder == null)
      {
        Log.Error("TSReader2MP4: FAILED: Unable to create h264 video encoder");
        Cleanup();
        return false;
      }
      hr = graphBuilder.AddFilter(h264Encoder, "h264 video encoder");
      if (hr != 0)
      {
        Log.Error("TSReader2MP4: FAILED: h264 video encoder to filtergraph :0x{0:X}", hr);
        Cleanup();
        return false;
      }
      Log.Info("TSReader2MP4: add aac audio encoder to graph");
      //Monograph AAC Encoder
      //string monikerAAC = @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{88F36DB6-D898-40B5-B409-466A0EECC26A}";
      //Lead AAC Encoder
      string monikerAAC = @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{E2B7DD70-38C5-11D5-91F6-00104BDB8FF9}";
      aacEncoder = Marshal.BindToMoniker(monikerAAC) as IBaseFilter;
      if (aacEncoder == null)
      {
        Log.Error("TSReader2MP4: FAILED: Unable to create aac audio encoder");
        Cleanup();
        return false;
      }
      hr = graphBuilder.AddFilter(aacEncoder, "aac audio encoder");
      if (hr != 0)
      {
        Log.Error("TSReader2MP4: FAILED: Add aac audio encoder to filtergraph :0x{0:X}", hr);
        Cleanup();
        return false;
      }
      // dump filter ????
      //add filewriter 
      Log.Info("TSReader2MP4: add FileWriter to graph");
      string monikerFileWrite =
        @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{8596E5F0-0DA5-11D0-BD21-00A0C911CE86}";
      IBaseFilter fileWriterbase = Marshal.BindToMoniker(monikerFileWrite) as IBaseFilter;
      if (fileWriterbase == null)
      {
        Log.Error("TSReader2MP4: FAILED: Unable to create FileWriter");
        Cleanup();
        return false;
      }
      fileWriterFilter = fileWriterbase as IFileSinkFilter2;
      if (fileWriterFilter == null)
      {
        Log.Error("TSReader2MP4: FAILED: Add unable to get IFileSinkFilter for filewriter");
        Cleanup();
        return false;
      }

      hr = graphBuilder.AddFilter(fileWriterbase, "FileWriter");
      if (hr != 0)
      {
        Log.Error("TSReader2MP4: FAILED: Add FileWriter to filtergraph :0x{0:X}", hr);
        Cleanup();
        return false;
      }
      //set output filename
      string outputFileName = System.IO.Path.ChangeExtension(info.file, ".mp4");
      Log.Info("TSReader2MP4: set output file to :{0}", outputFileName);
      hr = fileWriterFilter.SetFileName(outputFileName, null);
      if (hr != 0)
      {
        Log.Error("TSReader2MP4: FAILED: unable to set filename for filewriter :0x{0:X}", hr);
        Cleanup();
        return false;
      }
      // add mp4 muxer
      Log.Info("TSReader2MP4: add MP4 Muxer to graph");
      //Lead ISO Multiplexer
      string monikermp4Muxer =
        @"@device:sw:{083863F1-70DE-11D0-BD40-00A0C911CE86}\{990D1978-E48D-43AF-B12D-24A7456EC89F}";
      mp4Muxer = Marshal.BindToMoniker(monikermp4Muxer) as IBaseFilter;
      if (mp4Muxer == null)
      {
        Log.Error("TSReader2MP4: FAILED: Unable to create MP4Mux");
        Cleanup();
        return false;
      }
      hr = graphBuilder.AddFilter(mp4Muxer, "MP4Mux");
      if (hr != 0)
      {
        Log.Error("TSReader2MP4: FAILED: Add MP4Mux to filtergraph :0x{0:X}", hr);
        Cleanup();
        return false;
      }
      //connect output of audio codec to aac encoder
      IPin pinOut, pinIn;
      Log.Info("TSReader2MP4: connect audio codec->aac encoder");
      pinIn = DsFindPin.ByDirection(aacEncoder, PinDirection.Input, 0);
      if (pinIn == null)
      {
        Log.Error("TSReader2MP4: FAILED: cannot get input pin of aac encoder:0x{0:X}", hr);
        Cleanup();
        return false;
      }
      pinOut = DsFindPin.ByDirection(AudioCodec, PinDirection.Output, 0);
      if (pinOut == null)
      {
        Log.Error("TSReader2MP4: FAILED: cannot get output pin of audio codec :0x{0:X}", hr);
        Cleanup();
        return false;
      }
      hr = graphBuilder.Connect(pinOut, pinIn);
      if (hr != 0)
      {
        Log.Error("TSReader2MP4: FAILED: unable to connect audio codec->aac encoder: 0x{0:X}", hr);
        Cleanup();
        return false;
      }
      //connect output of video codec to h264 encoder
      Log.Info("TSReader2MP4: connect video codec->h264 encoder");
      pinIn = DsFindPin.ByDirection(h264Encoder, PinDirection.Input, 0);
      if (pinIn == null)
      {
        Log.Error("TSReader2MP4: FAILED: cannot get input pin of h264 encoder:0x{0:X}", hr);
        Cleanup();
        return false;
      }
      pinOut = DsFindPin.ByDirection(VideoCodec, PinDirection.Output, 0);
      if (pinOut == null)
      {
        Log.Error("TSReader2MP4: FAILED: cannot get output pin of video codec :0x{0:X}", hr);
        Cleanup();
        return false;
      }
      hr = graphBuilder.Connect(pinOut, pinIn);
      if (hr != 0)
      {
        Log.Error("TSReader2MP4: FAILED: unable to connect video codec->h264 encoder :0x{0:X}", hr);
        Cleanup();
        return false;
      }
      //connect output of aac encoder to pin#0 of mp4mux
      Log.Info("TSReader2MP4: connect aac encoder->mp4mux");
      pinOut = DsFindPin.ByDirection(aacEncoder, PinDirection.Output, 0);
      if (pinOut == null)
      {
        Log.Error("TSReader2MP4: FAILED: cannot get input pin of aac encoder:0x{0:X}", hr);
        Cleanup();
        return false;
      }
      pinIn = DsFindPin.ByDirection(mp4Muxer, PinDirection.Input, 0);
      if (pinIn == null)
      {
        Log.Error("TSReader2MP4: FAILED: cannot get input pin#1 of mp4 muxer :0x{0:X}", hr);
        Cleanup();
        return false;
      }
      hr = graphBuilder.Connect(pinOut, pinIn);
      if (hr != 0)
      {
        Log.Error("TSReader2MP4: FAILED: unable to connect aac encoder->mp4mux: 0x{0:X}", hr);
        Cleanup();
        return false;
      }
      //connect output of h264 encoder to pin#1 of mp4mux
      Log.Info("TSReader2MP4: connect h264 encoder->mp4mux");
      pinOut = DsFindPin.ByDirection(h264Encoder, PinDirection.Output, 0);
      if (pinOut == null)
      {
        Log.Error("TSReader2MP4: FAILED: cannot get input pin of h264 encoder :0x{0:X}", hr);
        Cleanup();
        return false;
      }
      pinIn = DsFindPin.ByDirection(mp4Muxer, PinDirection.Input, 1);
      if (pinIn == null)
      {
        Log.Error("TSReader2MP4: FAILED: cannot get input#0 pin of mp4mux :0x{0:X}", hr);
        Cleanup();
        return false;
      }
      hr = graphBuilder.Connect(pinOut, pinIn);
      if (hr != 0)
      {
        Log.Error("TSReader2MP4: FAILED: unable to connect h264 encoder->mp4mux :0x{0:X}", hr);
        Cleanup();
        return false;
      }
      // dump filter??
      //connect mp4 muxer out->filewriter
      Log.Info("TSReader2MP4: connect mp4mux->filewriter");
      pinOut = DsFindPin.ByDirection(mp4Muxer, PinDirection.Output, 0);
      if (pinOut == null)
      {
        Log.Error("TSReader2MP4: FAILED: cannot get output pin of avimux:0x{0:X}", hr);
        Cleanup();
        return false;
      }
      pinIn = DsFindPin.ByDirection(fileWriterbase, PinDirection.Input, 0);
      if (pinIn == null)
      {
        Log.Error("TSReader2MP4: FAILED: cannot get input pin of filewriter :0x{0:X}", hr);
        Cleanup();
        return false;
      }
      hr = graphBuilder.Connect(pinOut, pinIn);
      if (hr != 0)
      {
        Log.Error("TSReader2MP4: FAILED: connect mp4 muxer->filewriter :0x{0:X}", hr);
        Cleanup();
        return false;
      }
      return true;
    }
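AddCodecs repeats the same three-step sequence for every filter it creates: bind the filter by moniker, add it to the graph, and bail out when either step fails. A hedged sketch that collects the sequence into one helper is shown below; the helper name AddFilterByMoniker is ours, while Marshal.BindToMoniker, IGraphBuilder.AddFilter and Log.Error are used exactly as in the code above.

    // requires: using System.Runtime.InteropServices; plus the DirectShow wrapper used above
    private static IBaseFilter AddFilterByMoniker(IGraphBuilder graph, string moniker, string name)
    {
      IBaseFilter filter = Marshal.BindToMoniker(moniker) as IBaseFilter;
      if (filter == null)
      {
        Log.Error("TSReader2MP4: FAILED: unable to create filter '{0}'", name);
        return null;
      }
      int hr = graph.AddFilter(filter, name);
      if (hr != 0)
      {
        Log.Error("TSReader2MP4: FAILED: add '{0}' to filtergraph :0x{1:X}", name, hr);
        Marshal.ReleaseComObject(filter);
        return null;
      }
      return filter;
    }

    // Usage, e.g.:
    //   h264Encoder = AddFilterByMoniker(graphBuilder, monikerH264, "h264 video encoder");
    //   if (h264Encoder == null) { Cleanup(); return false; }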