Example #1
        void PreviewInit()
        {
            m_dvdNav  = (IBaseFilter) new DVDNavigator();
            m_dvdCtrl = m_dvdNav as IDvdControl2;
            int hr = m_dvdCtrl.SetDVDDirectory(Disk.VIDEO_TS);

            DsError.ThrowExceptionForHR(hr);

            m_dvdInfo = m_dvdCtrl as IDvdInfo2;

            m_filterGraph = (IGraphBuilder) new FilterGraph();
            hr            = m_filterGraph.AddFilter(m_dvdNav, "DVD Navigator");
            DsError.ThrowExceptionForHR(hr);

            m_renderer = (IBaseFilter) new VideoMixingRenderer9();
            IVMRFilterConfig9 filterConfig = (IVMRFilterConfig9)m_renderer;

            hr = filterConfig.SetRenderingMode(VMR9Mode.Renderless);
            DsError.ThrowExceptionForHR(hr);

            hr = filterConfig.SetNumberOfStreams(1);
            DsError.ThrowExceptionForHR(hr);

            hr = m_filterGraph.AddFilter(m_renderer, "Video Mix 9");
            DsError.ThrowExceptionForHR(hr);

            IPin videoPin;

            hr = m_dvdNav.FindPin("Video", out videoPin);
            DsError.ThrowExceptionForHR(hr);

            IPin audioPin;

            hr = m_dvdNav.FindPin("AC3", out audioPin);
            DsError.ThrowExceptionForHR(hr);

            //hr = m_filterGraph.Render(videoPin);
            //DsError.ThrowExceptionForHR(hr);
            //hr = m_filterGraph.Render(audioPin);
            //DsError.ThrowExceptionForHR(hr);

            //IMediaControl mediaCtrl = (IMediaControl)m_filterGraph;

            //hr = mediaCtrl.Run();
            //DsError.ThrowExceptionForHR(hr);

            //hr = m_dvdCtrl.SetOption(DvdOptionFlag.EnableNonblockingAPIs, true);
            //DsError.ThrowExceptionForHR(hr);
            //m_dvdCtrl.SetOption(DvdOptionFlag.ResetOnStop, true);
        }
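
FindPin hands back the pin with an extra COM reference, so once the graph is wired up the two pins above should be released. A minimal cleanup sketch, assuming the same DirectShowLib bindings as the example:

            // Sketch: release the pins obtained from FindPin once they are no longer
            // needed; the returned interfaces are AddRef'ed on our behalf.
            if (videoPin != null)
            {
                Marshal.ReleaseComObject(videoPin);
            }
            if (audioPin != null)
            {
                Marshal.ReleaseComObject(audioPin);
            }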
Example #2
File: BaseGraph.cs  Project: ewin66/media
        /// <summary>
        /// Connects an upstream filter, via a pin matching a specific media type, to the named pin on the downstream filter.
        /// </summary>
        /// <param name="upFilter">upstream filter</param>
        /// <param name="upPinMajor">upstream pin major media type</param>
        /// <param name="upPinSub">upstream pin sub media type</param>
        /// <param name="downFilter">downstream filter</param>
        /// <param name="downPinName">downstream pin name</param>
        /// <param name="useIntelligentConnect">
        /// TRUE to use intelligent connect inserting filters if needed
        /// FALSE to directly connect filters
        /// </param>
        protected void ConnectFilters(IBaseFilter upFilter, Guid upPinMajor, Guid upPinSub, IBaseFilter downFilter, string downPinName, bool useIntelligentConnect)
        {
            IPin downstreamPin = null;
            IPin upstreamPin   = null;

            try
            {
                try
                {
                    upstreamPin = FindPinWithMediaType(upFilter, PinDirection.Output, upPinMajor, upPinSub);
                }
                catch (Exception ex)
                {
                    throw new Exception("Upstream filter has no such pin!", ex);
                }

                downFilter.FindPin(downPinName, out downstreamPin);
                if (downstreamPin == null)
                {
                    throw new Exception("Downstream filter has no pin \"" + downPinName + "\"!");
                }

                ConnectFilters(upstreamPin, downstreamPin, useIntelligentConnect);
            }
            finally
            {
                // Guard against nulls: an exception above may leave either pin unset.
                if (upstreamPin != null) { upstreamPin.Release(); }
                if (downstreamPin != null) { downstreamPin.Release(); }
            }
        }
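
FindPinWithMediaType is not shown in this example. A plausible implementation (an assumption, not necessarily what BaseGraph.cs actually does) enumerates the filter's pins and compares each pin's preferred media types:

        // Hypothetical sketch of FindPinWithMediaType, assuming DirectShowLib-style
        // bindings; the real project code may differ.
        static IPin FindPinWithMediaType(IBaseFilter filter, PinDirection direction, Guid majorType, Guid subType)
        {
            IEnumPins enumPins;
            DsError.ThrowExceptionForHR(filter.EnumPins(out enumPins));
            var pins = new IPin[1];
            try
            {
                while (enumPins.Next(1, pins, IntPtr.Zero) == 0)
                {
                    PinDirection dir;
                    DsError.ThrowExceptionForHR(pins[0].QueryDirection(out dir));
                    if (dir == direction)
                    {
                        IEnumMediaTypes enumTypes;
                        DsError.ThrowExceptionForHR(pins[0].EnumMediaTypes(out enumTypes));
                        var types = new AMMediaType[1];
                        try
                        {
                            while (enumTypes.Next(1, types, IntPtr.Zero) == 0)
                            {
                                bool match = types[0].majorType == majorType &&
                                             (subType == Guid.Empty || types[0].subType == subType);
                                DsUtils.FreeAMMediaType(types[0]);
                                if (match)
                                {
                                    return pins[0]; // caller releases the pin
                                }
                            }
                        }
                        finally
                        {
                            Marshal.ReleaseComObject(enumTypes);
                        }
                    }
                    Marshal.ReleaseComObject(pins[0]);
                }
            }
            finally
            {
                Marshal.ReleaseComObject(enumPins);
            }
            throw new Exception("No pin with the requested media type was found");
        }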
Example #3
        /// <summary>
        /// Add a DV Splitter filter and connect it to the DV source.  If the DV Splitter is already there, do nothing.
        /// </summary>
        /// <returns></returns>
        private bool AddDVSplitter()
        {
            if ((dvSplitter != null) &&
                (splitterAudioOut != null) &&
                (splitterVideoOut != null))
            {
                return(true);
            }

            //Add a DVSplitter and connect it.
            try {
                dvSplitter = Filter.CreateBaseFilterByName("DV Splitter");
                iGB.AddFilter(dvSplitter, "DV Splitter");
                IPin dvSplitterInput;
                dvSplitter.FindPin("Input", out dvSplitterInput);
                iGB.Connect(source.OutputPin, dvSplitterInput);
            }
            catch (COMException) {
                dvSplitter       = null;
                splitterVideoOut = splitterAudioOut = null;
                return(false);
            }

            //Find output pins
            try {
                this.splitterVideoOut = Filter.GetPin(dvSplitter, _PinDirection.PINDIR_OUTPUT, Guid.Empty, Guid.Empty, true, 0);
                this.splitterAudioOut = Filter.GetPin(dvSplitter, _PinDirection.PINDIR_OUTPUT, Guid.Empty, Guid.Empty, true, 1);
            }
            catch (COMException) {
                dvSplitter       = null;
                splitterVideoOut = splitterAudioOut = null;
                return(false);
            }
            return(true);
        }
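
Filter.GetPin and Filter.CreateBaseFilterByName are helpers from the hosting project and are not shown here. Assuming dvSplitter exposes IBaseFilter, and ignoring the category/media-type arguments (which are Guid.Empty in this call anyway), the pin lookup is roughly equivalent to this DirectShowLib call, offered only as an illustration:

            // Rough DirectShowLib equivalent of the two Filter.GetPin calls above:
            // fetch the n-th output pin of the splitter by direction and index.
            IPin videoOut = DsFindPin.ByDirection(dvSplitter, PinDirection.Output, 0);
            IPin audioOut = DsFindPin.ByDirection(dvSplitter, PinDirection.Output, 1);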
Example #4
        private bool IsAudioPinPresent(IBaseFilter lavSourceFilter)
        {
            IPin audioPin = null;
            var  hr       = lavSourceFilter.FindPin("Audio", out audioPin);

            bool isPresent = (hr == 0 && audioPin != null);
            if (audioPin != null)
            {
                Marshal.ReleaseComObject(audioPin); // FindPin AddRefs the pin; release it to avoid a leak
            }
            return isPresent;
        }
Example #5
        /// <summary>
        /// Determines what streams are available on the Net Demux.
        /// Creates channels in the GMF Bridge controller accordingly.
        /// Then, creates the GMF Bridge Sink, and connects the streams to their respective pins.
        /// </summary>
        private void RenderNetDemux()
        {
            List <DetailPinInfo> pins = null;

            try
            {
                //fetch all pins on this filter
                pins = netDemux.EnumPinsDetails();

                //create list of pins we care about
                List <IPin> demuxPins = new List <IPin>();

                //get output pins of type video or audio
                foreach (DetailPinInfo i in pins)
                {
                    if (i.Info.dir == PinDirection.Output)
                    {
                        if (i.Type.majorType == MediaType.Video)
                        {
                            controller.AddStream(1, eFormatType.eAny, 1);
                            demuxPins.Add(i.Pin);
                        }
                        else if (i.Type.majorType == MediaType.Audio)
                        {
                            controller.AddStream(0, eFormatType.eAny, 1);
                            demuxPins.Add(i.Pin);
                        }
                    }
                }

                //create GMF Sink
                output = (IBaseFilter)controller.InsertSinkFilter(graph);
                //connect Demux to GMF Sink
                for (int i = 0; i < demuxPins.Count; i++)
                {
                    IPin sinkPin;
                    int  hr = output.FindPin("Input " + (i + 1).ToString(), out sinkPin);
                    if (hr == 0)
                    {
                        FilterGraphTools.ConnectFilters(graph, demuxPins[i], sinkPin, false);
                        Marshal.ReleaseComObject(sinkPin);
                    }
                }
            }
            catch
            {
                Release(output);
                output = null;
                throw;               // rethrow without resetting the stack trace
            }
            finally
            {
                if (pins != null)
                {
                    pins.Release();
                }
            }
        }
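
The Release(output) call in the catch block is a small project helper. A minimal sketch of such a helper, assuming it is just a null-safe wrapper over COM release:

        // Hypothetical Release helper: null-safe wrapper around Marshal.ReleaseComObject.
        private static void Release(object comObject)
        {
            if (comObject != null)
            {
                Marshal.ReleaseComObject(comObject);
            }
        }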
Example #6
        public override void AddedToGraph(FilgraphManager fgm) {
            IGraphBuilder gb = (IGraphBuilder)fgm;

            //Add the Blackmagic Decoder filter and connect it.
            try {
                bfDecoder = Filter.CreateBaseFilterByName("Blackmagic Design Decoder (DMO)");
                gb.AddFilter(bfDecoder, "Blackmagic Design Decoder (DMO)");
                IPin decoderInput;
                bfDecoder.FindPin("in0", out decoderInput);
                bfDecoder.FindPin("out0", out decoderOutput);
                captureOutput = GetPin(filter, _PinDirection.PINDIR_OUTPUT, Pin.PIN_CATEGORY_CAPTURE, Guid.Empty, false, 0);
                gb.Connect(captureOutput, decoderInput);
            }
            catch (Exception ex) {
                throw new ApplicationException("Failed to add the BlackMagic Decoder filter to the graph", ex);
            }

            base.AddedToGraph(fgm);
        }
Example #8
        /// <summary>Connects together two graph filters.</summary>
        /// <param name="graph">The graph on which the filters exist.</param>
        /// <param name="source">The source filter.</param>
        /// <param name="outPinName">The name of the output pin on the source filter.</param>
        /// <param name="destination">The destination filter.</param>
        /// <param name="inPinName">The name of the input pin on the destination filter.</param>
        protected void Connect(IGraphBuilder graph, IBaseFilter source, string outPinName,
                               IBaseFilter destination, string inPinName)
        {
            IPin outPin = source.FindPin(outPinName);

            DisposalCleanup.Add(outPin);

            IPin inPin = destination.FindPin(inPinName);

            DisposalCleanup.Add(inPin);

            graph.Connect(outPin, inPin);
        }
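
Note that FindPin here returns an IPin directly instead of an HRESULT, so it is a project helper rather than the raw IBaseFilter::FindPin. A plausible wrapper (hypothetical, shown only to make the calling convention clear):

        // Hypothetical extension method turning the HRESULT-style IBaseFilter.FindPin
        // into the throwing, IPin-returning form used above.
        public static IPin FindPin(this IBaseFilter filter, string pinName)
        {
            IPin pin;
            int hr = filter.FindPin(pinName, out pin);
            DsError.ThrowExceptionForHR(hr);
            return pin;
        }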
Example #9
        private bool IsVideoH264(IBaseFilter lavSourceFilter)
        {
            IPin videoPin;
            var  hr = lavSourceFilter.FindPin("Video", out videoPin);

            DsError.ThrowExceptionForHR(hr);
            var             mediaType = new AMMediaType[1];
            IEnumMediaTypes mediaTypes;

            videoPin.EnumMediaTypes(out mediaTypes);
            mediaTypes.Next(1, mediaType, IntPtr.Zero);
            bool isH264 = (mediaType[0].subType == MediaSubType.H264);

            // Free the media type and release the COM objects so they do not leak.
            DsUtils.FreeAMMediaType(mediaType[0]);
            Marshal.ReleaseComObject(mediaTypes);
            Marshal.ReleaseComObject(videoPin);
            return isH264;
        }
Example #10
        private static void Connect_up(int dInput, int dOutput)
        {
            object source = null, inputD = null;

            DsDevice[] devices;
            devices = DsDevice.GetDevicesOfCat(FilterCategory.AudioRendererCategory);
            DsDevice device = devices[dOutput];
            Guid     iid    = typeof(IBaseFilter).GUID;

            device.Mon.BindToObject(null, null, ref iid, out source);

            m_objFilterGraph = (IGraphBuilder) new FilterGraph();
            m_objFilterGraph.AddFilter((IBaseFilter)source, "Audio Input pin (rendered)");

            devices = DsDevice.GetDevicesOfCat(FilterCategory.AudioInputDevice);
            device  = devices[dInput];
            // iid change?
            device.Mon.BindToObject(null, null, ref iid, out inputD);

            m_objFilterGraph.AddFilter((IBaseFilter)inputD, "Capture");

            int       result;
            IEnumPins pInputPin = null, pOutputPin = null; // Pin enumeration
            IPin      pIn = null, pOut = null;             // Pins

            try
            {
                IBaseFilter newI = (IBaseFilter)inputD;
                result = newI.EnumPins(out pInputPin);// Enumerate the pin
                if (result.Equals(0))
                {
                    // Get hold of the pin as seen in GraphEdit
                    newI.FindPin("Capture", out pIn);
                }
                IBaseFilter ibfO = (IBaseFilter)source;
                ibfO.EnumPins(out pOutputPin);//Enumerate the pin

                ibfO.FindPin("Audio Input pin (rendered)", out pOut);
                try
                {
                    // IPin::Connect must be called on the output pin, passing the input pin.
                    pOut.Connect(pIn, null);
                }
                catch (Exception ex)
                { Console.WriteLine(ex.Message); }
            }
            catch (Exception ex)
            { Console.WriteLine(ex.Message); }

            m_objBasicAudio   = m_objFilterGraph as IBasicAudio;
            m_objMediaControl = m_objFilterGraph as IMediaControl;
        }
Example #11
        void ConnectPins(IBaseFilter upFilter, string upName, IBaseFilter downFilter, string downName)
        {
            int          hr;
            IPin         pin1, pin2;
            PinDirection PinDirThis;

            if (upName == "CapturePin")
            {
                pin1 = captureDevOutPin;
            }
            else
            {
                hr = upFilter.FindPin(upName, out pin1);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
                hr = pin1.QueryDirection(out PinDirThis);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
                if (PinDirThis != PinDirection.Output)
                {
                    throw new Exception("Wrong upstream pin");
                }
            }
            //pin1 = GetPin(upFilter, PinDirection.Output);
            hr = downFilter.FindPin(downName, out pin2);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
            hr = pin2.QueryDirection(out PinDirThis);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
            if (PinDirThis != PinDirection.Input)
            {
                throw new Exception("Wrong downstream pin");
            }
            //pin2 = GetPin(downFilter, PinDirection.Input);
            hr = graphBuilder.Connect(pin1, pin2);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
        }
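
For context, a typical call sequence for this helper might look like the following; the filter variables and pin names are hypothetical:

            // Hypothetical usage: the special name "CapturePin" routes through the
            // cached captureDevOutPin; everything else is looked up with FindPin.
            ConnectPins(null, "CapturePin", encoderFilter, "Input");
            ConnectPins(encoderFilter, "Output", muxFilter, "Input 01");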
Example #12
        /// <summary>
        /// Add a DV Splitter filter and connect it to the DV source.  If the DV Splitter is already there, do nothing.
        /// </summary>
        /// <returns></returns>
        private bool AddDVSplitter()
        {
            if ((dvSplitter != null) &&
                (splitterAudioOut != null) &&
                (splitterVideoOut != null))
                return true;

            //Add a DVSplitter and connect it.
            try {
                dvSplitter = Filter.CreateBaseFilterByName("DV Splitter");
                iGB.AddFilter(dvSplitter, "DV Splitter");
                IPin dvSplitterInput;
                dvSplitter.FindPin("Input", out dvSplitterInput);
                iGB.Connect(source.OutputPin, dvSplitterInput);
            }
            catch (COMException) {
                dvSplitter = null;
                splitterVideoOut = splitterAudioOut = null;
                return false;
            }

            //Find output pins
            try {
                this.splitterVideoOut = Filter.GetPin(dvSplitter, _PinDirection.PINDIR_OUTPUT, Guid.Empty, Guid.Empty, true, 0);
                this.splitterAudioOut = Filter.GetPin(dvSplitter, _PinDirection.PINDIR_OUTPUT, Guid.Empty, Guid.Empty, true, 1);
            }
            catch (COMException) {
                dvSplitter = null;
                splitterVideoOut = splitterAudioOut = null;
                return false;
            }
            return true;
        }
Example #13
        /* // TODO: We need to update this
         * WTV Files Pin Mapping (pin name between ||)
         *  Audio       -> Source Pin |DVR Out - 1| -> PBDA DT Filter |In(Enc/Tag)| |Out| -> Dump |Input|
         *  Video       -> Source Pin |DVR Out - 2| -> PBDA DT Filter |In(Enc/Tag)| |Out| -> Dump |Input|
         *  Subtitle    -> Source Pin |DVR Out - 5| -> PBDA DT Filter |In(Enc/Tag)| |Out| -> Dump |Input|
         *
         * DVRMS Files Pin Mapping (pin name between ||)
         *  Audio       -> Source Pin |DVR Out - 1| -> Decrypt/Tag Filter |In(Enc/Tag)| |Out| -> Dump |Input|
         *  Video       -> Source Pin |DVR Out - 3| -> Decrypt/Tag Filter |In(Enc/Tag)| |Out| -> Dump |Input|
         *  Subtitle    -> Source Pin |DVR Out - 2| -> Decrypt/Tag Filter |In(Enc/Tag)| |Out| -> Dump |Input|
         */
        private void ConnectDecryptedDump(string sourceOutPinName, string DumpFileName)
        {
            int         hr;
            Type        comtype;
            IBaseFilter DecryptF;
            IPin        PinOut, PinIn;

            //Create the decrypt filter
            if (_CLSI_Decryptor != MediaType.Null)
            {
                _jobLog.WriteEntry(this, "Connecting Decryption filter", Log.LogEntryType.Debug);
                comtype  = Type.GetTypeFromCLSID(_CLSI_Decryptor);
                DecryptF = (IBaseFilter)Activator.CreateInstance(comtype);
                hr       = _gb.AddFilter((IBaseFilter)DecryptF, "Decrypt" + _gbFiltersCount++.ToString(CultureInfo.InvariantCulture));
                checkHR(hr);

                DecryptF.FindPin("In(Enc/Tag)", out PinIn);     // Get the decrypt filter pinIn |In(Enc/Tag)|
                _SourceF.FindPin(sourceOutPinName, out PinOut); // Get the Source filter pinOut (name taken from sourceOutPinName)

                try
                {
                    // Try to connect the decrypt filter if it is needed
                    hr = _gb.ConnectDirect(PinOut, PinIn, null); // Connect the source filter pinOut to the decrypt filter pinIn
                    checkHR(hr);
                    DecryptF.FindPin("Out", out PinOut);         // Get the Decrypt filter pinOut |Out| (for the next filter to connect to)
                }
                catch
                {
                    // Otherwise go direct
                    _SourceF.FindPin(sourceOutPinName, out PinOut); // Otherwise, go direct and get the source filter pinOut (name taken from sourceOutPinName) for the next filter to connect to
                }
            }
            else
            {
                _SourceF.FindPin(sourceOutPinName, out PinOut);  // Otherwise, go direct and get the source filter pinOut (name taken from sourceOutPinName) for the next filter to connect to
            }
            // Check if we need a Video Subtitle decoder (Line 21) (here use the Microsoft DTV decoder) - the subtitles are embedded in the Video stream

            /*if (UseVideoSubtitleDecoder)
             * {
             *  IBaseFilter SubtitleF;
             *
             *  // TODO: We need to add TEE splitter here and a new DUMP filter here and connect the tee output to the DTV decoder and then Line21 to Dump otherwise we end up with either video or Line21, we want both
             *  _jobLog.WriteEntry(this, "Connecting Video Subtitle Extraction filter", Log.LogEntryType.Debug);
             *  comtype = Type.GetTypeFromCLSID(CLSID_SubtitleDecoder);
             *  SubtitleF = (IBaseFilter)Activator.CreateInstance(comtype);
             *  hr = _gb.AddFilter((IBaseFilter)SubtitleF, "Subtitle" + _gbFilters.Count.ToString(CultureInfo.InvariantCulture));
             *  checkHR(hr);
             *  _gbFilters.Add(SubtitleF); // Keep track of filters to be released afterwards
             *
             *  // Get the subtitle filter pinIn |Video Input|
             *  SubtitleF.FindPin("Video Input", out PinIn);
             *
             *  // Try to connect the subtitle filter pinIn to the previous filter pinOut
             *  hr = _gb.ConnectDirect(PinOut, PinIn, null);
             *  checkHR(hr);
             *  SubtitleF.FindPin("~Line21 Output", out PinOut); // Get the new pinOut |~Line21 Output| from the subtitle filter for the next filter to connect to
             * }*/

            // Create the dump filter
            DumpFilter df = new DumpFilter();

            // Add the filter to the graph
            hr = _gb.AddFilter(df, "Dump" + _gbFiltersCount++.ToString(CultureInfo.InvariantCulture));
            checkHR(hr);

            // Set destination filename
            hr = df.SetFileName(DumpFileName, null);
            checkHR(hr);

            // Connect the dump filter pinIn |Input| to the previous filter pinOut
            _jobLog.WriteEntry(this, "Connecting MCEBuddy DumpStreams filter pins", Log.LogEntryType.Debug);
            hr = df.FindPin("Input", out PinIn);
            checkHR(hr);
            hr = _gb.ConnectDirect(PinOut, PinIn, null);
            checkHR(hr);

            _jobLog.WriteEntry(this, "All filters successfully connected", Log.LogEntryType.Debug);
        }
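
checkHR is not shown in this example; presumably it just converts a failed HRESULT into an exception, along these lines (a sketch, not the project's actual code):

        // Hypothetical checkHR helper, assuming it simply wraps DsError.
        private static void checkHR(int hr)
        {
            if (hr < 0)
            {
                DsError.ThrowExceptionForHR(hr);
            }
        }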
Example #14
        /// <summary>Connects together two graph filters.</summary>
        /// <param name="graph">The graph on which the filters exist.</param>
        /// <param name="source">The source filter.</param>
        /// <param name="outPinName">The name of the output pin on the source filter.</param>
        /// <param name="destination">The destination filter.</param>
        /// <param name="inPinName">The name of the input pin on the destination filter.</param>
        protected void Connect(IGraphBuilder graph, IBaseFilter source, string outPinName,
                               IBaseFilter destination, string inPinName)
        {
            IPin outPin = source.FindPin(outPinName);
            DisposalCleanup.Add(outPin);

            IPin inPin = destination.FindPin(inPinName);
            DisposalCleanup.Add(inPin);

            graph.Connect(outPin, inPin);
        }
Example #15
        private static void tビデオレンダラとその入力ピンを探して返す(IFilterGraph graph, out IBaseFilter videoRenderer, out IPin inputPin)
        {
            int    hr       = 0;
            string strフィルタ名 = null;
            string strピンID  = null;


            // Find the video renderer and its input pin, and note their filter name and pin ID.

            IEnumFilters eFilters;

            hr = graph.EnumFilters(out eFilters);
            DsError.ThrowExceptionForHR(hr);
            try
            {
                var filters = new IBaseFilter[1];
                while (eFilters.Next(1, filters, IntPtr.Zero) == CWin32.S_OK)
                {
                    try
                    {
                        #region [ Confirm the filter has no output pins (i.e. it is a renderer). ]
                        //-----------------
                        IEnumPins ePins;
                        bool      b出力ピンがある = false;

                        hr = filters[0].EnumPins(out ePins);
                        DsError.ThrowExceptionForHR(hr);
                        try
                        {
                            var pins = new IPin[1];
                            while (ePins.Next(1, pins, IntPtr.Zero) == CWin32.S_OK)
                            {
                                try
                                {
                                    if (b出力ピンがある)
                                    {
                                        continue;
                                    }

                                    PinDirection dir;
                                    hr = pins[0].QueryDirection(out dir);
                                    DsError.ThrowExceptionForHR(hr);
                                    if (dir == PinDirection.Output)
                                    {
                                        b出力ピンがある = true;
                                    }
                                }
                                finally
                                {
                                    CCommon.tReleaseComObject(ref pins[0]);
                                }
                            }
                        }
                        finally
                        {
                            CCommon.tReleaseComObject(ref ePins);
                        }

                        if (b出力ピンがある)
                        {
                            continue;                                   // on to the next filter
                        }
                        //-----------------
                        #endregion
                        #region [ If a connected input pin carries MEDIATYPE_Video, record the filter name and pin ID. ]
                        //-----------------
                        hr = filters[0].EnumPins(out ePins);
                        DsError.ThrowExceptionForHR(hr);
                        try
                        {
                            var pins = new IPin[1];
                            while (ePins.Next(1, pins, IntPtr.Zero) == CWin32.S_OK)
                            {
                                try
                                {
                                    if (!string.IsNullOrEmpty(strフィルタ名))
                                    {
                                        continue;
                                    }

                                    var mediaType = new AMMediaType();

                                    #region [ Get the media type of the current connection; if the pin is not connected, move to the next pin. ]
                                    //-----------------
                                    hr = pins[0].ConnectionMediaType(mediaType);
                                    if (hr == CWin32.VFW_E_NOT_CONNECTED)
                                    {
                                        continue;                                               // not connected
                                    }
                                    DsError.ThrowExceptionForHR(hr);
                                    //-----------------
                                    #endregion

                                    try
                                    {
                                        if (mediaType.majorType.Equals(MediaType.Video))
                                        {
                                            #region [ Got the filter name! ]
                                            //-----------------
                                            FilterInfo filterInfo;
                                            hr = filters[0].QueryFilterInfo(out filterInfo);
                                            DsError.ThrowExceptionForHR(hr);
                                            strフィルタ名 = filterInfo.achName;
                                            CCommon.tReleaseComObject(ref filterInfo.pGraph);
                                            //-----------------
                                            #endregion
                                            #region [ Got the pin ID! ]
                                            //-----------------
                                            hr = pins[0].QueryId(out strピンID);
                                            DsError.ThrowExceptionForHR(hr);
                                            //-----------------
                                            #endregion

                                            continue;                                                   // on to the next pin
                                        }
                                    }
                                    finally
                                    {
                                        DsUtils.FreeAMMediaType(mediaType);
                                    }
                                }
                                finally
                                {
                                    CCommon.tReleaseComObject(ref pins[0]);
                                }
                            }
                        }
                        finally
                        {
                            CCommon.tReleaseComObject(ref ePins);
                        }

                        //-----------------
                        #endregion
                    }
                    finally
                    {
                        CCommon.tReleaseComObject(ref filters[0]);
                    }
                }
            }
            finally
            {
                CCommon.tReleaseComObject(ref eFilters);
            }


            // Re-acquire both interfaces from the saved filter name and pin ID, and return them.

            videoRenderer = null;
            inputPin      = null;

            if (!string.IsNullOrEmpty(strフィルタ名))
            {
                hr = graph.FindFilterByName(strフィルタ名, out videoRenderer);
                DsError.ThrowExceptionForHR(hr);

                hr = videoRenderer.FindPin(strピンID, out inputPin);
                DsError.ThrowExceptionForHR(hr);
            }
        }
Example #16
        /*
         * protected void InitAudioSampleGrabber()
         * {
         *  // Get the graph builder
         *  IGraphBuilder graphBuilder = (mediaControl as IGraphBuilder);
         *  if (graphBuilder == null)
         *      return;
         *
         *  try
         *  {
         *      // Build the sample grabber
         *      sampleGrabber = Activator.CreateInstance(Type.GetTypeFromCLSID(Filters.SampleGrabber, true))
         *          as ISampleGrabber;
         *
         *      if (sampleGrabber == null)
         *          return;
         *
         *      // Add it to the filter graph
         *      int hr = graphBuilder.AddFilter(sampleGrabber as IBaseFilter, "ProTONE_SampleGrabber");
         *      DsError.ThrowExceptionForHR(hr);
         *
         *      AMMediaType mtAudio = new AMMediaType();
         *      mtAudio.majorType = MediaType.Audio;
         *      mtAudio.subType = MediaSubType.PCM;
         *      mtAudio.formatPtr = IntPtr.Zero;
         *
         *      _actualAudioFormat = null;
         *
         *      hr = sampleGrabber.SetMediaType(mtAudio);
         *      DsError.ThrowExceptionForHR(hr);
         *
         *      hr = sampleGrabber.SetBufferSamples(true);
         *      DsError.ThrowExceptionForHR(hr);
         *
         *      hr = sampleGrabber.SetOneShot(false);
         *      DsError.ThrowExceptionForHR(hr);
         *
         *      hr = sampleGrabber.SetCallback(this, 1);
         *      DsError.ThrowExceptionForHR(hr);
         *
         *      sampleAnalyzerMustStop.Reset();
         *      sampleAnalyzerThread = new Thread(new ThreadStart(SampleAnalyzerLoop));
         *      sampleAnalyzerThread.Priority = ThreadPriority.Highest;
         *      sampleAnalyzerThread.Start();
         *  }
         *  catch(Exception ex)
         *  {
         *      Logger.LogException(ex);
         *  }
         *
         *  rotEntry = new DsROTEntry(graphBuilder as IFilterGraph);
         * }*/

        protected void InitAudioSampleGrabber_v2()
        {
            // Get the graph builder
            IGraphBuilder graphBuilder = (mediaControl as IGraphBuilder);

            if (graphBuilder == null)
            {
                return;
            }

            try
            {
                // Build the sample grabber
                sampleGrabber = Activator.CreateInstance(Type.GetTypeFromCLSID(Filters.SampleGrabber, true))
                                as ISampleGrabber;

                if (sampleGrabber == null)
                {
                    return;
                }

                // Add it to the filter graph
                int hr = graphBuilder.AddFilter(sampleGrabber as IBaseFilter, "ProTONE_SampleGrabber_v2");
                DsError.ThrowExceptionForHR(hr);

                IBaseFilter ffdAudioDecoder = null;

                IPin   ffdAudioDecoderOutput = null;
                IPin   soundDeviceInput      = null;
                IPin   sampleGrabberInput    = null;
                IPin   sampleGrabberOutput   = null;
                IntPtr pSoundDeviceInput     = IntPtr.Zero;

                // When using FFDShow, typically we'll find
                // a ffdshow Audio Decoder connected to the sound device filter
                //
                // i.e. [ffdshow Audio Decoder] --> [DirectSound Device]
                //
                // Our audio sample grabber supports only PCM sample input and output.
                // Its entire processing is based on this assumption.
                //
                // Thus we need to insert the audio sample grabber between the ffdshow Audio Decoder and the sound device
                // because this is the only place where we can find PCM samples. The sound device only accepts PCM.
                //
                // So we need to turn this graph:
                //
                // .. -->[ffdshow Audio Decoder]-->[DirectSound Device]
                //
                // into this:
                //
                // .. -->[ffdshow Audio Decoder]-->[Sample grabber]-->[DirectSound Device]
                //
                // Actions to do to achieve the graph change:
                //
                // 1. Locate the ffdshow Audio Decoder in the graph
                // 2. Find its output pin and the pin that it's connected to
                // 3. Locate the input and output pins of sample grabber
                // 4. Disconnect the ffdshow Audio Decoder and its correspondent (sound device input pin)
                // 5. Connect the ffdshow Audio Decoder to sample grabber input
                // 6. Connect the sample grabber output to sound device input
                // that's all.

                // --------------
                // 1. Locate the ffdshow Audio Decoder in the graph
                hr = graphBuilder.FindFilterByName("ffdshow Audio Decoder", out ffdAudioDecoder);
                DsError.ThrowExceptionForHR(hr);

                // 2. Find its output pin and the pin that it's connected to
                hr = ffdAudioDecoder.FindPin("Out", out ffdAudioDecoderOutput);
                DsError.ThrowExceptionForHR(hr);

                hr = ffdAudioDecoderOutput.ConnectedTo(out pSoundDeviceInput);
                DsError.ThrowExceptionForHR(hr);

                soundDeviceInput = new DSPin(pSoundDeviceInput).Value;

                // 3. Locate the input and output pins of sample grabber
                hr = (sampleGrabber as IBaseFilter).FindPin("In", out sampleGrabberInput);
                DsError.ThrowExceptionForHR(hr);

                hr = (sampleGrabber as IBaseFilter).FindPin("Out", out sampleGrabberOutput);
                DsError.ThrowExceptionForHR(hr);

                // 4. Disconnect the ffdshow Audio Decoder and its correspondent (sound device input pin)
                hr = ffdAudioDecoderOutput.Disconnect();
                DsError.ThrowExceptionForHR(hr);

                hr = soundDeviceInput.Disconnect();
                DsError.ThrowExceptionForHR(hr);

                // 5. Connect the ffdshow Audio Decoder to sample grabber input
                hr = graphBuilder.Connect(ffdAudioDecoderOutput, sampleGrabberInput);
                DsError.ThrowExceptionForHR(hr);

                // 6. Connect the sample grabber output to sound device input
                hr = graphBuilder.Connect(sampleGrabberOutput, soundDeviceInput);
                DsError.ThrowExceptionForHR(hr);


                AMMediaType mtAudio = new AMMediaType();
                mtAudio.majorType = MediaType.Audio;
                mtAudio.subType   = MediaSubType.PCM;
                mtAudio.formatPtr = IntPtr.Zero;

                _actualAudioFormat = null;

                sampleGrabber.SetMediaType(mtAudio);
                sampleGrabber.SetBufferSamples(true);
                sampleGrabber.SetOneShot(false);
                sampleGrabber.SetCallback(this, 1);

                sampleAnalyzerMustStop.Reset();
                sampleAnalyzerThread          = new Thread(new ThreadStart(SampleAnalyzerLoop));
                sampleAnalyzerThread.Priority = ThreadPriority.Highest;
                sampleAnalyzerThread.Start();
            }
            catch (Exception ex)
            {
                Logger.LogException(ex);
            }

            rotEntry = new DsROTEntry(graphBuilder as IFilterGraph);
        }
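
DSPin is a wrapper specific to this project: with the interop binding used here, ConnectedTo returns a raw IUnknown pointer rather than an IPin. Converting such a pointer into a usable runtime callable wrapper is presumably what DSPin does internally, roughly like this sketch:

                // Sketch: turn a raw pin pointer into an IPin RCW, which is roughly
                // what the DSPin wrapper above is presumed to do.
                IPin pinFromPtr = (IPin)Marshal.GetObjectForIUnknown(pSoundDeviceInput);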
Example #17
        void ConnectPins(IBaseFilter upFilter, string upName, IBaseFilter downFilter, string downName)
        {
            int hr;
            IPin pin1, pin2;
            PinDirection PinDirThis;
            if (upName == "CapturePin")
            {
                pin1 = captureDevOutPin;
            }
            else
            {
                hr = upFilter.FindPin(upName, out pin1);
                if (hr < 0)
                    Marshal.ThrowExceptionForHR(hr);
                hr = pin1.QueryDirection(out PinDirThis);
                if (hr < 0)
                    Marshal.ThrowExceptionForHR(hr);
                if (PinDirThis != PinDirection.Output)
                    throw new Exception("Wrong upstream pin");
            }
            //pin1 = GetPin(upFilter, PinDirection.Output);
            hr = downFilter.FindPin(downName, out pin2);
            if (hr < 0)
                Marshal.ThrowExceptionForHR(hr);
            hr = pin2.QueryDirection(out PinDirThis);
            if (hr < 0)
                Marshal.ThrowExceptionForHR(hr);
            if (PinDirThis != PinDirection.Input)
                throw new Exception("Wrong downstream pin");
            //pin2 = GetPin(downFilter, PinDirection.Input);
            hr = graphBuilder.Connect(pin1, pin2);
            if (hr < 0)
                Marshal.ThrowExceptionForHR(hr);
        }
Example #18
        public void SetDeinterlaceMode()
        {
            if (!GUIGraphicsContext.IsEvr)
            {
                if (!_isVmr9Initialized)
                {
                    return;
                }
                Log.Debug("VMR9: SetDeinterlaceMode()");
                IVMRDeinterlaceControl9 deinterlace = (IVMRDeinterlaceControl9)_vmr9Filter;
                IPin InPin = null;
                int  hr    = _vmr9Filter.FindPin("VMR Input0", out InPin);
                if (hr != 0 || InPin == null)
                {
                    Log.Error("VMR9: failed finding InPin {0:X}", hr);
                    return;          // nothing to inspect without the input pin
                }
                AMMediaType mediatype = new AMMediaType();
                InPin.ConnectionMediaType(mediatype);
                //Start by getting the media type of the video stream.
                //Only VideoInfoHeader2 formats can be interlaced.
                if (mediatype.formatType == FormatType.VideoInfo2)
                {
                    Log.Debug("VMR9: SetDeinterlaceMode - FormatType = VideoInfo2");
                    int numModes = 0;
                    VideoInfoHeader2 VideoHeader2 = new VideoInfoHeader2();
                    Marshal.PtrToStructure(mediatype.formatPtr, VideoHeader2);
                    VMR9VideoDesc VideoDesc = new VMR9VideoDesc();
                    // If the FormatType is VideoInfo2, check the dwInterlaceFlags field for the AMInterlace.IsInterlaced flag.
                    //The presence of this flag indicates the video is interlaced.
                    if ((VideoHeader2.InterlaceFlags & AMInterlace.IsInterlaced) != 0)
                    {
                        Log.Debug("VMR9: SetDeinterlaceMode - Interlaced frame detected");
                        //Fill in the VMR9VideoDesc structure with a description of the video stream.
                        VideoDesc.dwSize         = Marshal.SizeOf(VideoDesc);    // dwSize: Set this field to sizeof(VMR9VideoDesc).
                        VideoDesc.dwSampleWidth  = VideoHeader2.BmiHeader.Width; // dwSampleWidth: Set this field to pBMI->biWidth.
                        VideoDesc.dwSampleHeight = VideoHeader2.BmiHeader.Height;
                        // dwSampleHeight: Set this field to abs(pBMI->biHeight).
                        //SampleFormat: This field describes the interlace characteristics of the media type.
                        //Check the dwInterlaceFlags field in the VIDEOINFOHEADER2 structure, and set SampleFormat equal to the equivalent VMR9_SampleFormat flag.
                        if ((VideoHeader2.InterlaceFlags & AMInterlace.IsInterlaced) != 0)
                        {
                            if ((VideoHeader2.InterlaceFlags & AMInterlace.DisplayModeBobOnly) == 0)
                            {
                                VideoDesc.SampleFormat = VMR9SampleFormat.ProgressiveFrame;
                            }
                            if ((VideoHeader2.InterlaceFlags & AMInterlace.OneFieldPerSample) != 0)
                            {
                                if ((VideoHeader2.InterlaceFlags & AMInterlace.Field1First) != 0)
                                {
                                    VideoDesc.SampleFormat = VMR9SampleFormat.FieldSingleEven;
                                }
                                else
                                {
                                    VideoDesc.SampleFormat = VMR9SampleFormat.FieldSingleOdd;
                                }
                            }
                            if ((VideoHeader2.InterlaceFlags & AMInterlace.Field1First) != 0)
                            {
                                VideoDesc.SampleFormat = VMR9SampleFormat.FieldInterleavedEvenFirst;
                            }
                            else
                            {
                                VideoDesc.SampleFormat = VMR9SampleFormat.FieldInterleavedOddFirst;
                            }
                        }
                        //InputSampleFreq: This field gives the input frequency, which can be calculated from the AvgTimePerFrame field in the VIDEOINFOHEADER2 structure.
                        //In the general case, set dwNumerator to 10000000, and set dwDenominator to AvgTimePerFrame.
                        VideoDesc.InputSampleFreq.dwNumerator   = 10000000;
                        VideoDesc.InputSampleFreq.dwDenominator = (int)VideoHeader2.AvgTimePerFrame;
                        //OutputFrameFreq: This field gives the output frequency, which can be calculated from the InputSampleFreq value and the interleaving characteristics of the input stream:
                        //Set OutputFrameFreq.dwDenominator equal to InputSampleFreq.dwDenominator.
                        //If the input video is interleaved, set OutputFrameFreq.dwNumerator to 2 x InputSampleFreq.dwNumerator. (After deinterlacing, the frame rate is doubled.)
                        //Otherwise, set the value to InputSampleFreq.dwNumerator.
                        VideoDesc.OutputFrameFreq.dwNumerator   = 2 * 10000000;
                        VideoDesc.OutputFrameFreq.dwDenominator = (int)VideoHeader2.AvgTimePerFrame;
                        VideoDesc.dwFourCC = VideoHeader2.BmiHeader.Compression; //dwFourCC: Set this field to pBMI->biCompression.
                        //Pass the structure to the IVMRDeinterlaceControl9::GetNumberOfDeinterlaceModes method.
                        //Call the method twice. The first call returns the number of deinterlace modes the hardware supports for the specified format.
                        hr = deinterlace.GetNumberOfDeinterlaceModes(ref VideoDesc, ref numModes, null);
                        if (hr == 0 && numModes != 0)
                        {
                            Guid[] modes = new Guid[numModes];
                            {
                                //Allocate an array of GUIDs of this size, and call the method again, passing in the address of the array.
                                //The second call fills the array with GUIDs. Each GUID identifies one deinterlacing mode.
                                hr = deinterlace.GetNumberOfDeinterlaceModes(ref VideoDesc, ref numModes, modes);
                                for (int i = 0; i < numModes; i++)
                                {
                                    //To get the capabilities of a particular mode, call the IVMRDeinterlaceControl9::GetDeinterlaceModeCaps method.
                                    //Pass in the same VMR9VideoDesc structure, along with one of the GUIDs from the array.
                                    //The method fills a VMR9DeinterlaceCaps structure with the mode capabilities.
                                    VMR9DeinterlaceCaps caps = new VMR9DeinterlaceCaps();
                                    caps.dwSize = Marshal.SizeOf(typeof(VMR9DeinterlaceCaps));
                                    hr          = deinterlace.GetDeinterlaceModeCaps(modes[i], ref VideoDesc, ref caps);
                                    if (hr == 0)
                                    {
                                        Log.Debug("VMR9: AvailableDeinterlaceMode - {0}: {1}", i, modes[i]);
                                        switch (caps.DeinterlaceTechnology)
                                        {
                                        //The algorithm is unknown or proprietary
                                        case VMR9DeinterlaceTech.Unknown:
                                        {
                                            Log.Info("VMR9: Unknown H/W de-interlace mode");
                                            break;
                                        }

                                        //The algorithm creates each missing line by repeating the line above it or below it.
                                        //This method creates jagged artifacts and is not recommended.
                                        case VMR9DeinterlaceTech.BOBLineReplicate:
                                        {
                                            Log.Info("VMR9: BOB Line Replicate capable");
                                            break;
                                        }

                                        //The algorithm creates the missing lines by vertically stretching each video field by a factor of two.
                                        //For example, it might average two lines or use a (-1, 9, 9, -1)/16 filter across four lines.
                                        //Slight vertical adjustments are made to ensure that the resulting image does not "bob" up and down
                                        case VMR9DeinterlaceTech.BOBVerticalStretch:
                                        {
                                            Log.Info("VMR9: BOB Vertical Stretch capable");
                                            verticalStretch = modes[i].ToString();
                                            break;
                                        }

                                        //The algorithm uses median filtering to recreate the pixels in the missing lines.
                                        case VMR9DeinterlaceTech.MedianFiltering:
                                        {
                                            Log.Info("VMR9: Median Filtering capable");
                                            medianFiltering = modes[i].ToString();
                                            break;
                                        }

                                        //The algorithm uses an edge filter to create the missing lines.
                                        //In this process, spatial directional filters are applied to determine the orientation of edges in the picture content.
                                        //Missing pixels are created by filtering along (rather than across) the detected edges.
                                        case VMR9DeinterlaceTech.EdgeFiltering:
                                        {
                                            Log.Info("VMR9: Edge Filtering capable");
                                            break;
                                        }

                                        //The algorithm uses spatial or temporal interpolation, switching between the two on a field-by-field basis, depending on the amount of motion.
                                        case VMR9DeinterlaceTech.FieldAdaptive:
                                        {
                                            Log.Info("VMR9: Field Adaptive capable");
                                            break;
                                        }

                                        //The algorithm uses spatial or temporal interpolation, switching between the two on a pixel-by-pixel basis, depending on the amount of motion.
                                        case VMR9DeinterlaceTech.PixelAdaptive:
                                        {
                                            Log.Info("VMR9: Pixel Adaptive capable");
                                            pixelAdaptive = modes[i].ToString();
                                            break;
                                        }

                                        //The algorithm identifies objects within a sequence of video fields.
                                        //Before it recreates the missing pixels, it aligns the movement axes of the individual objects in the scene to make them parallel with the time axis.
                                        case VMR9DeinterlaceTech.MotionVectorSteered:
                                        {
                                            Log.Info("VMR9: Motion Vector Steered capable");
                                            break;
                                        }
                                        }
                                    }
                                }
                            }
                            //Set the MP preferred h/w de-interlace modes in order of quality
                            //pixel adaptive, then median filtering & finally vertical stretch
                            if (pixelAdaptive != "")
                            {
                                Guid DeinterlaceMode = new Guid(pixelAdaptive);
                                Log.Debug("VMR9: trying pixel adaptive");
                                hr = deinterlace.SetDeinterlaceMode(0, DeinterlaceMode);
                                if (hr != 0)
                                {
                                    Log.Error("VMR9: pixel adaptive failed!");
                                }
                                else
                                {
                                    Log.Info("VMR9: setting pixel adaptive succeeded");
                                    medianFiltering = "";
                                    verticalStretch = "";
                                }
                            }
                            if (medianFiltering != "")
                            {
                                Guid DeinterlaceMode = new Guid(medianFiltering);
                                Log.Debug("VMR9: trying median filtering");
                                hr = deinterlace.SetDeinterlaceMode(0, DeinterlaceMode);
                                if (hr != 0)
                                {
                                    Log.Error("VMR9: median filtering failed!");
                                }
                                else
                                {
                                    Log.Info("VMR9: setting median filtering succeeded");
                                    verticalStretch = "";
                                }
                            }
                            if (verticalStretch != "")
                            {
                                Guid DeinterlaceMode = new Guid(verticalStretch);
                                Log.Debug("VMR9: trying vertical stretch");
                                hr = deinterlace.SetDeinterlaceMode(0, DeinterlaceMode);
                                if (hr != 0)
                                {
                                    Log.Error("VMR9: Cannot set H/W de-interlace mode - using VMR9 fallback");
                                }
                                else
                                {
                                    Log.Info("VMR9: setting vertical stretch succeeded");
                                }
                            }
                        }
                        else
                        {
                            Log.Info("VMR9: No H/W de-interlaced modes supported, using fallback preference");
                        }
                    }
                    else
                    {
                        Log.Info("VMR9: progressive mode detected - no need to de-interlace");
                    }
                }
                //If the format type is VideoInfo, it must be a progressive frame.
                else
                {
                    Log.Info("VMR9: no need to de-interlace this video source");
                }
                DsUtils.FreeAMMediaType(mediatype);
                //release the VMR9 pin
                hr = DirectShowUtil.ReleaseComObject(InPin);

                InPin     = null;
                mediatype = null;
            }
        }
Example #20
        public static void tオーディオレンダラをNullレンダラに変えてフォーマットを取得する(IGraphBuilder graphBuilder, out WaveFormat wfx, out byte[] wfx拡張データ)
        {
            int hr = 0;

            IBaseFilter audioRenderer              = null;
            IPin        rendererInputPin           = null;
            IPin        rendererConnectedOutputPin = null;
            IBaseFilter nullRenderer         = null;
            IPin        nullRendererInputPin = null;

            wfx      = null;
            wfx拡張データ = new byte[0];

            try
            {
                // Find the audioRenderer.

                audioRenderer = CDirectShow.tオーディオレンダラを探して返す(graphBuilder);
                if (audioRenderer == null)
                {
                    return;                             // not found
                }
                #region [ Play the graph once at zero volume. (The audio renderer's input-pin MediaType may change to the "correct" one, different from what it was at connection time.) ]
                //-----------------
                {
                    // By this point, the graph's video renderer must already have been disabled (replaced with a NullRenderer, removed, etc.).
                    // Otherwise an Active window flashes up for an instant during StopWhenReady().

                    var mediaCtrl  = (IMediaControl)graphBuilder;
                    var basicAudio = (IBasicAudio)graphBuilder;

                    basicAudio.put_Volume(-10000);                              // minimum volume


                    // Run the graph and stop it immediately (transitions Paused → Stopped).

                    mediaCtrl.StopWhenReady();


                    // Wait until the graph has finished transitioning to Stopped. (StopWhenReady() returns without waiting for the graph to stop.)

                    FilterState fs = FilterState.Paused;
                    hr = CWin32.S_FALSE;
                    while (fs != FilterState.Stopped || hr != CWin32.S_OK)
                    {
                        hr = mediaCtrl.GetState(10, out fs);
                    }


                    // Cleanup.

                    basicAudio.put_Volume(0);                                           // maximum volume

                    basicAudio = null;
                    mediaCtrl  = null;
                }
                //-----------------
                #endregion

                // Find audioRenderer's input pin.

                rendererInputPin = t最初の入力ピンを探して返す(audioRenderer);
                if (rendererInputPin == null)
                {
                    return;
                }


                // Get the WAVE format and store it in the wfx argument.

                var type = new AMMediaType();
                hr = rendererInputPin.ConnectionMediaType(type);
                DsError.ThrowExceptionForHR(hr);
                try
                {
                    wfx = new WaveFormat();

                    #region [ Copy the data from type.formatPtr into wfx, excluding the extension area. ]
                    //-----------------
                    var wfxTemp = new WaveFormatEx();                           // SharpDX.Multimedia.WaveFormat cannot be used with Marshal.PtrToStructure(), so read it via DirectShowLib.WaveFormatEx, which can. (Tedious...)
                    Marshal.PtrToStructure(type.formatPtr, (object)wfxTemp);

                    wfx = WaveFormat.CreateCustomFormat((WaveFormatEncoding)wfxTemp.wFormatTag, wfxTemp.nSamplesPerSec, wfxTemp.nChannels, wfxTemp.nAvgBytesPerSec, wfxTemp.nBlockAlign, wfxTemp.wBitsPerSample);
                    //-----------------
                    #endregion
                    #region [ If an extension area exists, store it in wfx拡張データ. ]
                    //-----------------
                    int nWaveFormatEx本体サイズ = 16 + 2;                     // sizeof( WAVEFORMAT ) + sizeof( WAVEFORMATEX.cbSize )
                    int nはみ出しサイズbyte       = type.formatSize - nWaveFormatEx本体サイズ;

                    if (nはみ出しサイズbyte > 0)
                    {
                        wfx拡張データ = new byte[nはみ出しサイズbyte];
                        var hGC = GCHandle.Alloc(wfx拡張データ, GCHandleType.Pinned);                                // pinned so the GC cannot move it
                        unsafe
                        {
                            byte *src = (byte *)type.formatPtr.ToPointer();
                            byte *dst = (byte *)hGC.AddrOfPinnedObject().ToPointer();
                            CWin32.CopyMemory(dst, src + nWaveFormatEx本体サイズ, (uint)nはみ出しサイズbyte);
                        }
                        hGC.Free();
                    }
                    //-----------------
                    #endregion
                }
                finally
                {
                    if (type != null)
                    {
                        DsUtils.FreeAMMediaType(type);
                    }
                }


                // Find the upstream output pin connected to audioRenderer.

                hr = rendererInputPin.ConnectedTo(out rendererConnectedOutputPin);
                DsError.ThrowExceptionForHR(hr);


                // Disconnect audioRenderer from the graph.

                rendererInputPin.Disconnect();
                rendererConnectedOutputPin.Disconnect();


                // Remove audioRenderer from the graph.

                hr = graphBuilder.RemoveFilter(audioRenderer);
                DsError.ThrowExceptionForHR(hr);


                // Create nullRenderer and add it to the graph.

                nullRenderer = (IBaseFilter) new NullRenderer();
                hr           = graphBuilder.AddFilter(nullRenderer, "Audio Null Renderer");
                DsError.ThrowExceptionForHR(hr);


                // Find nullRenderer's input pin.

                hr = nullRenderer.FindPin("In", out nullRendererInputPin);
                DsError.ThrowExceptionForHR(hr);


                // Connect nullRenderer into the graph.

                hr = rendererConnectedOutputPin.Connect(nullRendererInputPin, null);
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                CCommon.tReleaseComObject(ref nullRendererInputPin);
                CCommon.tReleaseComObject(ref nullRenderer);
                CCommon.tReleaseComObject(ref rendererConnectedOutputPin);
                CCommon.tReleaseComObject(ref rendererInputPin);
                CCommon.tReleaseComObject(ref audioRenderer);
            }
        }
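
A call site for this helper would look like the following sketch; it assumes the method lives on the same CDirectShow class as the tオーディオレンダラを探して返す lookup it uses, and the variable names are hypothetical:

            // Hypothetical usage: swap the audio renderer for a Null Renderer and
            // read back the negotiated WAVE format of the audio stream.
            WaveFormat wfx;
            byte[]     wfxExtra;
            CDirectShow.tオーディオレンダラをNullレンダラに変えてフォーマットを取得する(graphBuilder, out wfx, out wfxExtra);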