Example #1
        /// <summary>
        /// Sets the number of DVD buttons found in the current DVD video
        /// </summary>
        /// <param name="buttonCount">The total number of buttons</param>
        private void SetDvdButtonCount(int buttonCount)
        {
            m_dvdButtonCount = buttonCount;

            if (m_dvdButtonCount == 0)
            {
                m_isOverButton = false;
                InvokeOnOverDvdButton(m_isOverButton);
            }

            var mediaType = new AMMediaType();

            m_dummyRendererPin.ConnectionMediaType(mediaType);

            /* Check to see if it's a video media type */
            if (mediaType.formatType != FormatType.VideoInfo2 &&
                mediaType.formatType != FormatType.VideoInfo)
            {
                DsUtils.FreeAMMediaType(mediaType);
                return;
            }

            var videoInfo = new VideoInfoHeader();

            /* Read the video info header struct from the native pointer */
            Marshal.PtrToStructure(mediaType.formatPtr, videoInfo);

            /* Get the target rect */
            m_renderTargetRect = videoInfo.TargetRect.ToRectangle();

            DsUtils.FreeAMMediaType(mediaType);
        }
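
A recurring pattern in these examples: ConnectionMediaType only fills the structure when the pin is actually connected, and the AMMediaType must always be freed afterwards. A minimal defensive sketch of the same check as above (assumes the DirectShowLib types already used here; pin stands for any IPin):

        var mt = new AMMediaType();
        try
        {
            int hr = pin.ConnectionMediaType(mt);
            if (hr != 0)
            {
                return; // not connected; the structure was not filled in
            }

            if (mt.formatType == FormatType.VideoInfo ||
                mt.formatType == FormatType.VideoInfo2)
            {
                // mt.formatPtr points to a VIDEOINFOHEADER(2); marshal it here
            }
        }
        finally
        {
            DsUtils.FreeAMMediaType(mt); // safe even if the call failed
        }
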
Example #2
        private void TestDynamicQueryAccept()
        {
            int hr;

            IPin        iPin = m_pc as IPin;
            AMMediaType pmt  = new AMMediaType();

            hr = iPin.ConnectionMediaType(pmt);
            DsError.ThrowExceptionForHR(hr);

            hr = m_pc.DynamicQueryAccept(pmt);
            DsError.ThrowExceptionForHR(hr);

            DsUtils.FreeAMMediaType(pmt);
        }
Example #3
        private void logMediaTypes(IPin pin)
        {
            IEnumMediaTypes mediaTypes = null;

            AMMediaType[] mediaType = new AMMediaType[1];

            AMMediaType connectedMediaType = new AMMediaType();

            reply = pin.ConnectionMediaType(connectedMediaType);

            reply = pin.EnumMediaTypes(out mediaTypes);
            if (reply != 0)
            {
                LogMessage("Media types cannot be determined at this time (not connected yet?)");
                DsUtils.FreeAMMediaType(connectedMediaType);
                return;
            }

            while (mediaTypes.Next(mediaType.Length, mediaType, IntPtr.Zero) == 0)
            {
                foreach (AMMediaType currentMediaType in mediaType)
                {
                    PinInfo pinInfo;
                    reply = pin.QueryPinInfo(out pinInfo);
                    DsError.ThrowExceptionForHR(reply);
                    DsUtils.FreePinInfo(pinInfo); // release the filter reference held inside PinInfo

                    string majorType = TranslateMediaMajorType(currentMediaType.majorType);
                    string subType   = TranslateMediaSubType(currentMediaType.subType);

                    string connectedComment;

                    if (currentMediaType.majorType == connectedMediaType.majorType && currentMediaType.subType == connectedMediaType.subType)
                    {
                        connectedComment = "** Connected **";
                    }
                    else
                    {
                        connectedComment = string.Empty;
                    }

                    LogMessage("Media type: " +
                               majorType + " ; " +
                               subType + " " +
                               currentMediaType.fixedSizeSamples + " " +
                               currentMediaType.sampleSize + " " +
                               connectedComment);

                    DsUtils.FreeAMMediaType(currentMediaType); // each enumerated type must be freed
                }
            }

            Marshal.ReleaseComObject(mediaTypes);
            DsUtils.FreeAMMediaType(connectedMediaType);
        }
Example #4
        /// <summary>
        /// Gets the video resolution of a pin on a renderer.
        /// </summary>
        /// <param name="renderer">The renderer to inspect</param>
        /// <param name="direction">The direction the pin is</param>
        /// <param name="pinIndex">The zero based index of the pin to inspect</param>
        /// <returns>If successful a video resolution is returned.  If not, a 0x0 size is returned</returns>
        protected static Size GetVideoSize(IBaseFilter renderer, PinDirection direction, int pinIndex)
        {
            var size = new Size();

            var  mediaType = new AMMediaType();
            IPin pin       = DsFindPin.ByDirection(renderer, direction, pinIndex);

            if (pin == null)
            {
                goto done;
            }

            int hr = pin.ConnectionMediaType(mediaType);

            if (hr != 0)
            {
                goto done;
            }

            /* Check to see if it's a video media type */
            if (mediaType.formatType != FormatType.VideoInfo2 &&
                mediaType.formatType != FormatType.VideoInfo)
            {
                goto done;
            }

            var videoInfo = new VideoInfoHeader();

            /* Read the video info header struct from the native pointer */
            Marshal.PtrToStructure(mediaType.formatPtr, videoInfo);

            Rectangle rect = videoInfo.SrcRect.ToRectangle();

            size = new Size(rect.Width, rect.Height);

done:
            DsUtils.FreeAMMediaType(mediaType);

            if (pin != null)
            {
                Marshal.ReleaseComObject(pin);
            }
            return(size);
        }
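
A call site for the helper above might look like this (a sketch; m_renderer is a hypothetical field holding the renderer filter):

        // Query the renderer's first input pin for the native video size.
        Size nativeSize = GetVideoSize(m_renderer, PinDirection.Input, 0);
        if (nativeSize.Width == 0 || nativeSize.Height == 0)
        {
            // pin missing, not connected, or not a video format
        }
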
Example #5
        /// <summary>
        /// Gets the major media type of the pin's preferred or connected media type
        /// </summary>
        /// <param name="pin">The pin to inspect</param>
        /// <returns>The mapped pin type, or typeof(PinDataTypes.Unknown) if it cannot be determined</returns>
        private Type GetPinMajorMediaType(IPin pin)
        {
            Type        t       = null;
            AMMediaType contype = new AMMediaType();
            int         hr      = pin.ConnectionMediaType(contype);

            if (hr == 0)
            {
                t = PinDataTypes.GetMajorPinType(contype.majorType);
                DsUtils.FreeAMMediaType(contype);
                return(t);
            }
            else
            {
                // wasn't connected, enumerate the prefered media types and get the major type of the first one
                IEnumMediaTypes penum = null;
                hr = pin.EnumMediaTypes(out penum);
                if (hr == 0 && penum != null)
                {
                    AMMediaType[] mtypes  = new AMMediaType[1];
                    IntPtr        fetched = Marshal.AllocCoTaskMem(4);
                    try
                    {
                        if (penum.Next(1, mtypes, fetched) == 0)
                        {
                            t = PinDataTypes.GetMajorPinType(mtypes[0].majorType);
                            DsUtils.FreeAMMediaType(mtypes[0]);
                            return(t);
                        }
                    }
                    finally
                    {
                        // free the fetched-count buffer and release the enumerator on every path
                        Marshal.FreeCoTaskMem(fetched);
                        Marshal.ReleaseComObject(penum);
                    }
                }
            }

            // couldn't get the pin's major media type
            return(typeof(PinDataTypes.Unknown));
        }
Example #6
        private IFilterGraph2 BuildGraph(string sFileName)
        {
            int           hr;
            IBaseFilter   ibfRenderer  = null;
            IBaseFilter   ibfAVISource = null;
            IPin          IPinIn       = null;
            IPin          IPinOut      = null;
            IPin          iSampleIn    = null;
            IPin          iSampleOut   = null;
            SampleGrabber sg           = null;

            IFilterGraph2 graphBuilder = new FilterGraph() as IFilterGraph2;

            try
            {
                // Get the file source filter
                ibfAVISource = new AsyncReader() as IBaseFilter;

                // Add it to the graph
                hr = graphBuilder.AddFilter(ibfAVISource, "Ds.NET AsyncReader");
                Marshal.ThrowExceptionForHR(hr);

                // Set the file name
                IFileSourceFilter fsf = ibfAVISource as IFileSourceFilter;
                hr = fsf.Load(sFileName, null);
                Marshal.ThrowExceptionForHR(hr);

                IPinOut = DsFindPin.ByDirection(ibfAVISource, PinDirection.Output, 0);

                // Get a SampleGrabber
                sg = new SampleGrabber();
                IBaseFilter    grabFilt = sg as IBaseFilter;
                ISampleGrabber isg      = sg as ISampleGrabber;

                // Add the sample grabber to the graph
                hr = graphBuilder.AddFilter(grabFilt, "Ds.NET SampleGrabber");
                Marshal.ThrowExceptionForHR(hr);

                iSampleIn  = DsFindPin.ByDirection(grabFilt, PinDirection.Input, 0);
                iSampleOut = DsFindPin.ByDirection(grabFilt, PinDirection.Output, 0);

                // Get the default video renderer
                ibfRenderer = (IBaseFilter) new VideoRendererDefault();

                // Add it to the graph
                hr = graphBuilder.AddFilter(ibfRenderer, "Ds.NET VideoRendererDefault");
                Marshal.ThrowExceptionForHR(hr);
                IPinIn = DsFindPin.ByDirection(ibfRenderer, PinDirection.Input, 0);

                // Connect the file to the sample grabber
                hr = graphBuilder.Connect(IPinOut, iSampleIn);
                Marshal.ThrowExceptionForHR(hr);

                // Connect the sample grabber to the renderer
                hr = graphBuilder.Connect(iSampleOut, IPinIn);
                Marshal.ThrowExceptionForHR(hr);

                // Configure the sample grabber
                ConfigureSampleGrabber(isg);

                // Grab a copy of the mediatype being used.  Needed
                // in one of the tests
                m_MediaType = new AMMediaType();
                hr          = IPinOut.ConnectionMediaType(m_MediaType);
                Marshal.ThrowExceptionForHR(hr);
            }
            catch
            {
                Marshal.ReleaseComObject(graphBuilder);
                throw;
            }
            finally
            {
                /* An early failure leaves some of these null, and
                 * Marshal.ReleaseComObject throws on a null argument,
                 * which would mask the original exception. */
                if (ibfRenderer != null)  Marshal.ReleaseComObject(ibfRenderer);
                if (ibfAVISource != null) Marshal.ReleaseComObject(ibfAVISource);
                if (IPinIn != null)       Marshal.ReleaseComObject(IPinIn);
                if (IPinOut != null)      Marshal.ReleaseComObject(IPinOut);
                if (iSampleIn != null)    Marshal.ReleaseComObject(iSampleIn);
                if (iSampleOut != null)   Marshal.ReleaseComObject(iSampleOut);
                if (sg != null)           Marshal.ReleaseComObject(sg);
            }

            return(graphBuilder);
        }
Example #7
        // This method is an implementation of the procedure described on this page:
        // http://msdn.microsoft.com/library/en-us/directshow/htm/settingdeinterlacepreferences.asp?frame=true
        private VMRVideoDesc GetVideoDesc7()
        {
            int          hr        = 0;
            AMMediaType  mediaType = new AMMediaType();
            VMRVideoDesc vDesc     = new VMRVideoDesc();

            vDesc.dwSize = Marshal.SizeOf(typeof(VMRVideoDesc));

            IPin pinIn = DsFindPin.ByDirection(vmr, PinDirection.Input, 0);

            hr = pinIn.ConnectionMediaType(mediaType);
            DsError.ThrowExceptionForHR(hr);

            Marshal.ReleaseComObject(pinIn);

            if (mediaType.formatType == FormatType.VideoInfo2)
            {
                VideoInfoHeader2 videoHeader = (VideoInfoHeader2)Marshal.PtrToStructure(mediaType.formatPtr, typeof(VideoInfoHeader2));
                if ((videoHeader.InterlaceFlags & AMInterlace.IsInterlaced) != 0)
                {
                    vDesc.dwSampleWidth        = videoHeader.BmiHeader.Width;
                    vDesc.dwSampleHeight       = videoHeader.BmiHeader.Height;
                    vDesc.SingleFieldPerSample = ((videoHeader.InterlaceFlags & AMInterlace.OneFieldPerSample) != 0);
                    vDesc.dwFourCC             = videoHeader.BmiHeader.Compression;

                    switch (videoHeader.AvgTimePerFrame)
                    {
                    case 166833:
                    {
                        vDesc.InputSampleFreq.dwNumerator   = 60000;
                        vDesc.InputSampleFreq.dwDenominator = 1001;
                        break;
                    }

                    case 333667:
                    case 333666: // 333666 is not defined in the article, but is returned by testme.iso
                    {
                        vDesc.InputSampleFreq.dwNumerator   = 30000;
                        vDesc.InputSampleFreq.dwDenominator = 1001;
                        break;
                    }

                    case 417188:
                    {
                        vDesc.InputSampleFreq.dwNumerator   = 24000;
                        vDesc.InputSampleFreq.dwDenominator = 1001;
                        break;
                    }

                    case 200000:
                    {
                        vDesc.InputSampleFreq.dwNumerator   = 50;
                        vDesc.InputSampleFreq.dwDenominator = 1;
                        break;
                    }

                    case 400000:
                    {
                        vDesc.InputSampleFreq.dwNumerator   = 25;
                        vDesc.InputSampleFreq.dwDenominator = 1;
                        break;
                    }

                    case 416667:
                    {
                        vDesc.InputSampleFreq.dwNumerator   = 24;
                        vDesc.InputSampleFreq.dwDenominator = 1;
                        break;
                    }

                    default:
                    {
                        throw new ApplicationException("Unknown AvgTimePerFrame : " + videoHeader.AvgTimePerFrame);
                    }
                    }

                    // Video is interlaced, so deinterlacing doubles the frame rate
                    vDesc.OutputFrameFreq.dwNumerator   = vDesc.InputSampleFreq.dwNumerator * 2;
                    vDesc.OutputFrameFreq.dwDenominator = vDesc.InputSampleFreq.dwDenominator;
                }
                else
                {
                    throw new ApplicationException("Only interlaced formats");
                }
            }
            else
            {
                throw new ApplicationException("Only VIDEOINFOHEADER2 formats can be interlaced");
            }

            DsUtils.FreeAMMediaType(mediaType);
            return(vDesc);
        }
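
The switch above hard-codes the common NTSC/PAL/film frame durations. Since AvgTimePerFrame is expressed in 100 ns units, the general case described in the cited MSDN procedure (numerator 10,000,000, denominator AvgTimePerFrame) could be sketched instead of the lookup:

        // General case: derive the frequency directly from AvgTimePerFrame.
        vDesc.InputSampleFreq.dwNumerator   = 10000000;
        vDesc.InputSampleFreq.dwDenominator = (int)videoHeader.AvgTimePerFrame;

        // Interlaced input: deinterlacing doubles the output frame rate.
        vDesc.OutputFrameFreq.dwNumerator   = 2 * vDesc.InputSampleFreq.dwNumerator;
        vDesc.OutputFrameFreq.dwDenominator = vDesc.InputSampleFreq.dwDenominator;
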
Example #8
        DSStreamResultCodes InitWithVideoFile(WTVStreamingVideoRequest strq)
        {
            UsingSBEFilter = false;  // Not using stream buffer

            // Init variables
            IPin[]   pin             = new IPin[1];
            string   dPin            = string.Empty;
            string   sName           = string.Empty;
            string   dName           = string.Empty;
            string   sPin            = string.Empty;
            FileInfo fiInputFile     = new FileInfo(strq.FileName);
            string   txtOutputFNPath = fiInputFile.FullName + ".wmv";

            if (
                (fiInputFile.Extension.ToLowerInvariant().Equals(".wtv")) ||
                (fiInputFile.Extension.ToLowerInvariant().Equals(".dvr-ms"))
                )
            {
                return(DSStreamResultCodes.ErrorInvalidFileType);
            }

            int hr = 0;

            try
            {
                // Get the graphbuilder interface
                SendDebugMessage("Creating Graph Object", 0);
                IGraphBuilder graphbuilder = (IGraphBuilder)currentFilterGraph;

                // Create an ASF writer filter
                SendDebugMessage("Creating ASF Writer", 0);
                WMAsfWriter asf_filter = new WMAsfWriter();
                dc.Add(asf_filter);                            // CHECK FOR ERRORS
                currentOutputFilter = (IBaseFilter)asf_filter; // class variable
                // Add the ASF filter to the graph
                hr = graphbuilder.AddFilter((IBaseFilter)asf_filter, "WM Asf Writer");
                DsError.ThrowExceptionForHR(hr);

                // Set the filename
                SendDebugMessage("Setting filename", 0);
                IFileSinkFilter sinkFilter = (IFileSinkFilter)asf_filter;
                string          destPathFN = fiInputFile.FullName + ".wmv";
                hr = sinkFilter.SetFileName(destPathFN, null);
                DsError.ThrowExceptionForHR(hr);

                // Handy to have an ACM Wrapper filter hanging around for AVI files with MP3 audio
                SendDebugMessage("Adding ACM Wrapper", 0);
                IBaseFilter ACMFilter = FilterDefinition.AddToFilterGraph(FilterDefinitions.Other.ACMWrapperFilter, ref graphbuilder);
                dc.Add(ACMFilter);

                // Render file - then build graph
                SendDebugMessage("Rendering file", 0);
                graphbuilder.RenderFile(fiInputFile.FullName, null);
                SendDebugMessage("Saving graph", 0);
                FilterGraphTools.SaveGraphFile(graphbuilder, "C:\\ProgramData\\RemotePotato\\lastfiltergraph.grf");

                // Are both our ASF pins connected?
                IPin ASFVidInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Video, MediaSubType.Null);
                IPin ASFAudInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Audio, MediaSubType.Null);

                // Run graph [can use this also to get media type => see, e.g. dvrmstowmvhd by Babgvant]
                SendDebugMessage("Run graph for testing purposes", 0);
                IMediaControl tempControl = (IMediaControl)graphbuilder;
                IMediaEvent   tempEvent   = (IMediaEvent)graphbuilder;
                DsError.ThrowExceptionForHR(tempControl.Pause());
                EventCode pEventCode;
                hr = tempEvent.WaitForCompletion(1000, out pEventCode);

                // Get media type from vid input pin for ASF writer
                AMMediaType pmt = new AMMediaType();
                hr = ASFVidInputPin.ConnectionMediaType(pmt);
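                // NB: hr is not checked here; if the pin is unconnected, pmt stays empty and the default 200x200 size below is used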

                FrameSize SourceFrameSize = null;
                if (pmt.formatType == FormatType.VideoInfo2)
                {
                    // Now graph has been run and stopped we can get the video width and height from the output pin of the main video decoder
                    VideoInfoHeader2 pvih2 = new VideoInfoHeader2();
                    Marshal.PtrToStructure(pmt.formatPtr, pvih2);
                    SourceFrameSize = new FrameSize(pvih2.BmiHeader.Width, pvih2.BmiHeader.Height);
                }
                else if (pmt.formatType == FormatType.VideoInfo)  //{05589f80-c356-11ce-bf01-00aa0055595a}
                {
                    VideoInfoHeader pvih = new VideoInfoHeader();
                    Marshal.PtrToStructure(pmt.formatPtr, pvih);
                    SourceFrameSize = new FrameSize(pvih.BmiHeader.Width, pvih.BmiHeader.Height);
                }
                else
                {
                    SourceFrameSize = new FrameSize(200, 200); // SQUARE
                }
                // Stop graph if necessary
                FilterState pFS;
                hr = tempControl.GetState(1000, out pFS);
                if (pFS != FilterState.Stopped)
                {
                    DsError.ThrowExceptionForHR(tempControl.Stop());
                }
                // Free up media type
                DsUtils.FreeAMMediaType(pmt); pmt = null;

                // (re)Configure the ASF writer with the selected WM Profile
                ConfigureASFWriter(asf_filter, strq, SourceFrameSize);

                // Release pins
                SendDebugMessage("Releasing COM objects (pins)", 0);
                // source
                Marshal.ReleaseComObject(ASFVidInputPin); ASFVidInputPin = null;
                Marshal.ReleaseComObject(ASFAudInputPin); ASFAudInputPin = null;
            }
            catch (Exception ex)
            {
                SendDebugMessageWithException(ex.Message, ex);
                return(DSStreamResultCodes.ErrorExceptionOccurred);
            }

            return(DSStreamResultCodes.OK);
        }
Example #9
        public void SetDeinterlaceMode()
        {
            if (!GUIGraphicsContext.IsEvr)
            {
                if (!_isVmr9Initialized)
                {
                    return;
                }
                Log.Debug("VMR9: SetDeinterlaceMode()");
                IVMRDeinterlaceControl9 deinterlace = (IVMRDeinterlaceControl9)_vmr9Filter;
                IPin InPin = null;
                int  hr    = _vmr9Filter.FindPin("VMR Input0", out InPin);
                if (hr != 0)
                {
                    Log.Error("VMR9: failed finding InPin {0:X}", hr);
                    return; // without the input pin the media type cannot be inspected
                }
                AMMediaType mediatype = new AMMediaType();
                InPin.ConnectionMediaType(mediatype);
                //Start by getting the media type of the video stream.
                //Only VideoInfoHeader2 formats can be interlaced.
                if (mediatype.formatType == FormatType.VideoInfo2)
                {
                    Log.Debug("VMR9: SetDeinterlaceMode - FormatType = VideoInfo2");
                    int numModes = 0;
                    VideoInfoHeader2 VideoHeader2 = new VideoInfoHeader2();
                    Marshal.PtrToStructure(mediatype.formatPtr, VideoHeader2);
                    VMR9VideoDesc VideoDesc = new VMR9VideoDesc();
                    // If the FormatType is VideoInfo2, check the dwInterlaceFlags field for the AMInterlace.IsInterlaced flag.
                    //The presence of this flag indicates the video is interlaced.
                    if ((VideoHeader2.InterlaceFlags & AMInterlace.IsInterlaced) != 0)
                    {
                        Log.Debug("VMR9: SetDeinterlaceMode - Interlaced frame detected");
                        //Fill in the VMR9VideoDesc structure with a description of the video stream.
                        VideoDesc.dwSize         = Marshal.SizeOf(VideoDesc);    // dwSize: Set this field to sizeof(VMR9VideoDesc).
                        VideoDesc.dwSampleWidth  = VideoHeader2.BmiHeader.Width; // dwSampleWidth: Set this field to pBMI->biWidth.
                        VideoDesc.dwSampleHeight = VideoHeader2.BmiHeader.Height;
                        // dwSampleHeight: Set this field to abs(pBMI->biHeight).
                        //SampleFormat: This field describes the interlace characteristics of the media type.
                        //Check the dwInterlaceFlags field in the VIDEOINFOHEADER2 structure, and set SampleFormat equal to the equivalent VMR9_SampleFormat flag.
                        //(The stream is already known to be interlaced at this point; the branches below are
                        //mutually exclusive, so an earlier assignment is never silently overwritten.)
                        if ((VideoHeader2.InterlaceFlags & AMInterlace.OneFieldPerSample) != 0)
                        {
                            if ((VideoHeader2.InterlaceFlags & AMInterlace.Field1First) != 0)
                            {
                                VideoDesc.SampleFormat = VMR9SampleFormat.FieldSingleEven;
                            }
                            else
                            {
                                VideoDesc.SampleFormat = VMR9SampleFormat.FieldSingleOdd;
                            }
                        }
                        else if ((VideoHeader2.InterlaceFlags & AMInterlace.Field1First) != 0)
                        {
                            VideoDesc.SampleFormat = VMR9SampleFormat.FieldInterleavedEvenFirst;
                        }
                        else
                        {
                            VideoDesc.SampleFormat = VMR9SampleFormat.FieldInterleavedOddFirst;
                        }
                        //InputSampleFreq: This field gives the input frequency, which can be calculated from the AvgTimePerFrame field in the VIDEOINFOHEADER2 structure.
                        //In the general case, set dwNumerator to 10000000, and set dwDenominator to AvgTimePerFrame.
                        VideoDesc.InputSampleFreq.dwNumerator   = 10000000;
                        VideoDesc.InputSampleFreq.dwDenominator = (int)VideoHeader2.AvgTimePerFrame;
                        //OutputFrameFreq: This field gives the output frequency, which can be calculated from the InputSampleFreq value and the interleaving characteristics of the input stream:
                        //Set OutputFrameFreq.dwDenominator equal to InputSampleFreq.dwDenominator.
                        //If the input video is interlaced, set OutputFrameFreq.dwNumerator to 2 x InputSampleFreq.dwNumerator. (After deinterlacing, the frame rate is doubled.)
                        //Otherwise, set the value to InputSampleFreq.dwNumerator.
                        VideoDesc.OutputFrameFreq.dwNumerator   = 2 * VideoDesc.InputSampleFreq.dwNumerator;
                        VideoDesc.OutputFrameFreq.dwDenominator = VideoDesc.InputSampleFreq.dwDenominator;
                        VideoDesc.dwFourCC = VideoHeader2.BmiHeader.Compression; //dwFourCC: Set this field to pBMI->biCompression.
                        //Pass the structure to the IVMRDeinterlaceControl9::GetNumberOfDeinterlaceModes method.
                        //Call the method twice. The first call returns the number of deinterlace modes the hardware supports for the specified format.
                        hr = deinterlace.GetNumberOfDeinterlaceModes(ref VideoDesc, ref numModes, null);
                        if (hr == 0 && numModes != 0)
                        {
                            Guid[] modes = new Guid[numModes];
                            {
                                //Allocate an array of GUIDs of this size, and call the method again, passing in the address of the array.
                                //The second call fills the array with GUIDs. Each GUID identifies one deinterlacing mode.
                                hr = deinterlace.GetNumberOfDeinterlaceModes(ref VideoDesc, ref numModes, modes);
                                for (int i = 0; i < numModes; i++)
                                {
                                    //To get the capabilities of a particular mode, call the IVMRDeinterlaceControl9::GetDeinterlaceModeCaps method.
                                    //Pass in the same VMR9VideoDesc structure, along with one of the GUIDs from the array.
                                    //The method fills a VMR9DeinterlaceCaps structure with the mode capabilities.
                                    VMR9DeinterlaceCaps caps = new VMR9DeinterlaceCaps();
                                    caps.dwSize = Marshal.SizeOf(typeof(VMR9DeinterlaceCaps));
                                    hr          = deinterlace.GetDeinterlaceModeCaps(modes[i], ref VideoDesc, ref caps);
                                    if (hr == 0)
                                    {
                                        Log.Debug("VMR9: AvailableDeinterlaceMode - {0}: {1}", i, modes[i]);
                                        switch (caps.DeinterlaceTechnology)
                                        {
                                        //The algorithm is unknown or proprietary
                                        case VMR9DeinterlaceTech.Unknown:
                                        {
                                            Log.Info("VMR9: Unknown H/W de-interlace mode");
                                            break;
                                        }

                                        //The algorithm creates each missing line by repeating the line above it or below it.
                                        //This method creates jagged artifacts and is not recommended.
                                        case VMR9DeinterlaceTech.BOBLineReplicate:
                                        {
                                            Log.Info("VMR9: BOB Line Replicate capable");
                                            break;
                                        }

                                        //The algorithm creates the missing lines by vertically stretching each video field by a factor of two.
                                        //For example, it might average two lines or use a (-1, 9, 9, -1)/16 filter across four lines.
                                        //Slight vertical adjustments are made to ensure that the resulting image does not "bob" up and down
                                        case VMR9DeinterlaceTech.BOBVerticalStretch:
                                        {
                                            Log.Info("VMR9: BOB Vertical Stretch capable");
                                            verticalStretch = modes[i].ToString();
                                            break;
                                        }

                                        //The algorithm uses median filtering to recreate the pixels in the missing lines.
                                        case VMR9DeinterlaceTech.MedianFiltering:
                                        {
                                            Log.Info("VMR9: Median Filtering capable");
                                            medianFiltering = modes[i].ToString();
                                            break;
                                        }

                                        //The algorithm uses an edge filter to create the missing lines.
                                        //In this process, spatial directional filters are applied to determine the orientation of edges in the picture content.
                                        //Missing pixels are created by filtering along (rather than across) the detected edges.
                                        case VMR9DeinterlaceTech.EdgeFiltering:
                                        {
                                            Log.Info("VMR9: Edge Filtering capable");
                                            break;
                                        }

                                        //The algorithm uses spatial or temporal interpolation, switching between the two on a field-by-field basis, depending on the amount of motion.
                                        case VMR9DeinterlaceTech.FieldAdaptive:
                                        {
                                            Log.Info("VMR9: Field Adaptive capable");
                                            break;
                                        }

                                        //The algorithm uses spatial or temporal interpolation, switching between the two on a pixel-by-pixel basis, depending on the amount of motion.
                                        case VMR9DeinterlaceTech.PixelAdaptive:
                                        {
                                            Log.Info("VMR9: Pixel Adaptive capable");
                                            pixelAdaptive = modes[i].ToString();
                                            break;
                                        }

                                        //The algorithm identifies objects within a sequence of video fields.
                                        //Before it recreates the missing pixels, it aligns the movement axes of the individual objects in the scene to make them parallel with the time axis.
                                        case VMR9DeinterlaceTech.MotionVectorSteered:
                                        {
                                            Log.Info("VMR9: Motion Vector Steered capable");
                                            break;
                                        }
                                        }
                                    }
                                }
                            }
                            //Set the MP preferred h/w de-interlace modes in order of quality
                            //pixel adaptive, then median filtering & finally vertical stretch
                            if (pixelAdaptive != "")
                            {
                                Guid DeinterlaceMode = new Guid(pixelAdaptive);
                                Log.Debug("VMR9: trying pixel adaptive");
                                hr = deinterlace.SetDeinterlaceMode(0, DeinterlaceMode);
                                if (hr != 0)
                                {
                                    Log.Error("VMR9: pixel adaptive failed!");
                                }
                                else
                                {
                                    Log.Info("VMR9: setting pixel adaptive succeeded");
                                    medianFiltering = "";
                                    verticalStretch = "";
                                }
                            }
                            if (medianFiltering != "")
                            {
                                Guid DeinterlaceMode = new Guid(medianFiltering);
                                Log.Debug("VMR9: trying median filtering");
                                hr = deinterlace.SetDeinterlaceMode(0, DeinterlaceMode);
                                if (hr != 0)
                                {
                                    Log.Error("VMR9: median filtering failed!");
                                }
                                else
                                {
                                    Log.Info("VMR9: setting median filtering succeeded");
                                    verticalStretch = "";
                                }
                            }
                            if (verticalStretch != "")
                            {
                                Guid DeinterlaceMode = new Guid(verticalStretch);
                                Log.Debug("VMR9: trying vertical stretch");
                                hr = deinterlace.SetDeinterlaceMode(0, DeinterlaceMode);
                                if (hr != 0)
                                {
                                    Log.Error("VMR9: Cannot set H/W de-interlace mode - using VMR9 fallback");
                                }
                                else
                                {
                                    Log.Info("VMR9: setting vertical stretch succeeded");
                                }
                            }
                        }
                        else
                        {
                            Log.Info("VMR9: No H/W de-interlaced modes supported, using fallback preference");
                        }
                    }
                    else
                    {
                        Log.Info("VMR9: progressive mode detected - no need to de-interlace");
                    }
                }
                //If the format type is VideoInfo, it must be a progressive frame.
                else
                {
                    Log.Info("VMR9: no need to de-interlace this video source");
                }
                DsUtils.FreeAMMediaType(mediatype);
                //release the VMR9 pin
                hr = DirectShowUtil.ReleaseComObject(InPin);

                InPin     = null;
                mediatype = null;
            }
        }
Example #10
        public static void tオーディオレンダラをNullレンダラに変えてフォーマットを取得する(IGraphBuilder graphBuilder, out WaveFormat wfx, out byte[] wfx拡張データ)
        {
            int hr = 0;

            IBaseFilter audioRenderer              = null;
            IPin        rendererInputPin           = null;
            IPin        rendererConnectedOutputPin = null;
            IBaseFilter nullRenderer         = null;
            IPin        nullRendererInputPin = null;

            wfx      = null;
            wfx拡張データ = new byte[0];

            try
            {
                // Find the audio renderer.

                audioRenderer = CDirectShow.tオーディオレンダラを探して返す(graphBuilder);
                if (audioRenderer == null)
                {
                    return;                             // not found
                }
                #region [ Play the graph once at zero volume. (The audio renderer's input-pin MediaType may change to the "correct" one, different from the one negotiated at connection time.) ]
                //-----------------
                {
                    // By this point the graph's video renderer must already have been disabled
                    // (replaced with a NullRenderer, removed, etc.); otherwise an Active window
                    // is shown for a brief moment during StopWhenReady().

                    var mediaCtrl  = (IMediaControl)graphBuilder;
                    var basicAudio = (IBasicAudio)graphBuilder;

                    basicAudio.put_Volume(-10000);                              // minimum volume (silence)


                    // Play the graph and stop it immediately (transitions Paused -> Stopped).

                    mediaCtrl.StopWhenReady();


                    // Wait until the graph has finished transitioning to Stopped. (StopWhenReady() returns without waiting for the graph to reach Stopped.)

                    FilterState fs = FilterState.Paused;
                    hr = CWin32.S_FALSE;
                    while (fs != FilterState.Stopped || hr != CWin32.S_OK)
                    {
                        hr = mediaCtrl.GetState(10, out fs);
                    }


                    // Cleanup.

                    basicAudio.put_Volume(0);                                           // full volume

                    basicAudio = null;
                    mediaCtrl  = null;
                }
                //-----------------
                #endregion

                // Find the input pin of audioRenderer.

                rendererInputPin = t最初の入力ピンを探して返す(audioRenderer);
                if (rendererInputPin == null)
                {
                    return;
                }


                // Get the WAVE format and store it into the wfx argument.

                var type = new AMMediaType();
                hr = rendererInputPin.ConnectionMediaType(type);
                DsError.ThrowExceptionForHR(hr);
                try
                {
                    wfx = new WaveFormat();

                    #region [ Copy the data, excluding the extension area, from type.formatPtr into wfx. ]
                    //-----------------
                    var wfxTemp = new WaveFormatEx();                           // SharpDX.Multimedia.WaveFormat cannot be used with Marshal.PtrToStructure(), so read the data via DirectShowLib.WaveFormatEx, which can. (Tedious...)
                    Marshal.PtrToStructure(type.formatPtr, (object)wfxTemp);

                    wfx = WaveFormat.CreateCustomFormat((WaveFormatEncoding)wfxTemp.wFormatTag, wfxTemp.nSamplesPerSec, wfxTemp.nChannels, wfxTemp.nAvgBytesPerSec, wfxTemp.nBlockAlign, wfxTemp.wBitsPerSample);
                    //-----------------
                    #endregion
                    #region [ If an extension area exists, store it in wfx拡張データ. ]
                    //-----------------
                    int nWaveFormatEx本体サイズ = 16 + 2;                     // 16-byte PCMWAVEFORMAT + 2-byte cbSize = sizeof(WAVEFORMATEX)
                    int nはみ出しサイズbyte       = type.formatSize - nWaveFormatEx本体サイズ;

                    if (nはみ出しサイズbyte > 0)
                    {
                        wfx拡張データ = new byte[nはみ出しサイズbyte];
                        var hGC = GCHandle.Alloc(wfx拡張データ, GCHandleType.Pinned);                                // pin the buffer so the GC cannot move it
                        unsafe
                        {
                            byte *src = (byte *)type.formatPtr.ToPointer();
                            byte *dst = (byte *)hGC.AddrOfPinnedObject().ToPointer();
                            CWin32.CopyMemory(dst, src + nWaveFormatEx本体サイズ, (uint)nはみ出しサイズbyte);
                        }
                        hGC.Free();
                    }
                    //-----------------
                    #endregion
                }
                finally
                {
                    if (type != null)
                    {
                        DsUtils.FreeAMMediaType(type);
                    }
                }


                // Find the output pin connected to audioRenderer.

                hr = rendererInputPin.ConnectedTo(out rendererConnectedOutputPin);
                DsError.ThrowExceptionForHR(hr);


                // Disconnect audioRenderer from the graph.

                rendererInputPin.Disconnect();
                rendererConnectedOutputPin.Disconnect();


                // Remove audioRenderer from the graph.

                hr = graphBuilder.RemoveFilter(audioRenderer);
                DsError.ThrowExceptionForHR(hr);


                // Create a nullRenderer and add it to the graph.

                nullRenderer = (IBaseFilter) new NullRenderer();
                hr           = graphBuilder.AddFilter(nullRenderer, "Audio Null Renderer");
                DsError.ThrowExceptionForHR(hr);


                // Find the input pin of nullRenderer.

                hr = nullRenderer.FindPin("In", out nullRendererInputPin);
                DsError.ThrowExceptionForHR(hr);


                // Connect nullRenderer into the graph.

                hr = rendererConnectedOutputPin.Connect(nullRendererInputPin, null);
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                CCommon.tReleaseComObject(ref nullRendererInputPin);
                CCommon.tReleaseComObject(ref nullRenderer);
                CCommon.tReleaseComObject(ref rendererConnectedOutputPin);
                CCommon.tReleaseComObject(ref rendererInputPin);
                CCommon.tReleaseComObject(ref audioRenderer);
            }
        }
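
The pinned-handle copy in the example above can also be written without an unsafe block; a minimal sketch of the same extraction using Marshal.Copy (same 18-byte WAVEFORMATEX header assumption):

        int cbHeader = 18;                               // sizeof(WAVEFORMATEX)
        int cbExtra  = type.formatSize - cbHeader;
        if (cbExtra > 0)
        {
            wfx拡張データ = new byte[cbExtra];
            Marshal.Copy(IntPtr.Add(type.formatPtr, cbHeader), wfx拡張データ, 0, cbExtra);
        }
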
Example #11
        private void CreateFilters()
        {
            isValid = true;

            // grabber
            grabberVideo = new GrabberVideo(this);
            grabberAudio = new GrabberAudio(this);

            // objects
            graphObject        = null;
            grabberObjectVideo = null;
            grabberObjectAudio = null;

            int sourceBaseVideoPinIndex = 0;

            try {
                // get type for filter graph
                Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating filter graph");
                }

                // create filter graph
                graphObject = Activator.CreateInstance(type);
                graph       = (IGraphBuilder)graphObject;

                // create source device's object
                if (fileName.ToLower().EndsWith(".wmv"))
                {
                    type = Type.GetTypeFromCLSID(Clsid.WMASFReader);
                    if (type == null)
                    {
                        throw new ApplicationException("Failed creating ASF Reader filter");
                    }
                    sourceBase = (IBaseFilter)Activator.CreateInstance(type);
                    IFileSourceFilter sourceFile = (IFileSourceFilter)sourceBase;
                    sourceFile.Load(fileName, null);
                    graph.AddFilter(sourceBase, "source");
                    sourceBaseVideoPinIndex = 1;
                }
                else
                {
                    graph.AddSourceFilter(fileName, "source", out sourceBase);
                    if (sourceBase == null)
                    {
                        try {
                            type = Type.GetTypeFromCLSID(Clsid.AsyncReader);
                            if (type == null)
                            {
                                throw new ApplicationException("Failed creating Async Reader filter");
                            }
                            sourceBase = (IBaseFilter)Activator.CreateInstance(type);
                            IFileSourceFilter sourceFile = (IFileSourceFilter)sourceBase;
                            sourceFile.Load(fileName, null);
                            graph.AddFilter(sourceBase, "source");
                        } catch {
                            throw new ApplicationException("Failed creating source filter");
                        }
                    }
                    sourceBaseVideoPinIndex = 0;
                }

                // get type for sample grabber
                type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating sample grabber");
                }

                // create sample grabber
                grabberObjectVideo = Activator.CreateInstance(type);
                sampleGrabberVideo = (ISampleGrabber)grabberObjectVideo;
                grabberBaseVideo   = (IBaseFilter)grabberObjectVideo;

                // add grabber filters to graph
                graph.AddFilter(grabberBaseVideo, "grabberVideo");

                // set media type
                AMMediaType mediaType = new AMMediaType {
                    MajorType = MediaType.Video,
                    SubType   = MediaSubType.ARGB32 /* MediaSubType.RGB24 */
                };
                sampleGrabberVideo.SetMediaType(mediaType);

                // connect pins
                IPin outPin = Tools.GetOutPin(sourceBase, sourceBaseVideoPinIndex);
                IPin inPin  = Tools.GetInPin(grabberBaseVideo, 0);
                if (graph.Connect(outPin, inPin) < 0)
                {
                    throw new ApplicationException("Failed connecting sourceBase to grabberBaseVideo");
                }
                Marshal.ReleaseComObject(outPin);
                Marshal.ReleaseComObject(inPin);

                // get media type
                if (sampleGrabberVideo.GetConnectedMediaType(mediaType) == 0)
                {
                    VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));

                    grabberVideo.Width  = vih.BmiHeader.Width;
                    grabberVideo.Height = vih.BmiHeader.Height;
                    mediaType.Dispose();
                }

                if (useAudioGrabber)
                {
                    // *****************************************************************
                    // ******** Add the audio grabber to monitor audio peaks ***********
                    bool audioGrabberIsConnected  = false;
                    Tools.FilterInfo2 filterInfo2 = Tools.GetNextFilter(sourceBase, PinDirection.Output, 0);
                    foreach (Tools.PinInfo2 pinInfo2 in filterInfo2.Pins)
                    {
                        if (pinInfo2.PinInfo.Direction == PinDirection.Output)
                        {
                            if (!Tools.IsPinConnected(pinInfo2.Pin))
                            {
                                try {
                                    graph.Render(pinInfo2.Pin);

                                    AMMediaType mt = new AMMediaType();
                                    pinInfo2.Pin.ConnectionMediaType(mt);
                                    if (mt.MajorType == MediaType.Audio)
                                    {
                                        // Obtain a reference to the filter connected to the audio output of the video splitter (usually, this is the audio decoder)
                                        Tools.FilterInfo2 decoderFilterInfo2 = Tools.GetNextFilter(pinInfo2.PinInfo.Filter, PinDirection.Output, 0);

                                        // Remove all the filters connected to the audio decoder filter
                                        System.Collections.Generic.List <Tools.FilterInfo2> filtersInfo2 = new System.Collections.Generic.List <Tools.FilterInfo2>();
                                        Tools.FilterInfo2 testFilterInfo2 = Tools.GetNextFilter(decoderFilterInfo2.Filter, PinDirection.Output, 0);
                                        while (true)
                                        {
                                            filtersInfo2.Add(testFilterInfo2);
                                            testFilterInfo2 = Tools.GetNextFilter(testFilterInfo2.Filter, PinDirection.Output, 0);
                                            if (testFilterInfo2.Filter == null)
                                            {
                                                break;
                                            }
                                        }
                                        foreach (Tools.FilterInfo2 fi2 in filtersInfo2)
                                        {
                                            graph.RemoveFilter(fi2.Filter);
                                            fi2.Release();
                                        }

                                        // get type for sample grabber
                                        type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                                        if (type == null)
                                        {
                                            throw new ApplicationException("Failed creating audio sample grabber");
                                        }

                                        // create sample grabber
                                        grabberObjectAudio = Activator.CreateInstance(type);
                                        sampleGrabberAudio = (ISampleGrabber)grabberObjectAudio;
                                        grabberBaseAudio   = (IBaseFilter)grabberObjectAudio;

                                        // add grabber filters to graph
                                        graph.AddFilter(grabberBaseAudio, "grabberAudio");

                                        // set media type
                                        AMMediaType mediaTypeAudio = new AMMediaType {
                                            MajorType  = MediaType.Audio,
                                            SubType    = MediaSubType.PCM,
                                            FormatType = FormatType.WaveEx
                                        };
                                        sampleGrabberAudio.SetMediaType(mediaTypeAudio);

                                        outPin = Tools.GetOutPin(decoderFilterInfo2.Filter, 0);
                                        inPin  = Tools.GetInPin(grabberBaseAudio, 0);
                                        if (graph.Connect(outPin, inPin) < 0)
                                        {
                                            throw new ApplicationException("Failed connecting filter to grabberBaseAudio");
                                        }
                                        Marshal.ReleaseComObject(outPin);
                                        Marshal.ReleaseComObject(inPin);

                                        // Finally, connect the grabber to the audio renderer
                                        outPin = Tools.GetOutPin(grabberBaseAudio, 0);
                                        graph.Render(outPin);

                                        mt = new AMMediaType();
                                        outPin.ConnectionMediaType(mt);
                                        if (!Tools.IsPinConnected(outPin))
                                        {
                                            throw new ApplicationException("Failed obtaining media audio information");
                                        }
                                        wavFormat = new WaveFormatEx();
                                        Marshal.PtrToStructure(mt.FormatPtr, wavFormat);
                                        Marshal.ReleaseComObject(outPin);

                                        // configure sample grabber
                                        sampleGrabberAudio.SetBufferSamples(false);
                                        sampleGrabberAudio.SetOneShot(false);
                                        sampleGrabberAudio.SetCallback(grabberAudio, 1);

                                        audioGrabberIsConnected = true;
                                        break;
                                    }
                                } catch {
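                                    // ignore this pin and keep probing the remaining outputs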
                                }
                            }
                        }
                    }
                    filterInfo2.Release();
                    if (!audioGrabberIsConnected)
                    {
                        foreach (Tools.PinInfo2 pinInfo2 in Tools.GetPins(sourceBase))
                        {
                            if (!Tools.IsPinConnected(pinInfo2.Pin))
                            {
                                foreach (AMMediaType mt in Tools.GetMediaTypes(pinInfo2.Pin))
                                {
                                    if (mt.MajorType == MediaType.Audio)
                                    {
                                        // create sample grabber
                                        grabberObjectAudio = Activator.CreateInstance(type);
                                        sampleGrabberAudio = (ISampleGrabber)grabberObjectAudio;
                                        grabberBaseAudio   = (IBaseFilter)grabberObjectAudio;

                                        // add grabber filters to graph
                                        graph.AddFilter(grabberBaseAudio, "grabberAudio");

                                        // set media type
                                        AMMediaType mediaTypeAudio = new AMMediaType {
                                            MajorType  = MediaType.Audio,
                                            SubType    = MediaSubType.PCM,
                                            FormatType = FormatType.WaveEx
                                        };
                                        sampleGrabberAudio.SetMediaType(mediaTypeAudio);

                                        inPin = Tools.GetInPin(grabberBaseAudio, 0);
                                        if (graph.Connect(pinInfo2.Pin, inPin) < 0)
                                        {
                                            throw new ApplicationException("Failed connecting sourceBase to grabberBaseVideo");
                                        }
                                        Marshal.ReleaseComObject(inPin);

                                        // Finally, connect the grabber to the audio renderer
                                        outPin = Tools.GetOutPin(grabberBaseAudio, 0);
                                        graph.Render(outPin);

                                        AMMediaType amt = new AMMediaType();
                                        outPin.ConnectionMediaType(amt);
                                        if (!Tools.IsPinConnected(outPin))
                                        {
                                            throw new ApplicationException("Failed obtaining media audio information");
                                        }
                                        wavFormat = new WaveFormatEx();
                                        Marshal.PtrToStructure(amt.FormatPtr, wavFormat);
                                        Marshal.ReleaseComObject(outPin);

                                        // configure sample grabber
                                        sampleGrabberAudio.SetBufferSamples(false);
                                        sampleGrabberAudio.SetOneShot(false);
                                        sampleGrabberAudio.SetCallback(grabberAudio, 1);

                                        audioGrabberIsConnected = true;

                                        break;
                                    }
                                }
                            }
                        }
                    }
                    // *****************************************************************
                }

                // render the video pin now, unless we've been asked to prevent freezing
                if (!preventFreezing)
                {
                    // render pin
                    graph.Render(Tools.GetOutPin(grabberBaseVideo, 0));

                    // configure video window
                    IVideoWindow window = (IVideoWindow)graphObject;
                    window.put_AutoShow(false);
                    window = null;
                }

                // configure sample grabber
                sampleGrabberVideo.SetBufferSamples(false);
                sampleGrabberVideo.SetOneShot(false);
                sampleGrabberVideo.SetCallback(grabberVideo, 1);

                // disable clock, if someone requested it
                if (!referenceClockEnabled)
                {
                    IMediaFilter mediaFilter = (IMediaFilter)graphObject;
                    mediaFilter.SetSyncSource(null);
                }

                // get media control
                mediaControl = (IMediaControl)graphObject;

                // get media seek control
                mediaSeekControl = (IMediaSeeking)graphObject;

                // get media events' interface
                mediaEvent = (IMediaEventEx)graphObject;

                // get media audio control
                basicAudio = (IBasicAudio)graphObject;
            } catch (Exception exception) {
                DestroyFilters();

                // provide information to clients
                VideoSourceError?.Invoke(this, new VideoSourceErrorEventArgs(exception.Message));
            }
        }
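
Both connection branches above fill an AMMediaType from the grabber's output pin but never release the format block it carries. Below is a minimal sketch of that read-and-free pattern, written with the DirectShowLib-style names used in the later examples (GetConnectedWaveFormat is a hypothetical helper):

        private static WaveFormatEx GetConnectedWaveFormat(IPin outPin)
        {
            // ask the pin for the media type it negotiated at connection time
            AMMediaType mt = new AMMediaType();
            int hr = outPin.ConnectionMediaType(mt);
            DsError.ThrowExceptionForHR(hr);

            try
            {
                // only WaveEx-backed formats carry a WaveFormatEx block
                if (mt.formatType != FormatType.WaveEx)
                {
                    throw new ApplicationException("Pin is not connected with a WaveEx format");
                }

                WaveFormatEx format = new WaveFormatEx();
                Marshal.PtrToStructure(mt.formatPtr, format);
                return format;
            }
            finally
            {
                // always release the format block, even on the error path
                DsUtils.FreeAMMediaType(mt);
            }
        }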
Example #13
        public static VideoInfoHeader2 GetSBEFrameSize(string pathToFile)
        {
            int           hr        = 0;
            IGraphBuilder graph     = null;
            IBaseFilter   capFilter = null;
            IBaseFilter   nRender   = null;

            try
            {
                graph = (IGraphBuilder) new FilterGraph();

                hr = graph.AddSourceFilter(pathToFile, "Source", out capFilter);
                DsError.ThrowExceptionForHR(hr);

#if DEBUG
                using (DsROTEntry rot = new DsROTEntry(graph))
                {
#endif

                IPin vPin           = null;
                IBaseFilter dec     = null;
                IPin sgIn           = null;
                IBaseFilter mpegDec = null;

                try
                {
                    dec = (IBaseFilter) new DTFilter();

                    hr = graph.AddFilter(dec, "Decrypt");
                    DsError.ThrowExceptionForHR(hr);

                    nRender = (IBaseFilter) new NullRenderer();

                    hr = graph.AddFilter((IBaseFilter)nRender, "Video Null Renderer");
                    DsError.ThrowExceptionForHR(hr);


                    IBaseFilter dec1 = FilterDefinition.AddToFilterGraph(FatAttitude.WTVTranscoder.FilterDefinitions.Decrypt.DTFilterPBDA, ref graph, "Decrypt1");
                    if (dec1 != null)
                    {
                        Marshal.ReleaseComObject(dec1);
                    }
                    dec1 = null;

                    mpegDec = FilterDefinition.AddToFilterGraph(FatAttitude.WTVTranscoder.FilterDefinitions.Video.VideoDecoderMpeg, ref graph, "MS MPEG Decoder");

                    sgIn = DsFindPin.ByDirection(mpegDec, PinDirection.Input, 0);

                    IEnumPins ppEnum;
                    IPin[]    pPins = new IPin[1];

                    hr = capFilter.EnumPins(out ppEnum);
                    DsError.ThrowExceptionForHR(hr);

                    try
                    {
                        while (ppEnum.Next(1, pPins, IntPtr.Zero) == 0)
                        {
                            IEnumMediaTypes emtDvr = null;
                            AMMediaType[]   amtDvr = new AMMediaType[1];

                            try
                            {
                                pPins[0].EnumMediaTypes(out emtDvr);

                                hr = emtDvr.Next(1, amtDvr, IntPtr.Zero);
                                DsError.ThrowExceptionForHR(hr);

                                if (amtDvr[0].majorType == MediaType.Video)
                                {
                                    if (graph.Connect(pPins[0], sgIn) >= 0)
                                    {
                                        vPin = pPins[0];
                                        break;
                                    }
                                }
                                if (pPins[0] != null)
                                {
                                    Marshal.ReleaseComObject(pPins[0]);
                                }
                            }
                            finally
                            {
                                if (emtDvr != null)
                                {
                                    Marshal.ReleaseComObject(emtDvr);
                                }
                                DsUtils.FreeAMMediaType(amtDvr[0]);
                            }
                        }
                    }
                    finally
                    {
                        if (ppEnum != null)
                        {
                            Marshal.ReleaseComObject(ppEnum);
                        }
                    }

                    FilterGraphTools.RenderPin(graph, mpegDec, "Video Output 1");
                }
                finally
                {
                    if (vPin != null)
                    {
                        Marshal.ReleaseComObject(vPin);
                    }

                    if (dec != null)
                    {
                        Marshal.ReleaseComObject(dec);
                    }

                    if (sgIn != null)
                    {
                        Marshal.ReleaseComObject(sgIn);
                    }

                    if (mpegDec != null)
                    {
                        Marshal.ReleaseComObject(mpegDec);
                    }
                }

                EventCode ec;

                IMediaControl mControl = graph as IMediaControl;
                IMediaEvent mEvent     = graph as IMediaEvent;

                hr = mControl.Pause();
                DsError.ThrowExceptionForHR(hr);

                hr = mControl.Run();
                DsError.ThrowExceptionForHR(hr);

                hr = mEvent.WaitForCompletion(1000, out ec);
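                // the one-second wait may simply time out once the graph has
                // run briefly, which is why the HRESULT check below is left
                // commented out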
                //DsError.ThrowExceptionForHR(hr);

                hr = mControl.Pause();
                DsError.ThrowExceptionForHR(hr);

                hr = mControl.Stop();
                DsError.ThrowExceptionForHR(hr);

                IPin mpgOut = null;
                sgIn = null;
                AMMediaType mt = new AMMediaType();

                try
                {
                    sgIn = DsFindPin.ByDirection(nRender, PinDirection.Input, 0);

                    if (sgIn != null)
                    {
                        hr = sgIn.ConnectedTo(out mpgOut);
                        DsError.ThrowExceptionForHR(hr);

                        hr = graph.RemoveFilter(nRender);
                        DsError.ThrowExceptionForHR(hr);

                        Marshal.ReleaseComObject(nRender);
                        nRender = null;

                        nRender = (IBaseFilter) new NullRenderer();
                        hr      = graph.AddFilter((IBaseFilter)nRender, "Video Null Renderer");
                        DsError.ThrowExceptionForHR(hr);

                        hr = graph.Render(mpgOut);
                        DsError.ThrowExceptionForHR(hr);

                        hr = mpgOut.ConnectionMediaType(mt);
                        DsError.ThrowExceptionForHR(hr);

                        if (mt.formatType == FormatType.VideoInfo2)
                        {
                            VideoInfoHeader2 vih = (VideoInfoHeader2)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader2));
                            return(vih);
                        }
                    }
                }
                finally
                {
                    DsUtils.FreeAMMediaType(mt);

                    if (mpgOut != null)
                    {
                        Marshal.ReleaseComObject(mpgOut);
                    }
                    if (sgIn != null)
                    {
                        Marshal.ReleaseComObject(sgIn);
                    }
                }
#if DEBUG
            }
#endif
            }
            finally
            {
                if (nRender != null)
                {
                    Marshal.ReleaseComObject(nRender);
                }
                if (capFilter != null)
                {
                    Marshal.ReleaseComObject(capFilter);
                }
                if (graph != null)
                {
                    while (Marshal.ReleaseComObject(graph) > 0)
                    {
                        ;
                    }
                }
            }
            return(null);
        }
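
A short usage sketch for the method above: DirectShowLib's VideoInfoHeader2 exposes the stream dimensions through its BmiHeader member. The recording path is a placeholder:

            VideoInfoHeader2 vih = GetSBEFrameSize(@"C:\recordings\sample.wtv");
            if (vih != null)
            {
                // BmiHeader is the embedded BitmapInfoHeader
                Console.WriteLine("Frame size: {0}x{1}", vih.BmiHeader.Width, vih.BmiHeader.Height);
            }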
Example #14
        private void CreateFilters(Guid audioSubType)
        {
            isValid = false;
            int r;

            // grabber
            grabberAudio = new GrabberAudio(this);

            // objects
            graphObject        = null;
            grabberObjectAudio = null;

            try {
                // get type for filter graph
                Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating filter graph");
                }

                // create filter graph
                graphObject = Activator.CreateInstance(type);
                graph       = (IGraphBuilder)graphObject;

                // create source device's object
                r = graph.AddSourceFilter(fileName, "source", out sourceBase);
                if (sourceBase == null)
                {
                    throw new ApplicationException("Failed creating source filter");
                }

                // get type for sample grabber
                type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating sample grabber");
                }

                // create sample grabber
                grabberObjectAudio = Activator.CreateInstance(type);
                sampleGrabberAudio = (ISampleGrabber)grabberObjectAudio;
                grabberBaseAudio   = (IBaseFilter)grabberObjectAudio;

                // add grabber filters to graph
                r = graph.AddFilter(grabberBaseAudio, "grabberAudio");

                // set media type
                AMMediaType mediaType = new AMMediaType {
                    MajorType  = MediaType.Audio,
                    SubType    = audioSubType,
                    FormatType = FormatType.WaveEx
                };
                r = sampleGrabberAudio.SetMediaType(mediaType);

                // render pin
                // TODO: Improve this! We can't assume that the second pin is always the audio pin -- we need to find it.
                IPin sbPin = Tools.GetOutPin(sourceBase, 1);
                if (sbPin == null)
                {
                    sbPin = Tools.GetOutPin(sourceBase, 0);
                }
                r = graph.Render(sbPin);

                IPin        outPin = Tools.GetOutPin(grabberBaseAudio, 0);
                AMMediaType mt     = new AMMediaType();
                r = outPin.ConnectionMediaType(mt);
                if (!Tools.IsPinConnected(outPin))
                {
                    throw new ApplicationException("Failed obtaining media information");
                }

                // disable clock, if someone requested it
                if (!referenceClockEnabled)
                {
                    IMediaFilter mediaFilter = (IMediaFilter)graphObject;
                    r = mediaFilter.SetSyncSource(null);
                }

                wavFormat = new WaveFormatEx();
                Marshal.PtrToStructure(mt.FormatPtr, wavFormat);
                Marshal.ReleaseComObject(outPin);

                // configure sample grabber
                r = sampleGrabberAudio.SetBufferSamples(false);
                r = sampleGrabberAudio.SetOneShot(false);
                r = sampleGrabberAudio.SetCallback(grabberAudio, 1);

                if (useNullRenderer)
                {
                    // Get a list of all the filters connected to the sample grabber
                    List <Tools.FilterInfo2> filtersInfo2    = new List <Tools.FilterInfo2>();
                    Tools.FilterInfo2        testFilterInfo2 = Tools.GetNextFilter(grabberBaseAudio, PinDirection.Output, 0);
                    while (true)
                    {
                        filtersInfo2.Add(testFilterInfo2);
                        testFilterInfo2 = Tools.GetNextFilter(testFilterInfo2.Filter, PinDirection.Output, 0);
                        if (testFilterInfo2.Filter == null)
                        {
                            break;
                        }
                    }
                    // Remove the last filter, the audio renderer
                    r = graph.RemoveFilter(filtersInfo2[filtersInfo2.Count - 1].Filter);

                    // create null renderer
                    type = Type.GetTypeFromCLSID(Clsid.NullRenderer);
                    if (type == null)
                    {
                        throw new ApplicationException("Failed creating null renderer");
                    }

                    nullRendererObjectAudio = Activator.CreateInstance(type);
                    IBaseFilter nullRendererAudio = (IBaseFilter)nullRendererObjectAudio;

                    // add grabber filters to graph
                    r = graph.AddFilter(nullRendererAudio, "nullRenderer");

                    //outPin = Tools.GetOutPin(filtersInfo2[filtersInfo2.Count - 2].Filter, 0);
                    outPin = Tools.GetOutPin(grabberBaseAudio, 0);
                    IPin inPin = Tools.GetInPin(nullRendererAudio, 0);
                    if (graph.Connect(outPin, inPin) < 0)
                    {
                        throw new ApplicationException("Failed connecting grabberBaseAudio to nullRendererAudio");
                    }
                    Marshal.ReleaseComObject(outPin);
                    Marshal.ReleaseComObject(inPin);
                }

                // configure video window
                IVideoWindow window = (IVideoWindow)graphObject;
                if (window != null)
                {
                    window.put_AutoShow(false);
                    window = null;
                }

                // get media control
                mediaControl = (IMediaControl)graphObject;

                // get media seek control
                mediaSeekControl = (IMediaSeeking)graphObject;
                mediaSeekControl.SetTimeFormat(TimeFormat.MediaTime);

                // get media events' interface
                mediaEvent = (IMediaEventEx)graphObject;

                // get media audio control
                basicAudio = (IBasicAudio)graphObject;

                isValid = true;
            } catch (Exception exception) {
                DestroyFilters();

                // provide information to clients
                AudioSourceError?.Invoke(this, new AudioSourceErrorEventArgs(exception.Message));
            }
        }
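
The downstream walk in the useNullRenderer branch above appends each hop before checking whether the next one exists, so the very first result is never null-checked. Below is a guard-first sketch of the same walk, reusing the Tools helpers the example already depends on (the method name is hypothetical):

        private static List<Tools.FilterInfo2> GetDownstreamFilters(IBaseFilter start)
        {
            List<Tools.FilterInfo2> result = new List<Tools.FilterInfo2>();

            // follow the first output pin of each filter until the chain ends
            Tools.FilterInfo2 next = Tools.GetNextFilter(start, PinDirection.Output, 0);
            while (next.Filter != null)
            {
                result.Add(next);
                next = Tools.GetNextFilter(next.Filter, PinDirection.Output, 0);
            }
            return result;
        }

The renderer to remove is then simply the last element of the returned list, if any.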
Example #15
File: Xbadpcm.cs Project: Nanook/TheGHOST
        private void ChangePinFormat(IPin pin, int samplesPerSec, bool forceMono)
        {
            AMMediaType outputMediaType = new AMMediaType();
            pin.ConnectionMediaType(outputMediaType);
            var formatEx = Marshal.PtrToStructure(outputMediaType.formatPtr, typeof(WaveFormatEx)) as WaveFormatEx;

            if (samplesPerSec != 0 && samplesPerSec < formatEx.nSamplesPerSec)
                formatEx.nSamplesPerSec = samplesPerSec;

            if (forceMono)
                formatEx.nChannels = 1;

            formatEx.nBlockAlign = (short)(formatEx.nChannels * 2); // * 2 for 16 bit
            formatEx.nAvgBytesPerSec = formatEx.nSamplesPerSec * formatEx.nBlockAlign;

            Marshal.StructureToPtr(formatEx, outputMediaType.formatPtr, true);
            IAMStreamConfig streamConfig = (IAMStreamConfig)pin;
            streamConfig.SetFormat(outputMediaType);
            DsUtils.FreeAMMediaType(outputMediaType);   // release the queried format block
        }
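
The nBlockAlign recalculation above hard-codes 16-bit samples; using formatEx.wBitsPerSample / 8 instead would track the actual bit depth. A hypothetical call, downmixing a connected output pin to 22050 Hz mono:

            // pin, target rate, and mono flag are illustrative values
            ChangePinFormat(outPin, 22050, true);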
Example #16
        public void convert(object obj)
        {
            string[] pair       = obj as string[];
            string   srcfile    = pair[0];
            string   targetfile = pair[1];
            int      hr;

            ibfSrcFile = (IBaseFilter) new AsyncReader();
            hr         = gb.AddFilter(ibfSrcFile, "Reader");
            DsError.ThrowExceptionForHR(hr);
            IFileSourceFilter ifileSource = (IFileSourceFilter)ibfSrcFile;

            hr = ifileSource.Load(srcfile, null);
            DsError.ThrowExceptionForHR(hr);
            // the guid is the one from ffdshow
            Type   fftype  = Type.GetTypeFromCLSID(new Guid("0F40E1E5-4F79-4988-B1A9-CC98794E6B55"));
            object ffdshow = Activator.CreateInstance(fftype);

            hr = gb.AddFilter((IBaseFilter)ffdshow, "ffdshow");
            DsError.ThrowExceptionForHR(hr);
            // the guid is the one from the WAV Dest sample in the SDK
            Type   type     = Type.GetTypeFromCLSID(new Guid("3C78B8E2-6C4D-11d1-ADE2-0000F8754B99"));
            object wavedest = Activator.CreateInstance(type);

            hr = gb.AddFilter((IBaseFilter)wavedest, "WAV Dest");
            DsError.ThrowExceptionForHR(hr);
            // manually tell the graph builder to try to hook up the pin that is left
            IPin pWaveDestOut = null;

            hr = icgb.FindPin(wavedest, PinDirection.Output, null, null, true, 0, out pWaveDestOut);
            DsError.ThrowExceptionForHR(hr);
            // render step 1
            hr = icgb.RenderStream(null, null, ibfSrcFile, (IBaseFilter)ffdshow, (IBaseFilter)wavedest);
            DsError.ThrowExceptionForHR(hr);
            // Configure the sample grabber
            IBaseFilter baseGrabFlt = sg as IBaseFilter;

            ConfigSampleGrabber(sg);
            IPin pGrabberIn  = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0);
            IPin pGrabberOut = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Output, 0);

            hr = gb.AddFilter((IBaseFilter)sg, "SampleGrabber");
            DsError.ThrowExceptionForHR(hr);
            AMMediaType mediatype = new AMMediaType();

            sg.GetConnectedMediaType(mediatype);
            hr = gb.Connect(pWaveDestOut, pGrabberIn);
            DsError.ThrowExceptionForHR(hr);
            // file writer
            FileWriter      file_writer = new FileWriter();
            IFileSinkFilter fs          = (IFileSinkFilter)file_writer;

            fs.SetFileName(targetfile, null);
            hr = gb.AddFilter((DirectShowLib.IBaseFilter)file_writer, "File Writer");
            DsError.ThrowExceptionForHR(hr);
            // render step 2
            AMMediaType mediatype2 = new AMMediaType();

            pWaveDestOut.ConnectionMediaType(mediatype2);
            gb.Render(pGrabberOut);
            // alternatively to the file writer, a NullRenderer() could be used to simply discard the samples
            // assign control
            m_mediaCtrl = gb as IMediaControl;
            // run
            hr = m_mediaCtrl.Run();
            DsError.ThrowExceptionForHR(hr);
        }
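
convert() starts the graph and returns immediately. A minimal sketch of the completion handling a caller would still need, assuming the same gb and m_mediaCtrl fields (WaitAndStop is a hypothetical name):

        public void WaitAndStop()
        {
            int hr;
            EventCode code;
            IMediaEvent mediaEvent = (IMediaEvent)gb;

            // block until the source runs dry (EC_COMPLETE), then stop the graph
            hr = mediaEvent.WaitForCompletion(-1, out code);
            DsError.ThrowExceptionForHR(hr);

            hr = m_mediaCtrl.Stop();
            DsError.ThrowExceptionForHR(hr);
        }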
Example #17
        public void TestConnectDisconnectConnectedToConnectionMediaType()
        {
            int         hr;
            IBaseFilter aviSplitter  = null;
            IBaseFilter ibfAVISource = null;
            IPin        pinIn        = null;
            IPin        pinOut       = null;

            IFilterGraph2 graphBuilder = new FilterGraph() as IFilterGraph2;

            try
            {
                ibfAVISource = new AsyncReader() as IBaseFilter;

                // Add it to the graph
                hr = graphBuilder.AddFilter(ibfAVISource, "Ds.NET AsyncReader");
                Marshal.ThrowExceptionForHR(hr);

                // Set the file name
                IFileSourceFilter fsf = ibfAVISource as IFileSourceFilter;
                hr = fsf.Load(@"foo.avi", null);
                Marshal.ThrowExceptionForHR(hr);
                pinOut = DsFindPin.ByDirection(ibfAVISource, PinDirection.Output, 0);

                // Get the avi splitter
                aviSplitter = (IBaseFilter) new AviSplitter();

                // Add it to the graph
                hr = graphBuilder.AddFilter(aviSplitter, "Ds.NET AviSplitter");
                Marshal.ThrowExceptionForHR(hr);
                pinIn = DsFindPin.ByDirection(aviSplitter, PinDirection.Input, 0);

                Assert.IsNotNull(pinOut);
                Assert.IsNotNull(pinIn);

                // Test Connect
                hr = pinOut.Connect(pinIn, null);
                Marshal.ThrowExceptionForHR(hr);


                // Test ConnectedTo
                IPin pinConnect;
                hr = pinOut.ConnectedTo(out pinConnect);
                Marshal.ThrowExceptionForHR(hr);
                Assert.AreEqual(pinIn, pinConnect);
                Marshal.ReleaseComObject(pinConnect);


                // Test ConnectionMediaType
                AMMediaType mediaType = new AMMediaType();
                hr = pinIn.ConnectionMediaType(mediaType);
                Marshal.ThrowExceptionForHR(hr);
                Assert.IsNotNull(mediaType);
                Assert.IsNotNull(mediaType.majorType);
                DsUtils.FreeAMMediaType(mediaType);

                // Test Disconnect
                hr = pinOut.Disconnect();
                Marshal.ThrowExceptionForHR(hr);
            }
            finally
            {
                // release the pins and filters created above, then the graph itself
                if (pinIn != null)
                    Marshal.ReleaseComObject(pinIn);
                if (pinOut != null)
                    Marshal.ReleaseComObject(pinOut);
                if (aviSplitter != null)
                    Marshal.ReleaseComObject(aviSplitter);
                if (ibfAVISource != null)
                    Marshal.ReleaseComObject(ibfAVISource);
                Marshal.ReleaseComObject(graphBuilder);
            }
        }
Example #18
        public void RefreshProperties()
        {
            this.Clear();

            int hr = 0;

            // if the pin is connected, get its ConnectionMediaType
            bool        added   = false;
            AMMediaType contype = new AMMediaType();

            try
            {
                hr = _pin.ConnectionMediaType(contype);
                if (hr == 0)
                {
                    AppendText(ConnectionMediaTypeString(contype));
                    DsUtils.FreeAMMediaType(contype);
                    return;
                }
            }
            catch (Exception ex)
            {
#if DEBUG
                MessageBox.Show(ex.Message, "Error getting media connection type");
#endif
            }

            // the pin's not connected, so get each of the preferred media types for the pin
            AppendText("Preferred Media Types:\r\n");
            IEnumMediaTypes penum = null;
            hr = _pin.EnumMediaTypes(out penum);
            if (hr == 0 && penum != null)
            {
                AMMediaType[] mtypes  = new AMMediaType[1];
                IntPtr        fetched = Marshal.AllocCoTaskMem(4);
                try
                {
                    while (penum.Next(1, mtypes, fetched) == 0)
                    {
                        AppendText(ConnectionMediaTypeString(mtypes[0]));
                        DsUtils.FreeAMMediaType(mtypes[0]);
                        added = true;
                    }
                }
                catch (Exception ex)
                {
#if DEBUG
                    MessageBox.Show(ex.Message, "Error getting pin prefered type");
#endif
                }
                finally
                {
                    Marshal.FreeCoTaskMem(fetched);
                }

                // if we added no preferred media types to the textbox, set it to "None"
                if (added == false)
                {
                    AppendText("None\r\n");
                }
                Marshal.ReleaseComObject(penum);
            }
        }
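
ConnectionMediaTypeString() is referenced above but not shown. A minimal hypothetical version, using only AMMediaType fields that appear elsewhere in these examples:

        private static string ConnectionMediaTypeString(AMMediaType mt)
        {
            // print the GUIDs raw; a real implementation would translate them
            // to friendly names first
            return string.Format("Major type: {0}\r\nSub type: {1}\r\nSample size: {2}\r\n\r\n",
                mt.majorType, mt.subType, mt.sampleSize);
        }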