Example #1
        public static void GetDeinterlaceMode(IVMRDeinterlaceControl9 pDeinterlace)
        {
            VMR9VideoDesc VideoDesc  = new VMR9VideoDesc();
            int           dwNumModes = 0;
            // Fill in the VideoDesc structure (not shown).
            int hr = pDeinterlace.GetNumberOfDeinterlaceModes(ref VideoDesc, ref dwNumModes, null);

            if (hr >= 0 && dwNumModes != 0)
            {
                // Allocate an array for the GUIDs that identify the modes.
                Guid[] pModes = new Guid[dwNumModes];
                if (pModes != null)
                {
                    // Fill the array.
                    hr = pDeinterlace.GetNumberOfDeinterlaceModes(ref VideoDesc, ref dwNumModes, pModes);
                    if (hr >= 0)
                    {
                        // Loop through each item and get the capabilities.
                        for (int i = 0; i < dwNumModes; i++)
                        {
                            VMR9DeinterlaceCaps Caps = new VMR9DeinterlaceCaps();
                            Caps.dwSize = Marshal.SizeOf(typeof(VMR9DeinterlaceCaps));
                            hr = pDeinterlace.GetDeinterlaceModeCaps(pModes[i], ref VideoDesc, ref Caps);
                            if (hr >= 0)
                            {
                                // Examine the Caps structure.
                            }
                        }
                    }
                }
            }
        }
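Example #1 leaves the "Fill in the VideoDesc structure" step out. Below is a minimal sketch of that step, assuming the VMR-9 input pin is already connected with a VIDEOINFOHEADER2 format (as in Examples #3 and #4) and using DirectShowLib types; the FillVideoDesc helper name is hypothetical and not part of the original sample:

        // Hypothetical helper: fill a VMR9VideoDesc from a connected VMR-9 input pin.
        private static void FillVideoDesc(IPin inputPin, ref VMR9VideoDesc videoDesc)
        {
            AMMediaType mt = new AMMediaType();
            int hr = inputPin.ConnectionMediaType(mt);
            DsError.ThrowExceptionForHR(hr);
            try
            {
                // Only VIDEOINFOHEADER2 formats can describe interlaced video.
                if (mt.formatType != FormatType.VideoInfo2)
                {
                    throw new ApplicationException("Only VIDEOINFOHEADER2 formats can be interlaced");
                }
                VideoInfoHeader2 vih2 = (VideoInfoHeader2)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader2));
                videoDesc.dwSize         = Marshal.SizeOf(typeof(VMR9VideoDesc));
                videoDesc.dwSampleWidth  = vih2.BmiHeader.Width;
                videoDesc.dwSampleHeight = Math.Abs(vih2.BmiHeader.Height);
                videoDesc.dwFourCC       = vih2.BmiHeader.Compression;
                // SampleFormat, InputSampleFreq and OutputFrameFreq are derived from the
                // interlace flags and AvgTimePerFrame, as shown in Examples #3 and #4.
            }
            finally
            {
                DsUtils.FreeAMMediaType(mt);
            }
        }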
Example #2
        public void TestGetNumberOfDeinterlaceModes()
        {
            int hr       = 0;
            int numModes = 0;

            videoDesc = GetVideoDesc9();

            hr = deinterlaceControl.GetNumberOfDeinterlaceModes(ref videoDesc, ref numModes, null);
            DsError.ThrowExceptionForHR(hr);

            deinterlaceModes = new Guid[numModes];

            hr = deinterlaceControl.GetNumberOfDeinterlaceModes(ref videoDesc, ref numModes, deinterlaceModes);
            DsError.ThrowExceptionForHR(hr);

            Debug.Assert(hr == 0, "IVMRDeinterlaceControl9.GetNumberOfDeinterlaceModes");
        }
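A natural follow-up to the test above is to apply one of the enumerated modes. Below is a minimal sketch, reusing the same deinterlaceControl and deinterlaceModes fields and the SetDeinterlaceMode(streamId, mode) call that Example #3 relies on; the test method name is hypothetical:

        public void TestSetFirstDeinterlaceMode()
        {
            if (deinterlaceModes == null || deinterlaceModes.Length == 0)
            {
                // The hardware reported no de-interlace modes for this format.
                return;
            }

            // Stream 0 is the first (and usually the only) VMR-9 input stream.
            int hr = deinterlaceControl.SetDeinterlaceMode(0, deinterlaceModes[0]);
            DsError.ThrowExceptionForHR(hr);

            Debug.Assert(hr == 0, "IVMRDeinterlaceControl9.SetDeinterlaceMode");
        }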
Example #3
        public void SetDeinterlaceMode()
        {
            if (!GUIGraphicsContext.IsEvr)
            {
                if (!_isVmr9Initialized)
                {
                    return;
                }
                Log.Debug("VMR9: SetDeinterlaceMode()");
                IVMRDeinterlaceControl9 deinterlace = (IVMRDeinterlaceControl9)_vmr9Filter;
                IPin InPin = null;
                int  hr    = _vmr9Filter.FindPin("VMR Input0", out InPin);
                if (hr != 0)
                {
                    Log.Error("VMR9: failed finding InPin {0:X}", hr);
                }
                AMMediaType mediatype = new AMMediaType();
                InPin.ConnectionMediaType(mediatype);
                //Start by getting the media type of the video stream.
                //Only VideoInfoHeader2 formats can be interlaced.
                if (mediatype.formatType == FormatType.VideoInfo2)
                {
                    Log.Debug("VMR9: SetDeinterlaceMode - FormatType = VideoInfo2");
                    int numModes = 0;
                    VideoInfoHeader2 VideoHeader2 = new VideoInfoHeader2();
                    Marshal.PtrToStructure(mediatype.formatPtr, VideoHeader2);
                    VMR9VideoDesc VideoDesc = new VMR9VideoDesc();
                    // If the FormatType is VideoInfo2, check the dwInterlaceFlags field for the AMInterlace.IsInterlaced flag.
                    //The presence of this flag indicates the video is interlaced.
                    if ((VideoHeader2.InterlaceFlags & AMInterlace.IsInterlaced) != 0)
                    {
                        Log.Debug("VMR9: SetDeinterlaceMode - Interlaced frame detected");
                        //Fill in the VMR9VideoDesc structure with a description of the video stream.
                        VideoDesc.dwSize         = Marshal.SizeOf(VideoDesc);    // dwSize: Set this field to sizeof(VMR9VideoDesc).
                        VideoDesc.dwSampleWidth  = VideoHeader2.BmiHeader.Width; // dwSampleWidth: Set this field to pBMI->biWidth.
                        VideoDesc.dwSampleHeight = Math.Abs(VideoHeader2.BmiHeader.Height); // dwSampleHeight: Set this field to abs(pBMI->biHeight).
                        //SampleFormat: This field describes the interlace characteristics of the media type.
                        //Check the dwInterlaceFlags field in the VIDEOINFOHEADER2 structure, and set SampleFormat equal to the equivalent VMR9_SampleFormat flag.
                        if ((VideoHeader2.InterlaceFlags & AMInterlace.IsInterlaced) != 0)
                        {
                            if ((VideoHeader2.InterlaceFlags & AMInterlace.DisplayModeBobOnly) == 0)
                            {
                                VideoDesc.SampleFormat = VMR9SampleFormat.ProgressiveFrame;
                            }
                            if ((VideoHeader2.InterlaceFlags & AMInterlace.OneFieldPerSample) != 0)
                            {
                                if ((VideoHeader2.InterlaceFlags & AMInterlace.Field1First) != 0)
                                {
                                    VideoDesc.SampleFormat = VMR9SampleFormat.FieldSingleEven;
                                }
                                else
                                {
                                    VideoDesc.SampleFormat = VMR9SampleFormat.FieldSingleOdd;
                                }
                            }
                            if ((VideoHeader2.InterlaceFlags & AMInterlace.Field1First) != 0)
                            {
                                VideoDesc.SampleFormat = VMR9SampleFormat.FieldInterleavedEvenFirst;
                            }
                            else
                            {
                                VideoDesc.SampleFormat = VMR9SampleFormat.FieldInterleavedOddFirst;
                            }
                        }
                        //InputSampleFreq: This field gives the input frequency, which can be calculated from the AvgTimePerFrame field in the VIDEOINFOHEADER2 structure.
                        //In the general case, set dwNumerator to 10000000, and set dwDenominator to AvgTimePerFrame.
                        VideoDesc.InputSampleFreq.dwNumerator   = 10000000;
                        VideoDesc.InputSampleFreq.dwDenominator = (int)VideoHeader2.AvgTimePerFrame;
                        //OutputFrameFreq: This field gives the output frequency, which can be calculated from the InputSampleFreq value and the interleaving characteristics of the input stream:
                        //Set OutputFrameFreq.dwDenominator equal to InputSampleFreq.dwDenominator.
                        //If the input video is interleaved, set OutputFrameFreq.dwNumerator to 2 x InputSampleFreq.dwNumerator. (After deinterlacing, the frame rate is doubled.)
                        //Otherwise, set the value to InputSampleFreq.dwNumerator.
                        VideoDesc.OutputFrameFreq.dwNumerator   = VideoDesc.InputSampleFreq.dwNumerator * 2;
                        VideoDesc.OutputFrameFreq.dwDenominator = VideoDesc.InputSampleFreq.dwDenominator;
                        VideoDesc.dwFourCC = VideoHeader2.BmiHeader.Compression; //dwFourCC: Set this field to pBMI->biCompression.
                        //Pass the structure to the IVMRDeinterlaceControl9::GetNumberOfDeinterlaceModes method.
                        //Call the method twice. The first call returns the number of deinterlace modes the hardware supports for the specified format.
                        hr = deinterlace.GetNumberOfDeinterlaceModes(ref VideoDesc, ref numModes, null);
                        if (hr == 0 && numModes != 0)
                        {
                            Guid[] modes = new Guid[numModes];
                            {
                                //Allocate an array of GUIDs of this size, and call the method again, passing in the address of the array.
                                //The second call fills the array with GUIDs. Each GUID identifies one deinterlacing mode.
                                hr = deinterlace.GetNumberOfDeinterlaceModes(ref VideoDesc, ref numModes, modes);
                                for (int i = 0; i < numModes; i++)
                                {
                                    //To get the capabilities of a particular mode, call the IVMRDeinterlaceControl9::GetDeinterlaceModeCaps method.
                                    //Pass in the same VMR9VideoDesc structure, along with one of the GUIDs from the array.
                                    //The method fills a VMR9DeinterlaceCaps structure with the mode capabilities.
                                    VMR9DeinterlaceCaps caps = new VMR9DeinterlaceCaps();
                                    caps.dwSize = Marshal.SizeOf(typeof(VMR9DeinterlaceCaps));
                                    hr          = deinterlace.GetDeinterlaceModeCaps(modes[i], ref VideoDesc, ref caps);
                                    if (hr == 0)
                                    {
                                        Log.Debug("VMR9: AvailableDeinterlaceMode - {0}: {1}", i, modes[i]);
                                        switch (caps.DeinterlaceTechnology)
                                        {
                                        //The algorithm is unknown or proprietary
                                        case VMR9DeinterlaceTech.Unknown:
                                        {
                                            Log.Info("VMR9: Unknown H/W de-interlace mode");
                                            break;
                                        }

                                        //The algorithm creates each missing line by repeating the line above it or below it.
                                        //This method creates jagged artifacts and is not recommended.
                                        case VMR9DeinterlaceTech.BOBLineReplicate:
                                        {
                                            Log.Info("VMR9: BOB Line Replicate capable");
                                            break;
                                        }

                                        //The algorithm creates the missing lines by vertically stretching each video field by a factor of two.
                                        //For example, it might average two lines or use a (-1, 9, 9, -1)/16 filter across four lines.
                                        //Slight vertical adjustments are made to ensure that the resulting image does not "bob" up and down
                                        case VMR9DeinterlaceTech.BOBVerticalStretch:
                                        {
                                            Log.Info("VMR9: BOB Vertical Stretch capable");
                                            verticalStretch = modes[i].ToString();
                                            break;
                                        }

                                        //The algorithm uses median filtering to recreate the pixels in the missing lines.
                                        case VMR9DeinterlaceTech.MedianFiltering:
                                        {
                                            Log.Info("VMR9: Median Filtering capable");
                                            medianFiltering = modes[i].ToString();
                                            break;
                                        }

                                        //The algorithm uses an edge filter to create the missing lines.
                                        //In this process, spatial directional filters are applied to determine the orientation of edges in the picture content.
                                        //Missing pixels are created by filtering along (rather than across) the detected edges.
                                        case VMR9DeinterlaceTech.EdgeFiltering:
                                        {
                                            Log.Info("VMR9: Edge Filtering capable");
                                            break;
                                        }

                                        //The algorithm uses spatial or temporal interpolation, switching between the two on a field-by-field basis, depending on the amount of motion.
                                        case VMR9DeinterlaceTech.FieldAdaptive:
                                        {
                                            Log.Info("VMR9: Field Adaptive capable");
                                            break;
                                        }

                                        //The algorithm uses spatial or temporal interpolation, switching between the two on a pixel-by-pixel basis, depending on the amount of motion.
                                        case VMR9DeinterlaceTech.PixelAdaptive:
                                        {
                                            Log.Info("VMR9: Pixel Adaptive capable");
                                            pixelAdaptive = modes[i].ToString();
                                            break;
                                        }

                                        //The algorithm identifies objects within a sequence of video fields.
                                        //Before it recreates the missing pixels, it aligns the movement axes of the individual objects in the scene to make them parallel with the time axis.
                                        case VMR9DeinterlaceTech.MotionVectorSteered:
                                        {
                                            Log.Info("VMR9: Motion Vector Steered capable");
                                            break;
                                        }
                                        }
                                    }
                                }
                            }
                            //Set the MP preferred h/w de-interlace modes in order of quality
                            //pixel adaptive, then median filtering & finally vertical stretch
                            if (pixelAdaptive != "")
                            {
                                Guid DeinterlaceMode = new Guid(pixelAdaptive);
                                Log.Debug("VMR9: trying pixel adaptive");
                                hr = deinterlace.SetDeinterlaceMode(0, DeinterlaceMode);
                                if (hr != 0)
                                {
                                    Log.Error("VMR9: pixel adaptive failed!");
                                }
                                else
                                {
                                    Log.Info("VMR9: setting pixel adaptive succeeded");
                                    medianFiltering = "";
                                    verticalStretch = "";
                                }
                            }
                            if (medianFiltering != "")
                            {
                                Guid DeinterlaceMode = new Guid(medianFiltering);
                                Log.Debug("VMR9: trying median filtering");
                                hr = deinterlace.SetDeinterlaceMode(0, DeinterlaceMode);
                                if (hr != 0)
                                {
                                    Log.Error("VMR9: median filtering failed!");
                                }
                                else
                                {
                                    Log.Info("VMR9: setting median filtering succeeded");
                                    verticalStretch = "";
                                }
                            }
                            if (verticalStretch != "")
                            {
                                Guid DeinterlaceMode = new Guid(verticalStretch);
                                Log.Debug("VMR9: trying vertical stretch");
                                hr = deinterlace.SetDeinterlaceMode(0, DeinterlaceMode);
                                if (hr != 0)
                                {
                                    Log.Error("VMR9: Cannot set H/W de-interlace mode - using VMR9 fallback");
                                }
                                else
                                {
                                    Log.Info("VMR9: setting vertical stretch succeeded");
                                }
                            }
                        }
                        else
                        {
                            Log.Info("VMR9: No H/W de-interlaced modes supported, using fallback preference");
                        }
                    }
                    else
                    {
                        Log.Info("VMR9: progressive mode detected - no need to de-interlace");
                    }
                }
                //If the format type is VideoInfo, it must be a progressive frame.
                else
                {
                    Log.Info("VMR9: no need to de-interlace this video source");
                }
                DsUtils.FreeAMMediaType(mediatype);
                //release the VMR9 pin
                hr = DirectShowUtil.ReleaseComObject(InPin);

                InPin     = null;
                mediatype = null;
            }
        }
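The preference cascade at the end of Example #3 (pixel adaptive, then median filtering, then vertical stretch) can be expressed more compactly by keeping the candidate GUIDs themselves rather than their string form. Below is a minimal sketch, assuming the same DirectShowLib types; TrySetPreferredMode is a hypothetical helper, not part of the original project:

        // Hypothetical helper: try candidate de-interlace modes in preference order and
        // stop at the first one the hardware accepts on stream 0.
        private static bool TrySetPreferredMode(IVMRDeinterlaceControl9 deinterlace, params Guid[] candidates)
        {
            foreach (Guid mode in candidates)
            {
                if (mode == Guid.Empty)
                {
                    continue; // this capability was not reported for the current format
                }
                int hr = deinterlace.SetDeinterlaceMode(0, mode);
                if (hr == 0)
                {
                    return true; // mode accepted and now active
                }
            }
            return false; // caller falls back to the default VMR9 behaviour
        }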
Example #4
        // this method is an implementation of the procedure described on this page:
        // http://msdn.microsoft.com/library/en-us/directshow/htm/settingdeinterlacepreferences.asp?frame=true
        private VMR9VideoDesc GetVideoDesc9()
        {
            int           hr        = 0;
            AMMediaType   mediaType = new AMMediaType();
            VMR9VideoDesc vDesc     = new VMR9VideoDesc();

            vDesc.dwSize = Marshal.SizeOf(typeof(VMR9VideoDesc));

            IPin pinIn = DsFindPin.ByDirection(vmr9, PinDirection.Input, 0);

            hr = pinIn.ConnectionMediaType(mediaType);
            DsError.ThrowExceptionForHR(hr);

            Marshal.ReleaseComObject(pinIn);

            if (mediaType.formatType == FormatType.VideoInfo2)
            {
                VideoInfoHeader2 videoHeader = (VideoInfoHeader2)Marshal.PtrToStructure(mediaType.formatPtr, typeof(VideoInfoHeader2));
                if ((videoHeader.InterlaceFlags & AMInterlace.IsInterlaced) != 0)
                {
                    vDesc.dwSampleWidth  = videoHeader.BmiHeader.Width;
                    vDesc.dwSampleHeight = videoHeader.BmiHeader.Height;
                    vDesc.SampleFormat   = ConvertInterlaceFlags(videoHeader.InterlaceFlags);
                    vDesc.dwFourCC       = videoHeader.BmiHeader.Compression;

                    switch (videoHeader.AvgTimePerFrame)
                    {
                    case 166833:
                    {
                        vDesc.InputSampleFreq.dwNumerator   = 60000;
                        vDesc.InputSampleFreq.dwDenominator = 1001;
                        break;
                    }

                    case 333667:
                    case 333666: // this value is not defined in the article above but is returned by testme.iso
                    {
                        vDesc.InputSampleFreq.dwNumerator   = 30000;
                        vDesc.InputSampleFreq.dwDenominator = 1001;
                        break;
                    }

                    case 417188:
                    {
                        vDesc.InputSampleFreq.dwNumerator   = 24000;
                        vDesc.InputSampleFreq.dwDenominator = 1001;
                        break;
                    }

                    case 200000:
                    {
                        vDesc.InputSampleFreq.dwNumerator   = 50;
                        vDesc.InputSampleFreq.dwDenominator = 1;
                        break;
                    }

                    case 400000:
                    {
                        vDesc.InputSampleFreq.dwNumerator   = 25;
                        vDesc.InputSampleFreq.dwDenominator = 1;
                        break;
                    }

                    case 416667:
                    {
                        vDesc.InputSampleFreq.dwNumerator   = 24;
                        vDesc.InputSampleFreq.dwDenominator = 1;
                        break;
                    }

                    default:
                    {
                        throw new ApplicationException("Unknown AvgTimePerFrame : " + videoHeader.AvgTimePerFrame);
                    }
                    }

                    // Video is interleaved
                    vDesc.OutputFrameFreq.dwNumerator   = vDesc.InputSampleFreq.dwNumerator * 2;
                    vDesc.OutputFrameFreq.dwDenominator = vDesc.InputSampleFreq.dwDenominator;
                }
                else
                {
                    throw new ApplicationException("Only interlaced formats");
                }
            }
            else
            {
                throw new ApplicationException("Only VIDEOINFOHEADER2 formats can be interlaced");
            }

            DsUtils.FreeAMMediaType(mediaType);
            return vDesc;
        }
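Example #4 calls a ConvertInterlaceFlags helper that is not shown. Below is a minimal sketch of what it might look like, following the same flag-to-VMR9SampleFormat mapping that Example #3 spells out inline; the exact rules in the original project may differ, and the AMInterlace parameter type is an assumption:

        // Hypothetical helper: map VIDEOINFOHEADER2 interlace flags to a VMR9SampleFormat,
        // mirroring the branching used in Example #3.
        private static VMR9SampleFormat ConvertInterlaceFlags(AMInterlace interlaceFlags)
        {
            if ((interlaceFlags & AMInterlace.IsInterlaced) == 0)
            {
                return VMR9SampleFormat.ProgressiveFrame;
            }
            if ((interlaceFlags & AMInterlace.OneFieldPerSample) != 0)
            {
                // Each media sample carries a single field.
                return (interlaceFlags & AMInterlace.Field1First) != 0
                           ? VMR9SampleFormat.FieldSingleEven
                           : VMR9SampleFormat.FieldSingleOdd;
            }
            // Each media sample carries two interleaved fields.
            return (interlaceFlags & AMInterlace.Field1First) != 0
                       ? VMR9SampleFormat.FieldInterleavedEvenFirst
                       : VMR9SampleFormat.FieldInterleavedOddFirst;
        }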