        /// <summary>
        /// Notifies us that the media type has changed.  When CheckMediaType is called, you can't
        /// assume that returning S_Ok means that the specified media type *will* be used.  When
        /// the two pins involved in the connection finally agree on a media type, that media
        /// type will be passed to this method.
        /// Also, if the media *samples* include a media type (see IMediaSample::GetMediaType on
        /// MSDN), the new type is checked with CheckMediaType, then passed to MediaTypeChanged.
        /// </summary>
        /// <param name="pmt">The newly agreed-upon media type.  You must eventually call DsUtils.FreeAMMediaType on this to avoid a leak</param>
        /// <returns>The return value is ignored.  Declining a media type must occur in CheckMediaType</returns>
        override public int OnMediaTypeChanged(AMMediaType pmt)
        {
            // Let the parent do its bit
            base.OnMediaTypeChanged(pmt);

            Debug.Assert(pmt.formatType == FormatType.VideoInfo2);

            // Overwrite the stride based on the new AMMediaType.  This code is essential since
            // sometimes (like when using the EVR), the stride is changed by sending a new
            // AMMediaType on the samples.

            VideoInfoHeader2 vih2 = (VideoInfoHeader2)Marshal.PtrToStructure(m_pmt.formatPtr, typeof(VideoInfoHeader2));

            // Compute the stride
            if (vih2.BmiHeader.Compression == 3)
            {
                // It isn't possible for MFGetStrideForBitmapInfoHeader to handle straight RGB
                // formats (they'd need the rest of the BMIHeader).  Handle that as a special case.
                m_Stride = (m_Width * m_bpp) / 8;
            }
            else
            {
                MFExtern.MFGetStrideForBitmapInfoHeader((int)m_SubType, vih2.BmiHeader.Width, out m_Stride);
            }

            return(S_Ok);
        }
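
        /// <summary>
        /// Illustration only (not from the original sample): copies the pixel rows of a
        /// frame buffer using the stride computed above.  Rows start every m_Stride
        /// bytes, which can exceed (m_Width * m_bpp) / 8 when the EVR pads rows, and
        /// the padding bytes are not pixel data.  Assumes dest holds at least
        /// rowBytes * |m_Height| bytes.
        /// </summary>
        private void CopyRows(IntPtr pFrame, byte[] dest)
        {
            int rowBytes = (m_Width * m_bpp) / 8;
            for (int row = 0; row < Math.Abs(m_Height); row++)
            {
                // Advance by the (possibly padded) stride; copy only the pixel bytes
                IntPtr pRow = new IntPtr(pFrame.ToInt64() + (long)row * m_Stride);
                Marshal.Copy(pRow, dest, row * rowBytes, rowBytes);
            }
        }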
        /// <summary>
        /// Reads the sample entry properties from stream.
        /// </summary>
        /// <param name="reader">The stream reader.</param>
        internal override void ReadSampleEntryPropertiesFromStream(BoxBinaryReader reader)
        {
            predefined1          = reader.ReadUInt16();
            reserved1            = reader.ReadUInt16();
            predefined2          = reader.ReadBytes(12);
            this.Width           = reader.ReadUInt16();
            this.Height          = reader.ReadUInt16();
            this.HorizResolution = reader.ReadUInt32();
            this.VertResolution  = reader.ReadUInt32();
            reserved2            = reader.ReadUInt32();
            this.FrameCount      = reader.ReadUInt16();

            // CompressorName is a fixed 32-byte field: a one-byte length prefix
            // followed by the display-name bytes (per ISO/IEC 14496-12).
            byte[] buffer = new byte[0x20];
            reader.Read(buffer, 0, 0x20);
            int count = buffer[0];

            if (count < 0x20)
            {
                this.CompressorName = Encoding.UTF8.GetString(buffer, 1, count);
            }
            this.Depth  = reader.ReadUInt16();
            predefined3 = reader.ReadUInt16();

            if (reader.PeekNextBoxType() != BoxType.Null)
            {
                ReadInnerBoxes(reader, BoxType.Esds, BoxType.Avcc, BoxType.Avc1, BoxType.Dvc1, BoxType.Btrt, BoxType.Sinf);
                this.VideoCodecData = GetVideoCodecDataFromInnerBoxes();
            }
            else
            {
                var videoInfoHeader = new VideoInfoHeader2(reader);
                this.VideoCodecData = new VideoTrackCodecData(videoInfoHeader);
            }
        }
Example #3
        private object GetField(AMMediaType mediaType, String fieldName)
        {
            object formatStruct;

            if (mediaType.formatType == FormatType.WaveEx)
            {
                formatStruct = new WaveFormatEx();
            }
            else if (mediaType.formatType == FormatType.VideoInfo)
            {
                formatStruct = new VideoInfoHeader();
            }
            else if (mediaType.formatType == FormatType.VideoInfo2)
            {
                formatStruct = new VideoInfoHeader2();
            }
            else
            {
                throw new NotSupportedException("This device does not support a recognized format block.");
            }

            // Retrieve the nested structure
            Marshal.PtrToStructure(mediaType.formatPtr, formatStruct);

            // Find the required field
            Type      structType = formatStruct.GetType();
            FieldInfo fieldInfo  = structType.GetField(fieldName);

            if (fieldInfo != null)
            {
                return(fieldInfo.GetValue(formatStruct));
            }
            return(null);
        }
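
        // Hypothetical call site for the helper above (illustration only): for an
        // audio media type, read the sample rate field from the WaveFormatEx block.
        private int GetSampleRate(AMMediaType mediaType)
        {
            object rate = GetField(mediaType, "nSamplesPerSec");
            return (rate != null) ? Convert.ToInt32(rate) : 0;
        }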
Example #4
        public int GetLatency(out long prtLatency)
        {
#if HAMED_LOG_METHOD_INFO
            MethodBase method = new StackTrace().GetFrame(0).GetMethod();
            Console.WriteLine(this.GetType().FullName + " - " + method.Name + " - " + method.ToString());
#endif

            prtLatency = UNITS / 30;
            AMMediaType mt = Pins[0].CurrentMediaType;
            if (mt.majorType == MediaType.Video)
            {
                {
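                    // These assignments rely on implicit conversion operators that the
                    // managed DirectShow base classes used here appear to define from
                    // AMMediaType to the format-block types; the conversion yields null
                    // when the formatType doesn't match, so only one branch applies.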
                    VideoInfoHeader _pvi = mt;
                    if (_pvi != null)
                    {
                        prtLatency = _pvi.AvgTimePerFrame;
                    }
                }
                {
                    VideoInfoHeader2 _pvi = mt;
                    if (_pvi != null)
                    {
                        prtLatency = _pvi.AvgTimePerFrame;
                    }
                }
            }
            return(NOERROR);
        }
Example #5
        protected object setStreamConfigSetting(IAMStreamConfig streamConfig, string fieldName, object newValue)
        {
            if (streamConfig == null)
            {
                throw new NotSupportedException();
            }
            this.assertStopped();
            this.derenderGraph();
            IntPtr      zero      = IntPtr.Zero;
            AMMediaType structure = new AMMediaType();

            try
            {
                object obj2;
                int    format = streamConfig.GetFormat(out zero);
                if (format != 0)
                {
                    Marshal.ThrowExceptionForHR(format);
                }
                Marshal.PtrToStructure(zero, structure);
                if (structure.formatType == FormatType.WaveEx)
                {
                    obj2 = new WaveFormatEx();
                }
                else if (structure.formatType == FormatType.VideoInfo)
                {
                    obj2 = new VideoInfoHeader();
                }
                else
                {
                    if (structure.formatType != FormatType.VideoInfo2)
                    {
                        throw new NotSupportedException("This device does not support a recognized format block.");
                    }
                    obj2 = new VideoInfoHeader2();
                }
                Marshal.PtrToStructure(structure.formatPtr, obj2);
                FieldInfo field = obj2.GetType().GetField(fieldName);
                if (field == null)
                {
                    throw new NotSupportedException("Unable to find the member '" + fieldName + "' in the format block.");
                }
                field.SetValue(obj2, newValue);
                Marshal.StructureToPtr(obj2, structure.formatPtr, false);
                format = streamConfig.SetFormat(structure);
                if (format != 0)
                {
                    Marshal.ThrowExceptionForHR(format);
                }
            }
            finally
            {
                DsUtils.FreeAMMediaType(structure);
                Marshal.FreeCoTaskMem(zero);
            }
            this.renderStream = false;
            this.renderGraph();
            this.startPreviewIfNeeded();
            return(null);
        }
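
        // Hypothetical call site for the helper above (illustration only): set a
        // 25 fps capture rate.  AvgTimePerFrame is in 100 ns units, so one frame
        // every 400,000 ticks.
        private void SetFrameRate25(IAMStreamConfig streamConfig)
        {
            setStreamConfigSetting(streamConfig, "AvgTimePerFrame", (long)400000);
        }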
Example #6
 // P/Invoke declaration excerpted without its [DllImport] attribute; in the
 // full source it presumably carries [DllImport("mfplat.dll")] (the MF headers
 // declare this function as returning an HRESULT).
 public static extern void MFCreateVideoMediaTypeFromVideoInfoHeader2(
     VideoInfoHeader2 pVideoInfoHeader,
     int cbVideoInfoHeader,
     long AdditionalVideoFlags,
     [In, MarshalAs(UnmanagedType.LPStruct)] Guid pSubtype,
     out IMFVideoMediaType ppIVideoMediaType
     );
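 // A hedged usage sketch (not from the original source): wrap a raw
 // VideoInfoHeader2 in an IMFVideoMediaType.  MFMediaType.RGB32 is an
 // illustrative subtype choice.
 //
 //   IMFVideoMediaType vmt;
 //   MFCreateVideoMediaTypeFromVideoInfoHeader2(
 //       vih2, Marshal.SizeOf(typeof(VideoInfoHeader2)), 0, MFMediaType.RGB32, out vmt);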
Example #7
        // Retrieve capabilities of a video device
        internal VideoCapabilities(IAMStreamConfig videoStreamConfig, int index)
        {
            AMMediaType           mediaType = null;
            VideoStreamConfigCaps caps      = new VideoStreamConfigCaps( );

            try
            {
                // retrieve capabilities struct at the specified index
                int hr = videoStreamConfig.GetStreamCaps(index, out mediaType, caps);

                if (hr != 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                if (mediaType.FormatType == FormatType.VideoInfo)
                {
                    VideoInfoHeader videoInfo = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));

                    MediaType        = GUID.GetNickname(mediaType.SubType);
                    FrameSize        = new Size(videoInfo.BmiHeader.Width, videoInfo.BmiHeader.Height);
                    BitCount         = videoInfo.BmiHeader.BitCount;
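                    // Media sample times are in 100 ns units, so fps = 10,000,000 / interval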
                    AverageFrameRate = (int)(10000000 / videoInfo.AverageTimePerFrame);
                    MaximumFrameRate = (int)(10000000 / caps.MinFrameInterval);
                    MinimumFrameRate = (int)(10000000 / caps.MaxFrameInterval);
                }
                else if (mediaType.FormatType == FormatType.VideoInfo2)
                {
                    VideoInfoHeader2 videoInfo = (VideoInfoHeader2)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader2));

                    MediaType        = GUID.GetNickname(mediaType.SubType);
                    FrameSize        = new Size(videoInfo.BmiHeader.Width, videoInfo.BmiHeader.Height);
                    BitCount         = videoInfo.BmiHeader.BitCount;
                    AverageFrameRate = (int)(10000000 / videoInfo.AverageTimePerFrame);
                    MaximumFrameRate = (int)(10000000 / caps.MinFrameInterval);
                    MinimumFrameRate = (int)(10000000 / caps.MaxFrameInterval);
                }
                else
                {
                    throw new ApplicationException("Unsupported format found.");
                }

                // ignore 12 bpp formats for now, since it was noticed they cause issues on Windows 8
                // TODO: proper fix needs to be done so ICaptureGraphBuilder2::RenderStream() does not fail
                // on such formats
                if (BitCount <= 12)
                {
                    //throw new ApplicationException( "Unsupported format found." );
                }
            }
            finally
            {
                if (mediaType != null)
                {
                    mediaType.Dispose( );
                }
            }
        }
        /// <summary>
        /// Gets the current frame size
        /// </summary>
        /// <returns>The frame size in a bitmap info header</returns>
        private BitmapInfoHeader GetFrameSize()
        {
            BitmapInfoHeader bmiHeader = null;

            try
            {
                AMMediaType mediaType = new AMMediaType();
                try
                {
                    // Get the current format info
                    mediaType.formatType = FormatType.VideoInfo2;
                    int hr = _streamConfig.GetFormat(out mediaType);
                    if (hr != 0)
                    {
                        Log.Log.Info("GetFrameSize: FAILED to get format - {0}", hr);
                        Marshal.ThrowExceptionForHR(hr);
                        return(bmiHeader);
                    }
                    // The formatPtr member points to different structures
                    // depending on the formatType
                    if (mediaType.formatType == FormatType.VideoInfo)
                    {
                        VideoInfoHeader temp = new VideoInfoHeader();
                        Marshal.PtrToStructure(mediaType.formatPtr, temp);
                        bmiHeader = temp.BmiHeader;
                    }
                    else if (mediaType.formatType == FormatType.VideoInfo2)
                    {
                        VideoInfoHeader2 temp = new VideoInfoHeader2();
                        Marshal.PtrToStructure(mediaType.formatPtr, temp);
                        bmiHeader = temp.BmiHeader;
                    }
                    else if (mediaType.formatType == FormatType.Mpeg2Video)
                    {
                        MPEG2VideoInfo temp = new MPEG2VideoInfo();
                        Marshal.PtrToStructure(mediaType.formatPtr, temp);
                        bmiHeader = temp.hdr.BmiHeader;
                    }
                    else if (mediaType.formatType == FormatType.MpegVideo)
                    {
                        MPEG1VideoInfo temp = new MPEG1VideoInfo();
                        Marshal.PtrToStructure(mediaType.formatPtr, temp);
                        bmiHeader = temp.hdr.BmiHeader;
                    }
                }
                finally
                {
                    // GetFormat fills mediaType directly (the IntPtr the original
                    // freed here was never assigned), so release the format block.
                    DsUtils.FreeAMMediaType(mediaType);
                }
            }
            catch (Exception)
            {
                Log.Log.Info("  VideoCaptureDevice.getStreamConfigSetting() FAILED ");
            }
            return(bmiHeader);
        }
Example #9
        protected object setStreamConfigSetting(IAMStreamConfig streamConfig, string fieldName, object newValue)
        {
            if (streamConfig == null)
                throw new NotSupportedException();

            object returnValue = null;
            IntPtr pmt = IntPtr.Zero;
            AMMediaType mediaType = new AMMediaType();

            try
            {
                // Get the current format info
                int hr = streamConfig.GetFormat(out pmt);
                if (hr != 0)
                    Marshal.ThrowExceptionForHR(hr);
                Marshal.PtrToStructure(pmt, mediaType);

                // The formatPtr member points to different structures
                // depending on the formatType
                object formatStruct;
                if (mediaType.formatType == FormatType.WaveEx)
                    formatStruct = new WaveFormatEx();
                else if (mediaType.formatType == FormatType.VideoInfo)
                    formatStruct = new VideoInfoHeader();
                else if (mediaType.formatType == FormatType.VideoInfo2)
                    formatStruct = new VideoInfoHeader2();
                else
                    throw new NotSupportedException("This device does not support a recognized format block.");

                // Retrieve the nested structure
                Marshal.PtrToStructure(mediaType.formatPtr, formatStruct);

                // Find the required field
                Type structType = formatStruct.GetType();
                FieldInfo fieldInfo = structType.GetField(fieldName);
                if (fieldInfo == null)
                    throw new NotSupportedException("Unable to find the member '" + fieldName + "' in the format block.");

                // Update the value of the field
                fieldInfo.SetValue(formatStruct, newValue);

                // PtrToStructure copies the data so we need to copy it back
                Marshal.StructureToPtr(formatStruct, mediaType.formatPtr, false);

                // Save the changes
                hr = streamConfig.SetFormat(mediaType);
                if (hr != 0)
                    Marshal.ThrowExceptionForHR(hr);
            }
            finally
            {
                //DsUtils.FreeAMMediaType(mediaType);
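                // NOTE: skipping FreeAMMediaType leaks the format block that
                // mediaType.formatPtr points to; FreeCoTaskMem below releases only
                // the outer AM_MEDIA_TYPE struct returned through pmt.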
                Marshal.FreeCoTaskMem(pmt);
            }

            return (returnValue);
        }
        private string VideoInfo2FormatString(AMMediaType mtype)
        {
            VideoInfoHeader2 vih = new VideoInfoHeader2();

            Marshal.PtrToStructure(mtype.formatPtr, vih);
            return("\t\tAspect Ratio: " + vih.PictAspectRatioX.ToString() + "x" + vih.PictAspectRatioY.ToString() + "\r\n" +
                   "\t\tInterlace Format: " + vih.InterlaceFlags.ToString() + "\r\n" +
                   "\t\trcSrc " + vih.SrcRect.ToRectangle().ToString() + "\r\n" +
                   "\t\trcDest " + vih.TargetRect.ToRectangle().ToString() + "\r\n" +
                   "\t\tCtrlFlags " + vih.ControlFlags.ToString() + "\r\n");
        }
Example #11
        public static StreamInfo GetStreamInfo(AMMediaType cMt)
        {
            StreamInfo si = new StreamInfo();

            si.MediaType = cMt.majorType;
            si.SubType   = cMt.subType;

            if (cMt.majorType == MediaType.Video)
            {
                if (cMt.formatType == FormatType.VideoInfo2)
                {
                    si.VideoInfo = (VideoInfoHeader2)Marshal.PtrToStructure(cMt.formatPtr, typeof(VideoInfoHeader2));
                }
                else if (cMt.formatType == FormatType.VideoInfo)
                {
                    // PtrToStructure returns a VideoInfoHeader here; casting it straight
                    // to VideoInfoHeader2 would throw, so copy the members used below.
                    VideoInfoHeader vihSrc = (VideoInfoHeader)Marshal.PtrToStructure(cMt.formatPtr, typeof(VideoInfoHeader));
                    VideoInfoHeader2 vih = new VideoInfoHeader2();
                    vih.BmiHeader       = vihSrc.BmiHeader;
                    vih.AvgTimePerFrame = vihSrc.AvgTimePerFrame;
                    si.VideoInfo = vih;
                }
                else if (cMt.formatType == FormatType.Mpeg2Video)
                {
                    si.VideoInfo = (VideoInfoHeader2)Marshal.PtrToStructure(cMt.formatPtr, typeof(VideoInfoHeader2));
                }
                if (si.VideoInfo != null)
                {
                    si.SimpleType = FromFourCC(si.VideoInfo.BmiHeader.Compression);
                }
            }
            else if (cMt.majorType == MediaType.Audio)
            {
                if (cMt.formatType == FormatType.WaveEx)
                {
                    si.AudioInfo = (WaveFormatEx)Marshal.PtrToStructure(cMt.formatPtr, typeof(WaveFormatEx));
                }

                if (cMt.subType == MediaSubType.DolbyAC3 || cMt.subType == MediaSubType.DOLBY_AC3_SPDIF)
                {
                    si.SimpleType = "AC-3";
                }
                else if (cMt.subType == MediaSubType.MPEG1AudioPayload || cMt.subType == MediaSubType.MPEG1Audio || cMt.subType == MediaSubType.Mpeg2Audio)
                {
                    si.SimpleType = "MPEG Audio";
                }
                //else if (cMt.subType == MediaSubType.DTS_Audio || cMt.subType == MediaSubType.MKV_DTS_Audio)
                //    si.SimpleType = "DTS";
                else if (si.AudioInfo != null)
                {
                    WaveFormat wf = (WaveFormat)si.AudioInfo.wFormatTag;  // an enum (see bottom of doc)
                    si.SimpleType = wf.ToString();
                }
            }
            else if (cMt.majorType == MediaType.MSTVCaption || cMt.majorType == MediaType.AuxLine21Data)
            {
                si.MediaType = MediaType.MSTVCaption;
            }
            return(si);
        }
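
        // A minimal sketch of a FromFourCC helper like the one used above
        // (hypothetical; the real implementation is not part of this excerpt).
        // FourCC codes pack four ASCII characters into an int, low byte first.
        private static string FromFourCCSketch(int fourCC)
        {
            return new string(new[]
            {
                (char)(fourCC & 0xFF),
                (char)((fourCC >> 8) & 0xFF),
                (char)((fourCC >> 16) & 0xFF),
                (char)((fourCC >> 24) & 0xFF),
            });
        }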
        internal VideoCapabilities(IAMStreamConfig videoStreamConfig, int index)
        {
            AMMediaType           mediaType = null;
            VideoStreamConfigCaps caps      = new VideoStreamConfigCaps( );

            try
            {
                int hr = videoStreamConfig.GetStreamCaps(index, out mediaType, caps);

                if (hr != 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                if (mediaType.FormatType == FormatType.VideoInfo)
                {
                    VideoInfoHeader videoInfo = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));

                    FrameSize        = new Size(videoInfo.BmiHeader.Width, videoInfo.BmiHeader.Height);
                    BitCount         = videoInfo.BmiHeader.BitCount;
                    AverageFrameRate = (int)(10000000 / videoInfo.AverageTimePerFrame);
                    MaximumFrameRate = (int)(10000000 / caps.MinFrameInterval);
                }
                else if (mediaType.FormatType == FormatType.VideoInfo2)
                {
                    VideoInfoHeader2 videoInfo = (VideoInfoHeader2)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader2));

                    FrameSize        = new Size(videoInfo.BmiHeader.Width, videoInfo.BmiHeader.Height);
                    BitCount         = videoInfo.BmiHeader.BitCount;
                    AverageFrameRate = (int)(10000000 / videoInfo.AverageTimePerFrame);
                    MaximumFrameRate = (int)(10000000 / caps.MinFrameInterval);
                }
                else
                {
                    throw new ApplicationException("Unsupported format found.");
                }



                if (BitCount <= 12)
                {
                    throw new ApplicationException("Unsupported format found.");
                }
            }
            finally
            {
                if (mediaType != null)
                {
                    mediaType.Dispose( );
                }
            }
        }
Example #13
        public int SetMediaType(AMMediaType pmt)
        {
#if HAMED_LOG_METHOD_INFO
            MethodBase method = new StackTrace().GetFrame(0).GetMethod();
            Console.WriteLine(this.GetType().FullName + " - " + method.Name + " - " + method.ToString());
#endif

            lock (m_Lock)
            {
                if (m_hBitmap != IntPtr.Zero)
                {
                    DeleteObject(m_hBitmap);
                    m_hBitmap = IntPtr.Zero;
                }
                BitmapInfoHeader _bmi = pmt;
                m_bmi.bmiHeader.BitCount = _bmi.BitCount;
                if (_bmi.Height != 0)
                {
                    m_bmi.bmiHeader.Height = _bmi.Height;
                }
                if (_bmi.Width > 0)
                {
                    m_bmi.bmiHeader.Width = _bmi.Width;
                }
                m_bmi.bmiHeader.Compression = BI_RGB;
                m_bmi.bmiHeader.Planes      = 1;
                m_bmi.bmiHeader.ImageSize   = ALIGN16(m_bmi.bmiHeader.Width) * ALIGN16(Math.Abs(m_bmi.bmiHeader.Height)) * m_bmi.bmiHeader.BitCount / 8;
                m_nWidth    = _bmi.Width;
                m_nHeight   = _bmi.Height;
                m_nBitCount = _bmi.BitCount;

                {
                    VideoInfoHeader _pvi = pmt;
                    if (_pvi != null)
                    {
                        m_nAvgTimePerFrame = _pvi.AvgTimePerFrame;
                    }
                }
                {
                    VideoInfoHeader2 _pvi = pmt;
                    if (_pvi != null)
                    {
                        m_nAvgTimePerFrame = _pvi.AvgTimePerFrame;
                    }
                }
            }
            return(NOERROR);
        }
Example #14
        public static LocalVideoSourceCapabilityFormat GetMediaTypeInfo(AMMediaType mediaType, out int height, out int width, out int compression, out VideoInfoHeader v, out VideoInfoHeader2 v2)
        {
            compression = -1;
            v           = null;
            v2          = null;
            if (mediaType.formatType == FormatType.VideoInfo)
            {
                v = new VideoInfoHeader();
                Marshal.PtrToStructure(mediaType.formatPtr, v);
                height      = v.BmiHeader.Height;
                width       = v.BmiHeader.Width;
                compression = v.BmiHeader.Compression;
            }
            else if (mediaType.formatType == FormatType.VideoInfo2)
            {
                v2 = new VideoInfoHeader2();
                Marshal.PtrToStructure(mediaType.formatPtr, v2);

                height      = v2.BmiHeader.Height;
                width       = v2.BmiHeader.Width;
                compression = v2.BmiHeader.Compression;
            }
            else
            {
                throw new InvalidOperationException($"Invalid media type formatType={mediaType.formatType}");
            }

            // The compression values are FourCC codes stored little-endian.
            switch (compression)
            {
            case 0x47504A4D: return(LocalVideoSourceCapabilityFormat.MJpeg); // 'MJPG'

            case 0x32595559: return(LocalVideoSourceCapabilityFormat.Raw);   // 'YUY2'

            case 0x34363248: return(LocalVideoSourceCapabilityFormat.H264);  // 'H264'

            case 0x3231564e: return(LocalVideoSourceCapabilityFormat.NV12);  // 'NV12'

            case 0x30323449: return(LocalVideoSourceCapabilityFormat.I420);  // 'I420'

            case 0x0: return(LocalVideoSourceCapabilityFormat.Empty);

            default:
                Log.Warning($"Unknown video source format/compression {compression}");
                return(LocalVideoSourceCapabilityFormat.Unknown);
            }
        }
        internal VideoCapabilities(IAMStreamConfig videoStreamConfig, int index)
        {
            AMMediaType           ammediaType           = null;
            VideoStreamConfigCaps videoStreamConfigCaps = new VideoStreamConfigCaps();

            try
            {
                int streamCaps = videoStreamConfig.GetStreamCaps(index, out ammediaType, videoStreamConfigCaps);
                if (streamCaps != 0)
                {
                    Marshal.ThrowExceptionForHR(streamCaps);
                }
                if (ammediaType.FormatType == FormatType.VideoInfo)
                {
                    VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(ammediaType.FormatPtr, typeof(VideoInfoHeader));
                    this.FrameSize        = new Size(videoInfoHeader.BmiHeader.Width, videoInfoHeader.BmiHeader.Height);
                    this.BitCount         = (int)videoInfoHeader.BmiHeader.BitCount;
                    this.AverageFrameRate = (int)(10000000L / videoInfoHeader.AverageTimePerFrame);
                    this.MaximumFrameRate = (int)(10000000L / videoStreamConfigCaps.MinFrameInterval);
                }
                else
                {
                    if (!(ammediaType.FormatType == FormatType.VideoInfo2))
                    {
                        throw new ApplicationException("Unsupported format found.");
                    }
                    VideoInfoHeader2 videoInfoHeader2 = (VideoInfoHeader2)Marshal.PtrToStructure(ammediaType.FormatPtr, typeof(VideoInfoHeader2));
                    this.FrameSize        = new Size(videoInfoHeader2.BmiHeader.Width, videoInfoHeader2.BmiHeader.Height);
                    this.BitCount         = (int)videoInfoHeader2.BmiHeader.BitCount;
                    this.AverageFrameRate = (int)(10000000L / videoInfoHeader2.AverageTimePerFrame);
                    this.MaximumFrameRate = (int)(10000000L / videoStreamConfigCaps.MinFrameInterval);
                }
                if (this.BitCount <= 12)
                {
                    throw new ApplicationException("Unsupported format found.");
                }
            }
            finally
            {
                if (ammediaType != null)
                {
                    ammediaType.Dispose();
                }
            }
        }
Example #16
        public int SetMediaType(AMMediaType pmt)
        {
            lock (m_Lock)
            {
                if (m_hBitmap != IntPtr.Zero)
                {
                    DeleteObject(m_hBitmap);
                    m_hBitmap = IntPtr.Zero;
                }
                BitmapInfoHeader _bmi = pmt;
                m_bmi.bmiHeader.BitCount = _bmi.BitCount;
                if (_bmi.Height != 0)
                {
                    m_bmi.bmiHeader.Height = _bmi.Height;
                }
                if (_bmi.Width > 0)
                {
                    m_bmi.bmiHeader.Width = _bmi.Width;
                }
                m_bmi.bmiHeader.Compression = BI_RGB;
                m_bmi.bmiHeader.Planes      = 1;
                m_bmi.bmiHeader.ImageSize   = ALIGN16(m_bmi.bmiHeader.Width) * ALIGN16(Math.Abs(m_bmi.bmiHeader.Height)) * m_bmi.bmiHeader.BitCount / 8;
                m_nWidth    = _bmi.Width;
                m_nHeight   = _bmi.Height;
                m_nBitCount = _bmi.BitCount;

                {
                    VideoInfoHeader _pvi = pmt;
                    if (_pvi != null)
                    {
                        m_nAvgTimePerFrame = _pvi.AvgTimePerFrame;
                    }
                }
                {
                    VideoInfoHeader2 _pvi = pmt;
                    if (_pvi != null)
                    {
                        m_nAvgTimePerFrame = _pvi.AvgTimePerFrame;
                    }
                }
            }
            return(NOERROR);
        }
        /// <summary>
        /// AnnexB formatted h264 bitstream
        /// </summary>
        /// <param name="streamInfo"></param>
        /// <returns></returns>
        public static AMMediaType H264_AnnexB(InputstreamInfo streamInfo)
        {
            int width  = (int)streamInfo.Width;
            int height = (int)streamInfo.Height;

            if (streamInfo.ExtraData.Length > 0)
            {
                var codecData = new H264CodecData(streamInfo.ExtraData);

                SPSUnit spsUnit = new SPSUnit(codecData.SPS);
                width  = spsUnit.Width();
                height = spsUnit.Height();
            }

            VideoInfoHeader2 vi = new VideoInfoHeader2();

            vi.SrcRect.right     = width;
            vi.SrcRect.bottom    = height;
            vi.TargetRect.right  = width;
            vi.TargetRect.bottom = height;

            int hcf = HCF(width, height);
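            // HCF (highest common factor, i.e. the GCD) reduces width:height to
            // the smallest integer ratio for the aspect-ratio fields below.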

            vi.PictAspectRatioX = width / hcf;
            vi.PictAspectRatioY = height / hcf;

            vi.BmiHeader.Width       = width;
            vi.BmiHeader.Height      = height;
            vi.BmiHeader.Planes      = 1;
            vi.BmiHeader.Compression = FOURCC_H264;

            AMMediaType amt = new AMMediaType();

            amt.majorType           = MediaType.Video;
            amt.subType             = MediaSubType.H264;
            amt.temporalCompression = true;
            amt.fixedSizeSamples    = false;
            amt.sampleSize          = 1;
            amt.SetFormat(vi);
            return(amt);
        }
Example #18
        protected object setStreamConfigSetting(IAMStreamConfig streamConfig, string fieldName, object newValue)
        {
            this.assertStopped();
            this.derenderGraph();
            object      result      = null;
            IntPtr      zero        = IntPtr.Zero;
            AMMediaType ammediaType = new AMMediaType();

            try
            {
                int num = streamConfig.GetFormat(out zero);
                Marshal.PtrToStructure(zero, ammediaType);
                object obj;
                if (ammediaType.formatType == FormatType.WaveEx)
                {
                    obj = new WaveFormatEx();
                }
                else if (ammediaType.formatType == FormatType.VideoInfo)
                {
                    obj = new VideoInfoHeader();
                }
                else if (ammediaType.formatType == FormatType.VideoInfo2)
                {
                    obj = new VideoInfoHeader2();
                }
                else
                {
                    // Needed so 'obj' is definitely assigned; the decompiled
                    // original omitted this branch and would not compile as-is.
                    throw new NotSupportedException("This device does not support a recognized format block.");
                }
                Marshal.PtrToStructure(ammediaType.formatPtr, RuntimeHelpers.GetObjectValue(obj));
                Type      type  = obj.GetType();
                FieldInfo field = type.GetField(fieldName);
                field.SetValue(RuntimeHelpers.GetObjectValue(obj), RuntimeHelpers.GetObjectValue(newValue));
                Marshal.StructureToPtr(RuntimeHelpers.GetObjectValue(obj), ammediaType.formatPtr, false);
                num = streamConfig.SetFormat(ammediaType);
            }
            finally
            {
                DsUtils.FreeAMMediaType(ammediaType);
                Marshal.FreeCoTaskMem(zero);
            }
            this.renderGraph();
            this.startPreviewIfNeeded();
            return(result);
        }
Example #19
        public int GetLatency(out long prtLatency)
        {
            prtLatency = UNITS / 30;
            AMMediaType mt = Pins[0].CurrentMediaType;

            if (mt.majorType == MediaType.Video)
            {
                {
                    VideoInfoHeader _pvi = mt;
                    if (_pvi != null)
                    {
                        prtLatency = _pvi.AvgTimePerFrame;
                    }
                }
                {
                    VideoInfoHeader2 _pvi = mt;
                    if (_pvi != null)
                    {
                        prtLatency = _pvi.AvgTimePerFrame;
                    }
                }
            }
            return(NOERROR);
        }
Example #20
        /// <summary>
        ///  Retrieves the value of one member of the IAMStreamConfig format block.
        ///  Helper function for several properties that expose
        ///  video/audio settings from IAMStreamConfig.GetFormat().
        ///  IAMStreamConfig.GetFormat() returns an AMMediaType struct.
        ///  AMMediaType.formatPtr points to a format block structure.
        ///  This format block structure may be one of several
        ///  types, the type being determined by AMMediaType.formatType.
        /// </summary>
        private object getStreamConfigSetting(IAMStreamConfig streamConfig, string fieldName)
        {
            if (streamConfig == null)
            {
                throw new NotSupportedException();
            }

            object      returnValue = null;
            IntPtr      pmt         = IntPtr.Zero;
            AMMediaType mediaType   = new AMMediaType();

            try
            {
                // Get the current format info
                int hr = streamConfig.GetFormat(out pmt);
                if (hr != 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
                Marshal.PtrToStructure(pmt, mediaType);

                // The formatPtr member points to different structures
                // depending on the formatType
                object formatStruct;
                if (mediaType.formatType == FormatType.WaveEx)
                {
                    formatStruct = new WaveFormatEx();
                }
                else if (mediaType.formatType == FormatType.VideoInfo)
                {
                    formatStruct = new VideoInfoHeader();
                }
                else if (mediaType.formatType == FormatType.VideoInfo2)
                {
                    formatStruct = new VideoInfoHeader2();
                }
                else
                {
                    throw new NotSupportedException("This device does not support a recognized format block.");
                }

                // Retrieve the nested structure
                Marshal.PtrToStructure(mediaType.formatPtr, formatStruct);

                // Find the required field
                Type      structType = formatStruct.GetType();
                FieldInfo fieldInfo  = structType.GetField(fieldName);
                if (fieldInfo == null)
                {
                    throw new NotSupportedException("Unable to find the member '" + fieldName + "' in the format block.");
                }

                // Extract the field's current value
                returnValue = fieldInfo.GetValue(formatStruct);
            }
            finally
            {
                DsUtils.FreeAMMediaType(mediaType);
                Marshal.FreeCoTaskMem(pmt);
            }

            return(returnValue);
        }
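
        // Hypothetical call site for the helper above (illustration only): read
        // the current average frame duration (100 ns units) and derive the rate.
        private double GetCurrentFrameRate(IAMStreamConfig streamConfig)
        {
            object o = getStreamConfigSetting(streamConfig, "AvgTimePerFrame");
            return (o is long ticks && ticks > 0) ? 10000000.0 / ticks : 0.0;
        }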
        /// <summary>
        /// The GSSF calls this method to determine whether a proposed media type is acceptable.  While
        /// the GSSF offers up the value set in SetMediaType, downstream filters may counter-propose
        /// media types that are similar.  This method allows C# programs to accept or reject proposed
        /// alternatives.
        /// </summary>
        /// <param name="amt">The proposed alternative media type.  Do NOT call DsUtils.FreeAMMediaType on this</param>
        /// <returns>S_Ok to accept the media type, a negative value to decline</returns>
        override public int CheckMediaType(AMMediaType amt)
        {
            // The media types sent down from the EVR can be very different from the one
            // we proposed.  I've seen a basic 320x240 get transformed to one where biWidth
            // was 384, but TargetRect & SrcRect are still 320 x 240.  Further, the formatSize
            // is sometimes > 1000 bytes, instead of the ~90 you would expect from VIH2.

            // To "approve" such bizarre constructs requires some creativity.  This code is
            // adapted from the EVRPlayer sample.

            if (amt == null ||
                amt.formatType != FormatType.VideoInfo2 ||
                amt.formatSize < Marshal.SizeOf(typeof(VideoInfoHeader2)))
            {
                return(DsResults.E_TypeNotAccepted);
            }

            // ValidateBitmapInfoHeader() - doesn't seem necessary.

            VideoInfoHeader2 pvi = (VideoInfoHeader2)Marshal.PtrToStructure(amt.formatPtr, typeof(VideoInfoHeader2));

            AMMediaType pmtDesired = m_pmt;

            VideoInfoHeader2 vihDesired = (VideoInfoHeader2)Marshal.PtrToStructure(pmtDesired.formatPtr, typeof(VideoInfoHeader2));

            // Check the basics
            if (
                (amt.majorType != pmtDesired.majorType) ||
                (amt.subType != pmtDesired.subType) ||
                // (amt.formatType != pmtDesired.formatType) ||
                (amt.formatSize < pmtDesired.formatSize) ||
                (amt.formatSize == 0) ||
                (amt.formatPtr == IntPtr.Zero)
                )
            {
                return(DsResults.E_InvalidMediaType);
            }

            // Check some of the basic VIH2 stuff

            if ((pvi.AvgTimePerFrame != vihDesired.AvgTimePerFrame) ||
                (pvi.InterlaceFlags != vihDesired.InterlaceFlags) ||
                (pvi.PictAspectRatioX != vihDesired.PictAspectRatioX) ||
                (pvi.PictAspectRatioY != vihDesired.PictAspectRatioY) ||
                (pvi.BmiHeader.Compression != vihDesired.BmiHeader.Compression))
            {
                return(DsResults.E_InvalidMediaType);
            }

            // Check the image size - If the biWidth has changed, we won't be able to just
            // compare to the value we proposed
            if (pvi.BmiHeader.ImageSize != 0)
            {
                int cbImage;

                if (pvi.BmiHeader.Compression != 3)
                {
                    // ARGH! Create a MediaFoundation.Misc.BitmapInfoHeader from a DirectShowLib.BitmapInfoHeader (even
                    // though they are the same), so we can call MFCalculateBitmapImageSize
                    MediaFoundation.Misc.BitmapInfoHeader bmi = new MediaFoundation.Misc.BitmapInfoHeader();
                    GCHandle gh = GCHandle.Alloc(pvi.BmiHeader, GCHandleType.Pinned);
                    try
                    {
                        Marshal.PtrToStructure(gh.AddrOfPinnedObject(), bmi);
                    }
                    finally
                    {
                        gh.Free();
                    }

                    bool b;
                    MFExtern.MFCalculateBitmapImageSize(bmi, Marshal.SizeOf(typeof(BitmapInfo)), out cbImage, out b);
                }
                else
                {
                    cbImage = ((m_bpp * m_Width) / 8) * m_Height;
                }

                if (pvi.BmiHeader.ImageSize != cbImage)
                {
                    return(DsResults.E_InvalidMediaType);
                }
            }

            // Check the dimensions
            Rectangle rcImage = new Rectangle(0, 0, m_Width, Math.Abs(m_Height));

            // Heights must match.
            if (Math.Abs(pvi.BmiHeader.Height) != Math.Abs(vihDesired.BmiHeader.Height))
            {
                return(DsResults.E_InvalidMediaType);
            }

            // If rcTarget is empty, then biWidth must be our original width.
            if (pvi.TargetRect == null || pvi.TargetRect.ToRectangle().IsEmpty)
            {
                if (pvi.BmiHeader.Width != vihDesired.BmiHeader.Width)
                {
                    return(DsResults.E_InvalidMediaType);
                }
            }
            // Otherwise, rcTarget must be the same as our image size
            else if (pvi.TargetRect != rcImage)
            {
                return(DsResults.E_InvalidMediaType);
            }

            // Finally, biWidth must be at least as wide as our image width.
            if (pvi.BmiHeader.Width < vihDesired.BmiHeader.Width)
            {
                return(DsResults.E_InvalidMediaType);
            }

            // Everything checks out.
            return(S_Ok);
        }
Example #22
        public int CheckMediaType(AMMediaType pmt)
        {
            if (pmt == null)
            {
                return(E_POINTER);
            }
            if (pmt.formatPtr == IntPtr.Zero)
            {
                return(VFW_E_INVALIDMEDIATYPE);
            }
            if (pmt.majorType != MediaType.Video)
            {
                return(VFW_E_INVALIDMEDIATYPE);
            }
            if (
                pmt.subType != MediaSubType.RGB24 &&
                pmt.subType != MediaSubType.RGB32 &&
                pmt.subType != MediaSubType.ARGB32
                )
            {
                return(VFW_E_INVALIDMEDIATYPE);
            }
            BitmapInfoHeader _bmi = pmt;

            if (_bmi == null)
            {
                return(E_UNEXPECTED);
            }
            if (_bmi.Compression != BI_RGB)
            {
                return(VFW_E_TYPE_NOT_ACCEPTED);
            }
            if (_bmi.BitCount != 24 && _bmi.BitCount != 32)
            {
                return(VFW_E_TYPE_NOT_ACCEPTED);
            }
            VideoStreamConfigCaps _caps;

            GetDefaultCaps(0, out _caps);
            if (
                _bmi.Width < _caps.MinOutputSize.Width ||
                _bmi.Width > _caps.MaxOutputSize.Width
                )
            {
                return(VFW_E_INVALIDMEDIATYPE);
            }
            long _rate = 0;

            {
                VideoInfoHeader _pvi = pmt;
                if (_pvi != null)
                {
                    _rate = _pvi.AvgTimePerFrame;
                }
            }
            {
                VideoInfoHeader2 _pvi = pmt;
                if (_pvi != null)
                {
                    _rate = _pvi.AvgTimePerFrame;
                }
            }
            if (_rate < _caps.MinFrameInterval || _rate > _caps.MaxFrameInterval)
            {
                return(VFW_E_INVALIDMEDIATYPE);
            }
            return(NOERROR);
        }
Example #23
		private object GetField(AMMediaType mediaType, String fieldName)
		{
			object formatStruct;
			if ( mediaType.formatType == FormatType.WaveEx )
				formatStruct = new WaveFormatEx();
			else if ( mediaType.formatType == FormatType.VideoInfo )
				formatStruct = new VideoInfoHeader();
			else if ( mediaType.formatType == FormatType.VideoInfo2 )
				formatStruct = new VideoInfoHeader2();
			else
				throw new NotSupportedException( "This device does not support a recognized format block." );

			// Retrieve the nested structure
			Marshal.PtrToStructure( mediaType.formatPtr, formatStruct );

			// Find the required field
			Type structType = formatStruct.GetType();
			FieldInfo fieldInfo = structType.GetField(fieldName);
			if(fieldInfo != null)
			{
				return fieldInfo.GetValue(formatStruct);
			}
			return null;
		}
        // This method implements the procedure described on this page:
        // http://msdn.microsoft.com/library/en-us/directshow/htm/settingdeinterlacepreferences.asp?frame=true
        private VMRVideoDesc GetVideoDesc7()
        {
            int          hr        = 0;
            AMMediaType  mediaType = new AMMediaType();
            VMRVideoDesc vDesc     = new VMRVideoDesc();

            vDesc.dwSize = Marshal.SizeOf(typeof(VMRVideoDesc));

            IPin pinIn = DsFindPin.ByDirection(vmr, PinDirection.Input, 0);

            hr = pinIn.ConnectionMediaType(mediaType);
            DsError.ThrowExceptionForHR(hr);

            Marshal.ReleaseComObject(pinIn);

            if (mediaType.formatType == FormatType.VideoInfo2)
            {
                VideoInfoHeader2 videoHeader = (VideoInfoHeader2)Marshal.PtrToStructure(mediaType.formatPtr, typeof(VideoInfoHeader2));
                if ((videoHeader.InterlaceFlags & AMInterlace.IsInterlaced) != 0)
                {
                    vDesc.dwSampleWidth        = videoHeader.BmiHeader.Width;
                    vDesc.dwSampleHeight       = videoHeader.BmiHeader.Height;
                    vDesc.SingleFieldPerSample = ((videoHeader.InterlaceFlags & AMInterlace.OneFieldPerSample) != 0);
                    vDesc.dwFourCC             = videoHeader.BmiHeader.Compression;
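                    // AvgTimePerFrame is in 100 ns units; the switch below maps, e.g.,
                    // 166833 = 10,000,000 / (60000/1001) back to a 59.94 Hz sample rate.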

                    switch (videoHeader.AvgTimePerFrame)
                    {
                    case 166833:
                    {
                        vDesc.InputSampleFreq.dwNumerator   = 60000;
                        vDesc.InputSampleFreq.dwDenominator = 1001;
                        break;
                    }

                    case 333667:
                    {
                        vDesc.InputSampleFreq.dwNumerator   = 30000;
                        vDesc.InputSampleFreq.dwDenominator = 1001;
                        break;
                    }

                    case 333666: // this value is not defined in the paper but is returned by testme.iso
                    {
                        vDesc.InputSampleFreq.dwNumerator   = 30000;
                        vDesc.InputSampleFreq.dwDenominator = 1001;
                        break;
                    }

                    case 417188:
                    {
                        vDesc.InputSampleFreq.dwNumerator   = 24000;
                        vDesc.InputSampleFreq.dwDenominator = 1001;
                        break;
                    }

                    case 200000:
                    {
                        vDesc.InputSampleFreq.dwNumerator   = 50;
                        vDesc.InputSampleFreq.dwDenominator = 1;
                        break;
                    }

                    case 400000:
                    {
                        vDesc.InputSampleFreq.dwNumerator   = 25;
                        vDesc.InputSampleFreq.dwDenominator = 1;
                        break;
                    }

                    case 416667:
                    {
                        vDesc.InputSampleFreq.dwNumerator   = 24;
                        vDesc.InputSampleFreq.dwDenominator = 1;
                        break;
                    }

                    default:
                    {
                        throw new ApplicationException("Unknown AvgTimePerFrame : " + videoHeader.AvgTimePerFrame);
                    }
                    }

                    // Video is interlaced: the output frame rate is twice the
                    // input sample rate
                    vDesc.OutputFrameFreq.dwNumerator   = vDesc.InputSampleFreq.dwNumerator * 2;
                    vDesc.OutputFrameFreq.dwDenominator = vDesc.InputSampleFreq.dwDenominator;
                }
                else
                {
                    throw new ApplicationException("Only interlaced formats");
                }
            }
            else
            {
                throw new ApplicationException("Only VIDEOINFOHEADER2 formats can be interlaced");
            }

            DsUtils.FreeAMMediaType(mediaType);
            return(vDesc);
        }
Example #25
        private object SetStreamConfigSetting(IAMStreamConfig streamConfig, string fieldName, object newValue)
        {
            try
            {
                object      returnValue = null;
                AMMediaType mediaType   = new AMMediaType();

                try
                {
                    // Get the current format info
                    int hr = streamConfig.GetFormat(out mediaType);
                    if (hr != 0)
                    {
                        AppLogger.Message(String.Format("  VideoCaptureDevice:setStreamConfigSetting() FAILED to set:{0} (getformat) hr:{1}", fieldName, hr));
                        return(null);//Marshal.ThrowExceptionForHR(hr);
                    }
                    //Log.Info("  VideoCaptureDevice:setStreamConfigSetting() get formattype");
                    // The formatPtr member points to different structures
                    // depending on the formatType
                    object formatStruct;
                    if (mediaType.formatType == FormatType.WaveEx)
                    {
                        formatStruct = new WaveFormatEx();
                    }
                    else if (mediaType.formatType == FormatType.VideoInfo)
                    {
                        formatStruct = new VideoInfoHeader();
                    }
                    else if (mediaType.formatType == FormatType.VideoInfo2)
                    {
                        formatStruct = new VideoInfoHeader2();
                    }
                    else if (mediaType.formatType == FormatType.Mpeg2Video)
                    {
                        formatStruct = new MPEG2VideoInfo();
                    }
                    else if (mediaType.formatType == FormatType.None)
                    {
                        AppLogger.Message("  VideoCaptureDevice:setStreamConfigSetting() FAILED no format returned");
                        return(null);// throw new NotSupportedException("This device does not support a recognized format block.");
                    }
                    else
                    {
                        AppLogger.Message("  VideoCaptureDevice:setStreamConfigSetting() FAILED unknown fmt");
                        return(null);//throw new NotSupportedException("This device does not support a recognized format block.");
                    }
                    //Log.Info("  VideoCaptureDevice.setStreamConfigSetting() get formatptr");
                    // Retrieve the nested structure
                    Marshal.PtrToStructure(mediaType.formatPtr, formatStruct);

                    // Find the required field
                    //Log.Info("  VideoCaptureDevice.setStreamConfigSetting() get field");
                    Type      structType = formatStruct.GetType();
                    FieldInfo fieldInfo  = structType.GetField(fieldName);
                    if (fieldInfo == null)
                    {
                        AppLogger.Message(String.Format("  VideoCaptureDevice:setStreamConfigSetting() FAILED to to find member:{0}", fieldName));
                        throw new NotSupportedException("FAILED to find the member '" + fieldName + "' in the format block.");
                    }
                    //Log.Info("  VideoCaptureDevice.setStreamConfigSetting() set value");
                    // Update the value of the field
                    fieldInfo.SetValue(formatStruct, newValue);

                    // PtrToStructure copies the data so we need to copy it back
                    Marshal.StructureToPtr(formatStruct, mediaType.formatPtr, false);

                    //Log.Info("  VideoCaptureDevice.setStreamConfigSetting() set format");
                    // Save the changes
                    hr = streamConfig.SetFormat(mediaType);
                    if (hr != 0)
                    {
                        AppLogger.Message(String.Format("  VideoCaptureDevice:setStreamConfigSetting() FAILED to set:{0} {1}", fieldName, hr));
                        return(null);//Marshal.ThrowExceptionForHR(hr);
                    }
                    //else Log.Info("  VideoCaptureDevice.setStreamConfigSetting() set:{0}",fieldName);
                    //Log.Info("  VideoCaptureDevice.setStreamConfigSetting() done");
                }
                finally
                {
                    // GetFormat fills mediaType directly (the IntPtr the original
                    // freed here was never assigned), so release the format block.
                    DsUtils.FreeAMMediaType(mediaType);
                }
                return(returnValue);
            }
            catch (Exception)
            {
                AppLogger.Message("  VideoCaptureDevice.:setStreamConfigSetting() FAILED ");
            }
            return(null);
        }
Example #27
        private object GetStreamConfigSetting(IAMStreamConfig streamConfig, string fieldName)
        {
            object returnValue = null;

            try
            {
                if (streamConfig == null)
                {
                    throw new NotSupportedException();
                }

                AMMediaType mediaType = new AMMediaType();

                try
                {
                    // Get the current format info
                    mediaType.formatType = FormatType.VideoInfo2;
                    int hr = streamConfig.GetFormat(out mediaType);
                    if (hr != 0)
                    {
                        AppLogger.Message(String.Format("VideoCaptureDevice:getStreamConfigSetting() FAILED to get:{0} (not supported)", fieldName));
                        Marshal.ThrowExceptionForHR(hr);
                    }
                    // The formatPtr member points to different structures
                    // depending on the formatType
                    object formatStruct;
                    //Log.Info("  VideoCaptureDevice.getStreamConfigSetting() find formattype");
                    if (mediaType.formatType == FormatType.WaveEx)
                    {
                        formatStruct = new WaveFormatEx();
                    }
                    else if (mediaType.formatType == FormatType.VideoInfo)
                    {
                        formatStruct = new VideoInfoHeader();
                    }
                    else if (mediaType.formatType == FormatType.VideoInfo2)
                    {
                        formatStruct = new VideoInfoHeader2();
                    }
                    else if (mediaType.formatType == FormatType.Mpeg2Video)
                    {
                        formatStruct = new MPEG2VideoInfo();
                    }
                    else if (mediaType.formatType == FormatType.None)
                    {
                        //Log.Info("VideoCaptureDevice:getStreamConfigSetting() FAILED no format returned");
                        //throw new NotSupportedException("This device does not support a recognized format block.");
                        return(null);
                    }
                    else
                    {
                        //Log.Info("VideoCaptureDevice:getStreamConfigSetting() FAILED unknown fmt:{0} {1} {2}", mediaType.formatType, mediaType.majorType, mediaType.subType);
                        //throw new NotSupportedException("This device does not support a recognized format block.");
                        return(null);
                    }

                    //Log.Info("  VideoCaptureDevice.getStreamConfigSetting() get formatptr");
                    // Retrieve the nested structure
                    Marshal.PtrToStructure(mediaType.formatPtr, formatStruct);

                    // Find the required field
                    //Log.Info("  VideoCaptureDevice.getStreamConfigSetting() get field");
                    Type      structType = formatStruct.GetType();
                    FieldInfo fieldInfo  = structType.GetField(fieldName);
                    if (fieldInfo == null)
                    {
                        //Log.Info("VideoCaptureDevice.getStreamConfigSetting() FAILED to to find member:{0}", fieldName);
                        //throw new NotSupportedException("VideoCaptureDevice:FAILED to find the member '" + fieldName + "' in the format block.");
                        return(null);
                    }

                    // Extract the field's current value
                    //Log.Info("  VideoCaptureDevice.getStreamConfigSetting() get value");
                    returnValue = fieldInfo.GetValue(formatStruct);
                    //Log.Info("  VideoCaptureDevice.getStreamConfigSetting() done");
                }
                finally
                {
                    // Free the format block that GetFormat allocated
                    DsUtils.FreeAMMediaType(mediaType);
                }
            }
            catch (Exception)
            {
                AppLogger.Message("  VideoCaptureDevice.getStreamConfigSetting() FAILED ");
            }
            return(returnValue);
        }
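
        // Usage sketch (editor's addition): reading a value back through GetStreamConfigSetting
        // above.  The field name must match a public field of the format structure, e.g.
        // AvgTimePerFrame on VideoInfoHeader/VideoInfoHeader2; the helper returns null when the
        // device exposes no recognized format block or the field is missing.
        private void ReadFrameRateSketch(IAMStreamConfig streamConfig)
        {
            object value = GetStreamConfigSetting(streamConfig, "AvgTimePerFrame");
            if (value != null)
            {
                double fps = 10000000.0 / (long)value; // AvgTimePerFrame is in 100 ns units
                AppLogger.Message(String.Format("Current frame rate: {0:F2} fps", fps));
            }
        }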
Example #28
 // The P/Invoke needs a DllImport attribute to compile; the function lives in mfplat.dll
 // and returns an HRESULT, so it is declared here as int rather than void.
 [DllImport("mfplat.dll", ExactSpelling = true)]
 public static extern int MFInitMediaTypeFromVideoInfoHeader2(
     [In] IMFMediaType pMFType,
     VideoInfoHeader2 pVIH2,
     [In] int cbBufSize,
     [In, MarshalAs(UnmanagedType.LPStruct)] Guid pSubtype
     );
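
 // Usage sketch (editor's addition): converting a DirectShow VideoInfoHeader2 into a Media
 // Foundation type.  Assumes the MediaFoundation.NET wrapper MFExtern.MFCreateMediaType; vih2
 // is a VideoInfoHeader2 already read from a connected pin and subtype is the video subtype
 // GUID from the source AMMediaType.  Passing Marshal.SizeOf for cbBufSize is a simplification
 // that ignores any format extensions appended after the header.
 public static IMFMediaType MediaTypeFromVih2Sketch(VideoInfoHeader2 vih2, Guid subtype)
 {
     IMFMediaType mfType;
     int hr = MFExtern.MFCreateMediaType(out mfType);
     if (hr < 0) Marshal.ThrowExceptionForHR(hr);
     hr = MFInitMediaTypeFromVideoInfoHeader2(mfType, vih2, Marshal.SizeOf(typeof(VideoInfoHeader2)), subtype);
     if (hr < 0) Marshal.ThrowExceptionForHR(hr);
     return mfType;
 }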
Example #29
        DSStreamResultCodes InitWithVideoFile(WTVStreamingVideoRequest strq)
        {
            UsingSBEFilter = false;  // Not using stream buffer

            // Init variables
            IPin[]   pin             = new IPin[1];
            string   dPin            = string.Empty;
            string   sName           = string.Empty;
            string   dName           = string.Empty;
            string   sPin            = string.Empty;
            FileInfo fiInputFile     = new FileInfo(strq.FileName);
            string   txtOutputFNPath = fiInputFile.FullName + ".wmv";

            if (
                (fiInputFile.Extension.ToLowerInvariant().Equals(".wtv")) ||
                (fiInputFile.Extension.ToLowerInvariant().Equals(".dvr-ms"))
                )
            {
                return(DSStreamResultCodes.ErrorInvalidFileType);
            }

            int hr = 0;

            try
            {
                // Get the graphbuilder interface
                SendDebugMessage("Creating Graph Object", 0);
                IGraphBuilder graphbuilder = (IGraphBuilder)currentFilterGraph;

                // Create an ASF writer filter
                SendDebugMessage("Creating ASF Writer", 0);
                WMAsfWriter asf_filter = new WMAsfWriter();
                dc.Add(asf_filter);                            // CHECK FOR ERRORS
                currentOutputFilter = (IBaseFilter)asf_filter; // class variable
                // Add the ASF filter to the graph
                hr = graphbuilder.AddFilter((IBaseFilter)asf_filter, "WM Asf Writer");
                DsError.ThrowExceptionForHR(hr);

                // Set the filename
                SendDebugMessage("Setting filename", 0);
                IFileSinkFilter sinkFilter = (IFileSinkFilter)asf_filter;
                string          destPathFN = fiInputFile.FullName + ".wmv";
                hr = sinkFilter.SetFileName(destPathFN, null);
                DsError.ThrowExceptionForHR(hr);

                // Handy to have an ACM Wrapper filter hanging around for AVI files with MP3 audio
                SendDebugMessage("Adding ACM Wrapper", 0);
                IBaseFilter ACMFilter = FilterDefinition.AddToFilterGraph(FilterDefinitions.Other.ACMWrapperFilter, ref graphbuilder);
                dc.Add(ACMFilter);

                // Render file - then build graph
                SendDebugMessage("Rendering file", 0);
                graphbuilder.RenderFile(fiInputFile.FullName, null);
                SendDebugMessage("Saving graph", 0);
                FilterGraphTools.SaveGraphFile(graphbuilder, "C:\\ProgramData\\RemotePotato\\lastfiltergraph.grf");

                // Are both our ASF pins connected?
                IPin ASFVidInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Video, MediaSubType.Null);
                IPin ASFAudInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Audio, MediaSubType.Null);

                // Run graph [can use this also to get media type => see, e.g. dvrmstowmvhd by Babgvant]
                SendDebugMessage("Run graph for testing purposes", 0);
                IMediaControl tempControl = (IMediaControl)graphbuilder;
                IMediaEvent   tempEvent   = (IMediaEvent)graphbuilder;
                DsError.ThrowExceptionForHR(tempControl.Pause());
                EventCode pEventCode;
                hr = tempEvent.WaitForCompletion(1000, out pEventCode);

                // Get media type from vid input pin for ASF writer
                AMMediaType pmt = new AMMediaType();
                hr = ASFVidInputPin.ConnectionMediaType(pmt);

                FrameSize SourceFrameSize = null;
                if (pmt.formatType == FormatType.VideoInfo2)
                {
                    // Now graph has been run and stopped we can get the video width and height from the output pin of the main video decoder
                    VideoInfoHeader2 pvih2 = new VideoInfoHeader2();
                    Marshal.PtrToStructure(pmt.formatPtr, pvih2);
                    SourceFrameSize = new FrameSize(pvih2.BmiHeader.Width, pvih2.BmiHeader.Height);
                }
                else if (pmt.formatType == FormatType.VideoInfo)  //{05589f80-c356-11ce-bf01-00aa0055595a}
                {
                    VideoInfoHeader pvih = new VideoInfoHeader();
                    Marshal.PtrToStructure(pmt.formatPtr, pvih);
                    SourceFrameSize = new FrameSize(pvih.BmiHeader.Width, pvih.BmiHeader.Height);
                }
                else
                {
                    SourceFrameSize = new FrameSize(200, 200); // SQUARE
                }
                // Stop graph if necessary
                FilterState pFS;
                hr = tempControl.GetState(1000, out pFS);
                if (pFS != FilterState.Stopped)
                {
                    DsError.ThrowExceptionForHR(tempControl.Stop());
                }
                // Free up media type
                DsUtils.FreeAMMediaType(pmt); pmt = null;

                // (re)Configure the ASF writer with the selected WM Profile
                ConfigureASFWriter(asf_filter, strq, SourceFrameSize);

                // Release pins
                SendDebugMessage("Releasing COM objects (pins)", 0);
                // source
                Marshal.ReleaseComObject(ASFVidInputPin); ASFVidInputPin = null;
                Marshal.ReleaseComObject(ASFAudInputPin); ASFAudInputPin = null;
            }
            catch (Exception ex)
            {
                SendDebugMessageWithException(ex.Message, ex);
                return(DSStreamResultCodes.ErrorExceptionOccurred);
            }

            return(DSStreamResultCodes.OK);
        }
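
        // Editor's sketch: the VideoInfo/VideoInfo2 probing above (and in Example #30 below)
        // recurs in several of these examples, so it can be factored into one helper.  FrameSize
        // and the square fallback are taken from InitWithVideoFile above.
        private static FrameSize GetFrameSizeFromMediaType(AMMediaType pmt)
        {
            if (pmt.formatType == FormatType.VideoInfo2)
            {
                VideoInfoHeader2 vih2 = new VideoInfoHeader2();
                Marshal.PtrToStructure(pmt.formatPtr, vih2);
                return new FrameSize(vih2.BmiHeader.Width, vih2.BmiHeader.Height);
            }
            if (pmt.formatType == FormatType.VideoInfo)
            {
                VideoInfoHeader pvih = new VideoInfoHeader();
                Marshal.PtrToStructure(pmt.formatPtr, pvih);
                return new FrameSize(pvih.BmiHeader.Width, pvih.BmiHeader.Height);
            }
            return new FrameSize(200, 200); // same square fallback as InitWithVideoFile
        }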
Example #30
        DSStreamResultCodes InitWithStreamBufferFile(WTVStreamingVideoRequest strq)
        {
            // Init variables
            //IPin[] pin = new IPin[1];
            IBaseFilter DecFilterAudio   = null;
            IBaseFilter DecFilterVideo   = null;
            IBaseFilter MainAudioDecoder = null;
            IBaseFilter MainVideoDecoder = null;
            string      dPin             = string.Empty;
            string      sName            = string.Empty;
            string      dName            = string.Empty;
            string      sPin             = string.Empty;
            FileInfo    fiInputFile      = new FileInfo(strq.FileName);
            string      txtOutputFNPath  = fiInputFile.FullName + ".wmv";

            if (
                (!fiInputFile.Extension.ToLowerInvariant().Equals(".wtv")) &&
                (!fiInputFile.Extension.ToLowerInvariant().Equals(".dvr-ms"))
                )
            {
                return(DSStreamResultCodes.ErrorInvalidFileType);
            }

            int hr = 0;

            try
            {
                // Get the graphbuilder interface
                SendDebugMessage("Creating Graph Object", 0);
                IGraphBuilder graphbuilder = (IGraphBuilder)currentFilterGraph;

                // Add the DVRMS/WTV file / filter to the graph
                SendDebugMessage("Add SBE Source Filter", 0);

                hr = graphbuilder.AddSourceFilter(fiInputFile.FullName, "SBE Filter", out currentSBEfilter); // class variable
                DsError.ThrowExceptionForHR(hr);
                dc.Add(currentSBEfilter);

                // Get the SBE audio and video out pins
                IPin SBEVidOutPin, SBEAudOutPin;
                SBEAudOutPin = FilterGraphTools.FindPinByMediaType(currentSBEfilter, PinDirection.Output, MediaType.Audio, MediaSubType.Null);
                SBEVidOutPin = FilterGraphTools.FindPinByMediaType(currentSBEfilter, PinDirection.Output, MediaType.Video, MediaSubType.Null);

                // Set up two decrypt filters according to file extension (assumes audio and video are both present)
                if (fiInputFile.Extension.ToLowerInvariant().Equals(".dvr-ms"))
                {
                    // Add DVR-MS decrypt filters
                    SendDebugMessage("Add DVRMS (bda) decryption", 0);
                    DecFilterAudio = (IBaseFilter) new DTFilter();  // THESE ARE FOR DVR-MS (BDA DTFilters)
                    DecFilterVideo = (IBaseFilter) new DTFilter();
                    graphbuilder.AddFilter(DecFilterAudio, "Decrypt / Tag");
                    graphbuilder.AddFilter(DecFilterVideo, "Decrypt / Tag 0001");
                }
                else  // Add WTV decrypt filters
                {
                    SendDebugMessage("Add WTV (pbda) decryption", 0);
                    DecFilterAudio = FilterDefinition.AddToFilterGraph(FilterDefinitions.Decrypt.DTFilterPBDA, ref graphbuilder);
                    DecFilterVideo = FilterDefinition.AddToFilterGraph(FilterDefinitions.Decrypt.DTFilterPBDA, ref graphbuilder, "PBDA DTFilter 0001");
                }
                dc.Add(DecFilterAudio);
                dc.Add(DecFilterVideo);

                // Make the first link in the graph: SBE => Decrypts
                SendDebugMessage("Connect SBE => Decrypt filters", 0);
                IPin DecVideoInPin = DsFindPin.ByDirection(DecFilterVideo, PinDirection.Input, 0);
                FilterGraphTools.ConnectFilters(graphbuilder, SBEVidOutPin, DecVideoInPin, false);
                IPin DecAudioInPin = DsFindPin.ByDirection(DecFilterAudio, PinDirection.Input, 0);
                if (DecAudioInPin == null)
                {
                    SendDebugMessage("WARNING: No Audio Input to decrypt filter.");
                }
                else
                {
                    FilterGraphTools.ConnectFilters(graphbuilder, SBEAudOutPin, DecAudioInPin, false);
                }

                // Get Dec Audio Out pin
                IPin DecAudioOutPin = DsFindPin.ByDirection(DecFilterAudio, PinDirection.Output, 0);

                // Examine Dec Audio out for audio format
                SendDebugMessage("Examining source audio", 0);
                AMMediaType AudioMediaType = null;
                getPinMediaType(DecAudioOutPin, MediaType.Audio, Guid.Empty, Guid.Empty, ref AudioMediaType);
                SendDebugMessage("Audio media subtype: " + AudioMediaType.subType.ToString());
                SendDebugMessage("Examining Audio StreamInfo");
                StreamInfo si         = FileInformation.GetStreamInfo(AudioMediaType);
                bool       AudioIsAC3 = (si.SimpleType == "AC-3");
                if (AudioIsAC3)
                {
                    SendDebugMessage("Audio type is AC3");
                }
                else
                {
                    SendDebugMessage("Audio type is not AC3");
                }
                si = null;
                DsUtils.FreeAMMediaType(AudioMediaType);

                // Add an appropriate audio decoder
                if (AudioIsAC3)
                {
                    if (!FilterGraphTools.IsThisComObjectInstalled(FilterDefinitions.Audio.AudioDecoderMPCHC.CLSID))
                    {
                        SendDebugMessage("Missing AC3 Audio Decoder, and AC3 audio detected.");
                        return(DSStreamResultCodes.ErrorAC3CodecNotFound);
                    }
                    else
                    {
                        MainAudioDecoder = FilterDefinition.AddToFilterGraph(FilterDefinitions.Audio.AudioDecoderMPCHC, ref graphbuilder);   //MainAudioDecoder = FatAttitude.WTVTranscoder.FilterDefinitions.Audio.AudioDecoderFFDShow.AddToFilterGraph(ref graph);
                        Guid tmpGuid; MainAudioDecoder.GetClassID(out tmpGuid);
                        SendDebugMessage("Main Audio decoder CLSID is " + tmpGuid.ToString());
                    }
                }
                else
                {
                    MainAudioDecoder = FilterDefinition.AddToFilterGraph(FilterDefinitions.Audio.AudioDecoderMSDTV, ref graphbuilder);
                }

                // Add a video decoder
                SendDebugMessage("Add DTV decoder", 0);
                MainVideoDecoder = FilterDefinition.AddToFilterGraph(FilterDefinitions.Video.VideoDecoderMSDTV, ref graphbuilder);
                dc.Add(MainAudioDecoder);
                dc.Add(MainVideoDecoder);

                //SetAudioDecoderOutputToPCMStereo(MainAudioDecoder);

                // Add a null renderer
                SendDebugMessage("Add null renderer", 0);
                NullRenderer MyNullRenderer = new NullRenderer();
                dc.Add(MyNullRenderer);
                hr = graphbuilder.AddFilter((IBaseFilter)MyNullRenderer, @"Null Renderer");
                DsError.ThrowExceptionForHR(hr);

                // Link up video through to null renderer
                SendDebugMessage("Connect video to null renderer", 0);
                // Make the second link:  Decrypts => DTV
                IPin DecVideoOutPin = DsFindPin.ByDirection(DecFilterVideo, PinDirection.Output, 0);
                IPin DTVVideoInPin  = DsFindPin.ByName(MainVideoDecoder, @"Video Input"); // alternatively DsFindPin.ByDirection(MainVideoDecoder, PinDirection.Input, 0); the first pin should be the video input
                FilterGraphTools.ConnectFilters(graphbuilder, DecVideoOutPin, DTVVideoInPin, false);
                // 3. DTV => Null renderer
                IPin NullRInPin     = DsFindPin.ByDirection((IBaseFilter)MyNullRenderer, PinDirection.Input, 0);
                IPin DTVVideoOutPin = FilterGraphTools.FindPinByMediaType(MainVideoDecoder, PinDirection.Output, MediaType.Video, MediaSubType.Null);
                FilterGraphTools.ConnectFilters(graphbuilder, DTVVideoOutPin, NullRInPin, false);
                Marshal.ReleaseComObject(NullRInPin); NullRInPin = null;

                // Run graph [can use this also to get media type => see, e.g. dvrmstowmvhd by Babgvant]
                SendDebugMessage("Run graph for testing purposes", 0);
                IMediaControl tempControl = (IMediaControl)graphbuilder;
                IMediaEvent   tempEvent   = (IMediaEvent)graphbuilder;
                DsError.ThrowExceptionForHR(tempControl.Pause());
                DsError.ThrowExceptionForHR(tempControl.Run());
                EventCode pEventCode;
                hr = tempEvent.WaitForCompletion(1000, out pEventCode);
                //DsError.ThrowExceptionForHR(hr);  // DO *NOT* DO THIS HERE!  THERE MAY WELL BE AN ERROR DUE TO EVENTS RAISED BY THE STREAM BUFFER ENGINE, THIS IS A DELIBERATE TEST RUN OF THE GRAPH
                // Stop graph if necessary
                FilterState pFS;
                hr = tempControl.GetState(1000, out pFS);
                if (pFS == FilterState.Running)
                {
                    DsError.ThrowExceptionForHR(tempControl.Stop());
                }

                // Remove null renderer
                hr = graphbuilder.RemoveFilter((IBaseFilter)MyNullRenderer);

                // Now graph has been run and stopped we can get the video width and height from the output pin of the main video decoder
                AMMediaType pmt = null;
                getPinMediaType(DTVVideoOutPin, MediaType.Video, MediaSubType.YUY2, Guid.Empty, ref pmt);
                FrameSize SourceFrameSize;
                if (pmt.formatType == FormatType.VideoInfo2)
                {
                    VideoInfoHeader2 pvih2 = new VideoInfoHeader2();
                    Marshal.PtrToStructure(pmt.formatPtr, pvih2);
                    int VideoWidth  = pvih2.BmiHeader.Width;
                    int VideoHeight = pvih2.BmiHeader.Height;
                    SourceFrameSize = new FrameSize(VideoWidth, VideoHeight);
                }
                else
                {
                    SourceFrameSize = new FrameSize(320, 240);
                }

                // Free up
                DsUtils.FreeAMMediaType(pmt); pmt = null;

                // Link up audio
                // 2. Audio Decrypt -> Audio decoder
                IPin MainAudioInPin = DsFindPin.ByDirection(MainAudioDecoder, PinDirection.Input, 0);
                FilterGraphTools.ConnectFilters(graphbuilder, DecAudioOutPin, MainAudioInPin, false);

                // Add ASF Writer
                // Create an ASF writer filter
                SendDebugMessage("Creating ASF Writer", 0);
                WMAsfWriter asf_filter = new WMAsfWriter();
                dc.Add(asf_filter);                            // CHECK FOR ERRORS
                currentOutputFilter = (IBaseFilter)asf_filter; // class variable
                // Add the ASF filter to the graph
                hr = graphbuilder.AddFilter((IBaseFilter)asf_filter, "WM Asf Writer");
                DsError.ThrowExceptionForHR(hr);

                // Set the filename
                IFileSinkFilter sinkFilter = (IFileSinkFilter)asf_filter;
                string          destPathFN = fiInputFile.FullName + ".wmv";
                hr = sinkFilter.SetFileName(destPathFN, null);
                DsError.ThrowExceptionForHR(hr);

                // Make the final links:  DTV => writer
                SendDebugMessage("Linking audio/video through to decoder and writer", 0);
                IPin DTVAudioOutPin   = DsFindPin.ByDirection(MainAudioDecoder, PinDirection.Output, 0);
                IPin ASFAudioInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Audio, MediaSubType.Null);
                IPin ASFVideoInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Video, MediaSubType.Null);
                FilterGraphTools.ConnectFilters(graphbuilder, DTVAudioOutPin, ASFAudioInputPin, false);
                if (ASFVideoInputPin != null)
                {
                    FilterGraphTools.ConnectFilters(graphbuilder, DTVVideoOutPin, ASFVideoInputPin, false);
                }

                // Configure ASFWriter
                ConfigureASFWriter(asf_filter, strq, SourceFrameSize);

                // Release pins
                SendDebugMessage("Releasing COM objects (pins)", 0);
                // dec
                Marshal.ReleaseComObject(DecAudioInPin); DecAudioInPin   = null;
                Marshal.ReleaseComObject(DecVideoInPin); DecVideoInPin   = null;
                Marshal.ReleaseComObject(DecVideoOutPin); DecVideoOutPin = null;
                Marshal.ReleaseComObject(DecAudioOutPin); DecAudioOutPin = null;
                // dtv
                Marshal.ReleaseComObject(MainAudioInPin); MainAudioInPin = null;
                Marshal.ReleaseComObject(DTVVideoInPin); DTVVideoInPin   = null;
                Marshal.ReleaseComObject(DTVVideoOutPin); DTVVideoOutPin = null;
                Marshal.ReleaseComObject(DTVAudioOutPin); DTVAudioOutPin = null;
                // asf
                Marshal.ReleaseComObject(ASFAudioInputPin); ASFAudioInputPin = null;
                Marshal.ReleaseComObject(ASFVideoInputPin); ASFVideoInputPin = null;
            }
            catch (Exception ex)
            {
                SendDebugMessageWithException(ex.Message, ex);
                return(DSStreamResultCodes.ErrorExceptionOccurred);
            }

            return(DSStreamResultCodes.OK);
        }
Example #31
		/// <summary>
		///  Retrieves the value of one member of the IAMStreamConfig format block.
		///  Helper function for several properties that expose
		///  video/audio settings from IAMStreamConfig.GetFormat().
		///  IAMStreamConfig.GetFormat() returns a AMMediaType struct.
		///  AMMediaType.formatPtr points to a format block structure.
		///  This format block structure may be one of several 
		///  types, the type being determined by AMMediaType.formatType.
		/// </summary>
		protected object getStreamConfigSetting( IAMStreamConfig streamConfig, string fieldName)
		{
			if ( streamConfig == null )
				throw new NotSupportedException();
			assertStopped();

			derenderGraph();

			object returnValue = null;
#if DSHOWNET
			IntPtr pmt = IntPtr.Zero;
#endif
			AMMediaType mediaType = new AMMediaType();

			try 
			{
				// Get the current format info
#if DSHOWNET
                int hr = streamConfig.GetFormat(out pmt);
#else
				int hr = streamConfig.GetFormat(out mediaType);
#endif
				if ( hr != 0 )
					Marshal.ThrowExceptionForHR( hr );

#if DSHOWNET
				Marshal.PtrToStructure( pmt, mediaType );
#endif

				// The formatPtr member points to different structures
				// depending on the formatType
				object formatStruct;
				if ( mediaType.formatType == FormatType.WaveEx )
					formatStruct = new WaveFormatEx();
				else if ( mediaType.formatType == FormatType.VideoInfo )
					formatStruct = new VideoInfoHeader();
				else if ( mediaType.formatType == FormatType.VideoInfo2 )
					formatStruct = new VideoInfoHeader2();
				else
					throw new NotSupportedException( "This device does not support a recognized format block." );

				// Retrieve the nested structure
				Marshal.PtrToStructure( mediaType.formatPtr, formatStruct );

				// Find the required field
				Type structType = formatStruct.GetType();
				FieldInfo fieldInfo = structType.GetField( fieldName );
				if ( fieldInfo == null )
					throw new NotSupportedException( "Unable to find the member '" + fieldName + "' in the format block." );

				// Extract the field's current value
				returnValue = fieldInfo.GetValue( formatStruct ); 
						
			}
			finally
			{
				DsUtils.FreeAMMediaType( mediaType );
#if DSHOWNET
				Marshal.FreeCoTaskMem( pmt );
#endif
			}
			renderGraph();
			startPreviewIfNeeded();

			return( returnValue );
		}
Example #32
        /// <summary>
        /// Builds the capture graph.
        /// </summary>
        /// <param name="Resolution">Capture resolution as "width*height", e.g. "640*480".</param>
        /// <param name="Frames">Requested frame rate in frames per second; values of 0 or less keep the device default.</param>
        public void CreateGraph(string Resolution, int Frames)
        {
            if (graphBuilder != null)
            {
                return;
            }
            graphBuilder        = (IFilterGraph2) new FilterGraph();                  // Get the IFilterGraph2 interface object
            captureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2(); // Get the ICaptureGraphBuilder2 interface object


            int hr = captureGraphBuilder.SetFiltergraph(this.graphBuilder); // Attach the filter graph to the capture graph

            DsError.ThrowExceptionForHR(hr);

            // Add the video input device to the graph
            hr = graphBuilder.AddFilter(theDevice, "source filter");
            DsError.ThrowExceptionForHR(hr);

            // Add the video compressor filter to the graph
            if (theDeviceCompressor != null)
            {
                hr = graphBuilder.AddFilter(theDeviceCompressor, "devicecompressor filter");
                DsError.ThrowExceptionForHR(hr);
            }
            // Add the audio input device to the graph
            if (theAudio != null)
            {
                hr = graphBuilder.AddFilter(theAudio, "audio filter");
                DsError.ThrowExceptionForHR(hr);
            }
            // Add the audio compressor filter to the graph
            if (theAudioCompressor != null)
            {
                hr = graphBuilder.AddFilter(theAudioCompressor, "audiocompressor filter");
                DsError.ThrowExceptionForHR(hr);
            }
            mediaControl = (IMediaControl)this.graphBuilder; // Get the IMediaControl interface object

            m_PictureReady = new ManualResetEvent(false);

            sampleGrabber = new SampleGrabber() as ISampleGrabber;                            // Create the sample grabber interface.
            ConfigureSampleGrabber(sampleGrabber);                                            // Configure the SampleGrabber and add the preview callback
            hr = this.graphBuilder.AddFilter(sampleGrabber as IBaseFilter, "Frame Callback"); // Add the SampleGrabber to the graph.
            DsError.ThrowExceptionForHR(hr);


            // Read the camera configuration
            AMMediaType mediaType = new AMMediaType();
            object      oVideoStreamConfig; // video stream configuration

            hr = captureGraphBuilder.FindInterface(PinCategory.Capture, MediaType.Video, theDevice, typeof(IAMStreamConfig).GUID, out oVideoStreamConfig);
            if (!(oVideoStreamConfig is IAMStreamConfig videoStreamConfig))
            {
                throw new Exception("Failed to get IAMStreamConfig");
            }

            // Test (failed): read the capability list stored in the camera
            //int iCount;
            //int iSize;
            //hr = videoStreamConfig.GetNumberOfCapabilities(out iCount, out iSize);
            //if (hr != 0)
            //    Marshal.ThrowExceptionForHR(hr);
            //if (iSize == Marshal.SizeOf(typeof(VideoStreamConfigCaps)))//?? sizeof
            //{
            //    IntPtr sccPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(VideoStreamConfigCaps)));
            //    for (int iFormat = 0; iFormat < iCount; iFormat++)
            //    {
            //        VideoStreamConfigCaps scc=new VideoStreamConfigCaps();
            //        IntPtr pmtConfigIntPtr;
            //        AMMediaType pmtConfig = new AMMediaType() ;
            //        hr = videoStreamConfig.GetStreamCaps(iFormat, out pmtConfigIntPtr, sccPtr);
            //        if (hr != 0)
            //            Marshal.ThrowExceptionForHR(hr);
            //        Marshal.PtrToStructure(pmtConfigIntPtr, pmtConfig);
            //        //Read the configuration values
            //        if (pmtConfig.majorType == MediaType.Video && pmtConfig.subType== MediaSubType.RGB24 && pmtConfig.formatType == FormatType.VideoInfo)
            //        {


            //        }
            //    }
            //}
            //test end


            hr = videoStreamConfig.GetFormat(out mediaType);
            DsError.ThrowExceptionForHR(hr);


            // The formatPtr member points to different structures
            // depending on the formatType
            object formatStruct;

            if (mediaType.formatType == FormatType.WaveEx)
            {
                formatStruct = new WaveFormatEx();
            }
            else if (mediaType.formatType == FormatType.VideoInfo)
            {
                formatStruct = new VideoInfoHeader();
            }
            else if (mediaType.formatType == FormatType.VideoInfo2)
            {
                formatStruct = new VideoInfoHeader2();
            }
            else
            {
                throw new NotSupportedException("This device does not support a recognized format block.");
            }

            // Retrieve the nested structure
            Marshal.PtrToStructure(mediaType.formatPtr, formatStruct);


            if (formatStruct is VideoInfoHeader)
            {
                VideoInfoHeader videoInfoHeader = formatStruct as VideoInfoHeader;
                // Set the frame rate
                if (Frames > 0)
                {
                    videoInfoHeader.AvgTimePerFrame = 10000000 / Frames;
                }
                // Set the width and height
                if (!string.IsNullOrEmpty(Resolution) && Resolution.Split('*').Length > 1)
                {
                    videoInfoHeader.BmiHeader.Width  = Convert.ToInt32(Resolution.Split('*')[0]);
                    videoInfoHeader.BmiHeader.Height = Convert.ToInt32(Resolution.Split('*')[1]);
                }
                // Copy the modified structure back into the format block
                Marshal.StructureToPtr(videoInfoHeader, mediaType.formatPtr, false);
            }
            else if (formatStruct is VideoInfoHeader2)
            {
                VideoInfoHeader2 videoInfoHeader = formatStruct as VideoInfoHeader2;
                // Set the frame rate
                if (Frames > 0)
                {
                    videoInfoHeader.AvgTimePerFrame = 10000000 / Frames;
                }
                // Set the width and height
                if (!string.IsNullOrEmpty(Resolution) && Resolution.Split('*').Length > 1)
                {
                    videoInfoHeader.BmiHeader.Width  = Convert.ToInt32(Resolution.Split('*')[0]);
                    videoInfoHeader.BmiHeader.Height = Convert.ToInt32(Resolution.Split('*')[1]);
                }
                // Copy the modified structure back into the format block
                Marshal.StructureToPtr(videoInfoHeader, mediaType.formatPtr, false);
            }


            //VideoInfoHeader videoInfoHeader = new VideoInfoHeader();
            //Marshal.PtrToStructure(mediaType.formatPtr, videoInfoHeader);


            //if (Frames > 0)
            //{
            //    videoInfoHeader.AvgTimePerFrame = 10000000 / Frames;
            //}
            //// Set the width and height
            //if (!string.IsNullOrEmpty(Resolution) && Resolution.Split('*').Length > 1)
            //{
            //    videoInfoHeader.BmiHeader.Width = Convert.ToInt32(Resolution.Split('*')[0]);
            //    videoInfoHeader.BmiHeader.Height = Convert.ToInt32(Resolution.Split('*')[1]);
            //}
            //// Copy the media structure
            //Marshal.StructureToPtr(videoInfoHeader, mediaType.formatPtr, false);
            // Apply the new video format
            hr = videoStreamConfig.SetFormat(mediaType);
            DsError.ThrowExceptionForHR(hr);
            DsUtils.FreeAMMediaType(mediaType);
            mediaType = null;
        }
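
        // Usage sketch (editor's addition): Resolution is parsed with '*' as the separator and
        // Frames is converted to a 100 ns frame interval, so a typical call looks like this.
        public void CreateGraphUsageSketch()
        {
            CreateGraph("640*480", 30); // 640x480 at 30 fps
        }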
Example #33
        public void SetDeinterlaceMode()
        {
            if (!GUIGraphicsContext.IsEvr)
            {
                if (!_isVmr9Initialized)
                {
                    return;
                }
                Log.Debug("VMR9: SetDeinterlaceMode()");
                IVMRDeinterlaceControl9 deinterlace = (IVMRDeinterlaceControl9)_vmr9Filter;
                IPin InPin = null;
                int  hr    = _vmr9Filter.FindPin("VMR Input0", out InPin);
                if (hr != 0 || InPin == null)
                {
                    // Bail out here: dereferencing a null pin below would throw
                    Log.Error("VMR9: failed finding InPin {0:X}", hr);
                    return;
                }
                AMMediaType mediatype = new AMMediaType();
                InPin.ConnectionMediaType(mediatype);
                //Start by getting the media type of the video stream.
                //Only VideoInfoHeader2 formats can be interlaced.
                if (mediatype.formatType == FormatType.VideoInfo2)
                {
                    Log.Debug("VMR9: SetDeinterlaceMode - FormatType = VideoInfo2");
                    int numModes = 0;
                    VideoInfoHeader2 VideoHeader2 = new VideoInfoHeader2();
                    Marshal.PtrToStructure(mediatype.formatPtr, VideoHeader2);
                    VMR9VideoDesc VideoDesc = new VMR9VideoDesc();
                    // If the FormatType is VideoInfo2, check the dwInterlaceFlags field for the AMInterlace.IsInterlaced flag.
                    //The presence of this flag indicates the video is interlaced.
                    if ((VideoHeader2.InterlaceFlags & AMInterlace.IsInterlaced) != 0)
                    {
                        Log.Debug("VMR9: SetDeinterlaceMode - Interlaced frame detected");
                        //Fill in the VMR9VideoDesc structure with a description of the video stream.
                        VideoDesc.dwSize         = Marshal.SizeOf(VideoDesc);     // dwSize: Set this field to sizeof(VMR9VideoDesc).
                        VideoDesc.dwSampleWidth  = VideoHeader2.BmiHeader.Width;  // dwSampleWidth: Set this field to pBMI->biWidth.
                        VideoDesc.dwSampleHeight = VideoHeader2.BmiHeader.Height; // dwSampleHeight: Set this field to abs(pBMI->biHeight).
                        //SampleFormat: This field describes the interlace characteristics of the media type.
                        //Check the dwInterlaceFlags field in the VIDEOINFOHEADER2 structure, and set SampleFormat equal to the equivalent VMR9_SampleFormat flag.
                        if ((VideoHeader2.InterlaceFlags & AMInterlace.IsInterlaced) != 0)
                        {
                            if ((VideoHeader2.InterlaceFlags & AMInterlace.DisplayModeBobOnly) == 0)
                            {
                                VideoDesc.SampleFormat = VMR9SampleFormat.ProgressiveFrame;
                            }
                            if ((VideoHeader2.InterlaceFlags & AMInterlace.OneFieldPerSample) != 0)
                            {
                                if ((VideoHeader2.InterlaceFlags & AMInterlace.Field1First) != 0)
                                {
                                    VideoDesc.SampleFormat = VMR9SampleFormat.FieldSingleEven;
                                }
                                else
                                {
                                    VideoDesc.SampleFormat = VMR9SampleFormat.FieldSingleOdd;
                                }
                            }
                            if ((VideoHeader2.InterlaceFlags & AMInterlace.Field1First) != 0)
                            {
                                VideoDesc.SampleFormat = VMR9SampleFormat.FieldInterleavedEvenFirst;
                            }
                            else
                            {
                                VideoDesc.SampleFormat = VMR9SampleFormat.FieldInterleavedOddFirst;
                            }
                        }
                        //InputSampleFreq: This field gives the input frequency, which can be calculated from the AvgTimePerFrame field in the VIDEOINFOHEADER2 structure.
                        //In the general case, set dwNumerator to 10000000, and set dwDenominator to AvgTimePerFrame.
                        VideoDesc.InputSampleFreq.dwNumerator   = 10000000;
                        VideoDesc.InputSampleFreq.dwDenominator = (int)VideoHeader2.AvgTimePerFrame;
                        //OutputFrameFreq: This field gives the output frequency, which can be calculated from the InputSampleFreq value and the interleaving characteristics of the input stream:
                        //Set OutputFrameFreq.dwDenominator equal to InputSampleFreq.dwDenominator.
                        //If the input video is interleaved, set OutputFrameFreq.dwNumerator to 2 x InputSampleFreq.dwNumerator. (After deinterlacing, the frame rate is doubled.)
                        //Otherwise, set the value to InputSampleFreq.dwNumerator.
                        VideoDesc.OutputFrameFreq.dwNumerator   = 2 * 10000000;
                        VideoDesc.OutputFrameFreq.dwDenominator = (int)VideoHeader2.AvgTimePerFrame;
                        VideoDesc.dwFourCC = VideoHeader2.BmiHeader.Compression; //dwFourCC: Set this field to pBMI->biCompression.
                        //Pass the structure to the IVMRDeinterlaceControl9::GetNumberOfDeinterlaceModes method.
                        //Call the method twice. The first call returns the number of deinterlace modes the hardware supports for the specified format.
                        hr = deinterlace.GetNumberOfDeinterlaceModes(ref VideoDesc, ref numModes, null);
                        if (hr == 0 && numModes != 0)
                        {
                            Guid[] modes = new Guid[numModes];
                            {
                                //Allocate an array of GUIDs of this size, and call the method again, passing in the address of the array.
                                //The second call fills the array with GUIDs. Each GUID identifies one deinterlacing mode.
                                hr = deinterlace.GetNumberOfDeinterlaceModes(ref VideoDesc, ref numModes, modes);
                                for (int i = 0; i < numModes; i++)
                                {
                                    //To get the capabilities of a particular mode, call the IVMRDeinterlaceControl9::GetDeinterlaceModeCaps method.
                                    //Pass in the same VMR9VideoDesc structure, along with one of the GUIDs from the array.
                                    //The method fills a VMR9DeinterlaceCaps structure with the mode capabilities.
                                    VMR9DeinterlaceCaps caps = new VMR9DeinterlaceCaps();
                                    caps.dwSize = Marshal.SizeOf(typeof(VMR9DeinterlaceCaps));
                                    hr          = deinterlace.GetDeinterlaceModeCaps(modes[i], ref VideoDesc, ref caps);
                                    if (hr == 0)
                                    {
                                        Log.Debug("VMR9: AvailableDeinterlaceMode - {0}: {1}", i, modes[i]);
                                        switch (caps.DeinterlaceTechnology)
                                        {
                                        //The algorithm is unknown or proprietary
                                        case VMR9DeinterlaceTech.Unknown:
                                        {
                                            Log.Info("VMR9: Unknown H/W de-interlace mode");
                                            break;
                                        }

                                        //The algorithm creates each missing line by repeating the line above it or below it.
                                        //This method creates jagged artifacts and is not recommended.
                                        case VMR9DeinterlaceTech.BOBLineReplicate:
                                        {
                                            Log.Info("VMR9: BOB Line Replicate capable");
                                            break;
                                        }

                                        //The algorithm creates the missing lines by vertically stretching each video field by a factor of two.
                                        //For example, it might average two lines or use a (-1, 9, 9, -1)/16 filter across four lines.
                                        //Slight vertical adjustments are made to ensure that the resulting image does not "bob" up and down
                                        case VMR9DeinterlaceTech.BOBVerticalStretch:
                                        {
                                            Log.Info("VMR9: BOB Vertical Stretch capable");
                                            verticalStretch = modes[i].ToString();
                                            break;
                                        }

                                        //The algorithm uses median filtering to recreate the pixels in the missing lines.
                                        case VMR9DeinterlaceTech.MedianFiltering:
                                        {
                                            Log.Info("VMR9: Median Filtering capable");
                                            medianFiltering = modes[i].ToString();
                                            break;
                                        }

                                        //The algorithm uses an edge filter to create the missing lines.
                                        //In this process, spatial directional filters are applied to determine the orientation of edges in the picture content.
                                        //Missing pixels are created by filtering along (rather than across) the detected edges.
                                        case VMR9DeinterlaceTech.EdgeFiltering:
                                        {
                                            Log.Info("VMR9: Edge Filtering capable");
                                            break;
                                        }

                                        //The algorithm uses spatial or temporal interpolation, switching between the two on a field-by-field basis, depending on the amount of motion.
                                        case VMR9DeinterlaceTech.FieldAdaptive:
                                        {
                                            Log.Info("VMR9: Field Adaptive capable");
                                            break;
                                        }

                                        //The algorithm uses spatial or temporal interpolation, switching between the two on a pixel-by-pixel basis, depending on the amount of motion.
                                        case VMR9DeinterlaceTech.PixelAdaptive:
                                        {
                                            Log.Info("VMR9: Pixel Adaptive capable");
                                            pixelAdaptive = modes[i].ToString();
                                            break;
                                        }

                                        //The algorithm identifies objects within a sequence of video fields.
                                        //Before it recreates the missing pixels, it aligns the movement axes of the individual objects in the scene to make them parallel with the time axis.
                                        case VMR9DeinterlaceTech.MotionVectorSteered:
                                        {
                                            Log.Info("VMR9: Motion Vector Steered capable");
                                            break;
                                        }
                                        }
                                    }
                                }
                            }
                            //Set the MP preferred h/w de-interlace modes in order of quality
                            //pixel adaptive, then median filtering & finally vertical stretch
                            if (pixelAdaptive != "")
                            {
                                Guid DeinterlaceMode = new Guid(pixelAdaptive);
                                Log.Debug("VMR9: trying pixel adaptive");
                                hr = deinterlace.SetDeinterlaceMode(0, DeinterlaceMode);
                                if (hr != 0)
                                {
                                    Log.Error("VMR9: pixel adaptive failed!");
                                }
                                else
                                {
                                    Log.Info("VMR9: setting pixel adaptive succeeded");
                                    medianFiltering = "";
                                    verticalStretch = "";
                                }
                            }
                            if (medianFiltering != "")
                            {
                                Guid DeinterlaceMode = new Guid(medianFiltering);
                                Log.Debug("VMR9: trying median filtering");
                                hr = deinterlace.SetDeinterlaceMode(0, DeinterlaceMode);
                                if (hr != 0)
                                {
                                    Log.Error("VMR9: median filtering failed!");
                                }
                                else
                                {
                                    Log.Info("VMR9: setting median filtering succeeded");
                                    verticalStretch = "";
                                }
                            }
                            if (verticalStretch != "")
                            {
                                Guid DeinterlaceMode = new Guid(verticalStretch);
                                Log.Debug("VMR9: trying vertical stretch");
                                hr = deinterlace.SetDeinterlaceMode(0, DeinterlaceMode);
                                if (hr != 0)
                                {
                                    Log.Error("VMR9: Cannot set H/W de-interlace mode - using VMR9 fallback");
                                }
                                else
                                {
                                    Log.Info("VMR9: setting vertical stretch succeeded");
                                }
                            }
                        }
                        else
                        {
                            Log.Info("VMR9: No H/W de-interlaced modes supported, using fallback preference");
                        }
                    }
                    else
                    {
                        Log.Info("VMR9: progressive mode detected - no need to de-interlace");
                    }
                }
                //If the format type is VideoInfo, it must be a progressive frame.
                else
                {
                    Log.Info("VMR9: no need to de-interlace this video source");
                }
                DsUtils.FreeAMMediaType(mediatype);
                //release the VMR9 pin
                hr = DirectShowUtil.ReleaseComObject(InPin);

                InPin     = null;
                mediatype = null;
            }
        }
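
        // Editor's note: the InputSampleFreq/OutputFrameFreq pairs above encode rates as
        // fractions.  With AvgTimePerFrame in 100 ns units the input rate is
        // 10000000 / AvgTimePerFrame samples per second, and deinterlacing interleaved fields
        // doubles the output frame rate.  A quick sanity check, assuming a populated
        // VideoInfoHeader2 named vih2:
        private static void FrequencySanityCheckSketch(VideoInfoHeader2 vih2)
        {
            double inputFps  = 10000000.0 / vih2.AvgTimePerFrame; // e.g. 25.0 for PAL content
            double outputFps = 2.0 * inputFps;                    // after field deinterlacing
            Log.Debug("VMR9: input {0:F2} fps, deinterlaced output {1:F2} fps", inputFps, outputFps);
        }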
Example #34
		/// <summary>
		///  Set the value of one member of the IAMStreamConfig format block.
		///  Helper function for several properties that expose
		///  video/audio settings from IAMStreamConfig.GetFormat().
		///  IAMStreamConfig.GetFormat() returns a AMMediaType struct.
		///  AMMediaType.formatPtr points to a format block structure.
		///  This format block structure may be one of several 
		///  types, the type being determined by AMMediaType.formatType.
		/// </summary>
		protected object setStreamConfigSetting( IAMStreamConfig streamConfig, string fieldName, object newValue)
		{
			if ( streamConfig == null )
				throw new NotSupportedException();
			assertStopped();
			derenderGraph();

			object returnValue = null;
#if DSHOWNET
            IntPtr pmt = IntPtr.Zero;
#endif
            AMMediaType mediaType = new AMMediaType();

			try 
			{
				// Get the current format info
#if DSHOWNET
                int hr = streamConfig.GetFormat(out pmt);
#else
				int hr = streamConfig.GetFormat(out mediaType);
#endif
				if ( hr != 0 )
					Marshal.ThrowExceptionForHR( hr );

#if DSHOWNET
                Marshal.PtrToStructure(pmt, mediaType);
#endif

				// The formatPtr member points to different structures
				// depending on the formatType
				object formatStruct;
				if ( mediaType.formatType == FormatType.WaveEx )
					formatStruct = new WaveFormatEx();
				else if ( mediaType.formatType == FormatType.VideoInfo )
					formatStruct = new VideoInfoHeader();
				else if ( mediaType.formatType == FormatType.VideoInfo2 )
					formatStruct = new VideoInfoHeader2();
				else
					throw new NotSupportedException( "This device does not support a recognized format block." );

				// Retrieve the nested structure
				Marshal.PtrToStructure( mediaType.formatPtr, formatStruct );

				// Find the required field
				Type structType = formatStruct.GetType();
				FieldInfo fieldInfo = structType.GetField( fieldName );
				if ( fieldInfo == null )
					throw new NotSupportedException( "Unable to find the member '" + fieldName + "' in the format block." );

				// Update the value of the field
				fieldInfo.SetValue( formatStruct, newValue );

				// Update fields that may depend on specific values of other attributes
				if (mediaType.formatType == FormatType.WaveEx)
				{
					WaveFormatEx waveFmt = formatStruct as WaveFormatEx;
					waveFmt.nBlockAlign = (short)(waveFmt.nChannels * waveFmt.wBitsPerSample / 8);
					waveFmt.nAvgBytesPerSec = waveFmt.nBlockAlign * waveFmt.nSamplesPerSec;
				}

                // PtrToStructure copies the data so we need to copy it back
				Marshal.StructureToPtr( formatStruct, mediaType.formatPtr, false ); 

				// Save the changes
				hr = streamConfig.SetFormat( mediaType );
				if ( hr != 0 )
					Marshal.ThrowExceptionForHR( hr );
			}
			finally
			{
				DsUtils.FreeAMMediaType( mediaType );
#if DSHOWNET
                Marshal.FreeCoTaskMem(pmt);
#endif
            }
			renderGraph();
			startPreviewIfNeeded();

			return( returnValue );
		}
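
		// Usage sketch (editor's addition): because the helper above recomputes nBlockAlign and
		// nAvgBytesPerSec whenever the format block is a WaveFormatEx, changing the sample rate
		// on an audio pin is a single call.
		protected void SetAudioSampleRateSketch(IAMStreamConfig audioStreamConfig)
		{
			setStreamConfigSetting(audioStreamConfig, "nSamplesPerSec", 44100);
		}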