Example #1
        // Retrieve capabilities of a video device
        internal static VideoCapabilities[] FromStreamConfig(IAMStreamConfig videoStreamConfig)
        {
            if (videoStreamConfig == null)
            {
                throw new ArgumentNullException(nameof(videoStreamConfig));
            }

            // ensure this device reports capabilities
            int hr = videoStreamConfig.GetNumberOfCapabilities(out int count, out int size);

            if (hr != 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            if (count <= 0)
            {
                throw new NotSupportedException("This video device does not report capabilities.");
            }

            if (size > Marshal.SizeOf(typeof(VideoStreamConfigCaps)))
            {
                throw new NotSupportedException("Unable to retrieve video device capabilities. This video device requires a larger VideoStreamConfigCaps structure.");
            }

            // group capabilities with similar parameters
            var videocapsList = new Dictionary <string, VideoCapabilities>();

            for (int i = 0; i < count; i++)
            {
                try
                {
                    var vc = new VideoCapabilities(videoStreamConfig, i);

                    string key =
                        $"{vc.FrameSize.Width} x {Math.Abs(vc.FrameSize.Height)} ({vc.AverageFrameRate} fps, {vc.BitCount} bit)";

                    if (!videocapsList.ContainsKey(key))
                    {
                        videocapsList.Add(key, vc);
                    }
                    //else
                    //{
                    //    if ( vc.BitCount > videocapsList[key].BitCount )
                    //    {
                    //        videocapsList[key] = vc;
                    //    }
                    //}
                }
                catch
                {
                }
            }

            var videocaps = new VideoCapabilities[videocapsList.Count];

            videocapsList.Values.CopyTo(videocaps, 0);

            return(videocaps);
        }
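A minimal usage sketch for the helper above (hypothetical names: captureGraphBuilder and captureFilter are assumed to come from a capture graph built as in the later examples, and the call is assumed to sit in the same assembly, since the method is internal):

        object o;
        int hr = captureGraphBuilder.FindInterface(PinCategory.Capture, MediaType.Video, captureFilter, typeof(IAMStreamConfig).GUID, out o);
        DsError.ThrowExceptionForHR(hr);

        IAMStreamConfig streamConfig = (IAMStreamConfig)o;
        foreach (VideoCapabilities vc in VideoCapabilities.FromStreamConfig(streamConfig))
        {
            // same "W x H (fps, bit)" notation used as the grouping key above
            Console.WriteLine($"{vc.FrameSize.Width} x {vc.FrameSize.Height} ({vc.AverageFrameRate} fps, {vc.BitCount} bit)");
        }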
Example #2
        public void SetAndGetAllAvailableResolution(IPin VideoOutPin)
        {
            int             hr               = 0;
            IAMStreamConfig streamConfig     = (IAMStreamConfig)VideoOutPin;
            AMMediaType     correctVidFormat = null;

            hr = streamConfig.GetNumberOfCapabilities(out int piCount, out int piSize);
            DsError.ThrowExceptionForHR(hr);

            // walk all capabilities and keep the media type at the desired index
            IntPtr ptr = Marshal.AllocCoTaskMem(piSize);
            for (int i = 0; i < piCount; i++)
            {
                hr = streamConfig.GetStreamCaps(i, out AMMediaType searchmedia, ptr);
                DsError.ThrowExceptionForHR(hr);

                // inspect each advertised format (width/height live in the VideoInfoHeader)
                VideoInfoHeader v = new VideoInfoHeader();
                Marshal.PtrToStructure(searchmedia.formatPtr, v);

                if (i == 2) // index of the desired capability (device dependent)
                {
                    correctVidFormat = searchmedia;
                }
                else
                {
                    DsUtils.FreeAMMediaType(searchmedia); // release the formats we do not keep
                }
            }
            Marshal.FreeCoTaskMem(ptr);

            if (correctVidFormat != null)
            {
                hr = streamConfig.SetFormat(correctVidFormat);
                DsError.ThrowExceptionForHR(hr);
                DsUtils.FreeAMMediaType(correctVidFormat);
            }

            // read the format back to confirm the new width and height
            hr = streamConfig.GetFormat(out AMMediaType mediaType);
            DsError.ThrowExceptionForHR(hr);

            VideoInfoHeader vih = new VideoInfoHeader();

            Marshal.PtrToStructure(mediaType.formatPtr, vih);
            x = vih.BmiHeader.Width;
            y = vih.BmiHeader.Height;
            DsUtils.FreeAMMediaType(mediaType);
        }
        void BuildGraph()
        {
            int         hr;
            IBaseFilter ppFilter;

            DsDevice []   devs;
            IGraphBuilder graphBuilder = new FilterGraph() as IGraphBuilder;

            m_ROT = new DsROTEntry(graphBuilder);
            IFilterGraph2 ifg2 = graphBuilder as IFilterGraph2;

            devs = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
            DsDevice dev = devs[0];

            hr = ifg2.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out ppFilter);
            DsError.ThrowExceptionForHR(hr);

            ICaptureGraphBuilder2 captureGraphBuilder = new CaptureGraphBuilder2() as ICaptureGraphBuilder2;

            hr = captureGraphBuilder.SetFiltergraph(graphBuilder);

            object o;

            hr = captureGraphBuilder.FindInterface(null, null, ppFilter, typeof(IAMStreamConfig).GUID, out o);
            DsError.ThrowExceptionForHR(hr);

            m_asc = o as IAMStreamConfig;

            //m_imc = graphBuilder as IMediaControl;
            //hr = m_imc.Run();
            //DsError.ThrowExceptionForHR(hr);
        }
Example #4
        private void SetResolution(IAMStreamConfig streamConfig, VideoCapabilities resolution)
        {
            if (resolution == null)
            {
                return;
            }
            int                   num              = 0;
            int                   num2             = 0;
            AMMediaType           ammediaType      = null;
            VideoStreamConfigCaps streamConfigCaps = new VideoStreamConfigCaps();

            streamConfig.GetNumberOfCapabilities(out num, out num2);
            for (int i = 0; i < num; i++)
            {
                try
                {
                    VideoCapabilities b = new VideoCapabilities(streamConfig, i);
                    if (resolution == b && streamConfig.GetStreamCaps(i, out ammediaType, streamConfigCaps) == 0)
                    {
                        break;
                    }
                }
                catch
                {
                }
            }
            if (ammediaType != null)
            {
                streamConfig.SetFormat(ammediaType);
                ammediaType.Dispose();
            }
        }
Example #5
        private void SetConfigParameters(ICaptureGraphBuilder2 captureGraphBuilder, IBaseFilter captureFilter, int frameRate, int width, int height)
        {
            object outObject;
            int    hr = captureGraphBuilder.FindInterface(PinCategory.Capture, MediaType.Video, captureFilter, typeof(IAMStreamConfig).GUID, out outObject);

            IAMStreamConfig videoStreamConfig = outObject as IAMStreamConfig;

            if (videoStreamConfig == null)
            {
                throw new Exception("Failed to get IAMStreamConfig");
            }

            AMMediaType outMedia;

            hr = videoStreamConfig.GetFormat(out outMedia);
            DsError.ThrowExceptionForHR(hr);

            VideoInfoHeader videoInfoHeader = new VideoInfoHeader();

            Marshal.PtrToStructure(outMedia.formatPtr, videoInfoHeader);

            videoInfoHeader.AvgTimePerFrame  = 10000000 / frameRate;
            videoInfoHeader.BmiHeader.Width  = width;
            videoInfoHeader.BmiHeader.Height = height;

            Marshal.StructureToPtr(videoInfoHeader, outMedia.formatPtr, false);

            hr = videoStreamConfig.SetFormat(outMedia);
            DsError.ThrowExceptionForHR(hr);


            DsUtils.FreeAMMediaType(outMedia);
            outMedia = null;
        }
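AvgTimePerFrame is expressed in 100-nanosecond units, which is where the 10000000 / frameRate conversion above comes from. A quick arithmetic check (a sketch, not part of the original sample):

        // one second = 10,000,000 units of 100 ns
        long avgTimePerFrame = 10000000 / 30;                       // 333333 -> roughly 30 fps
        int  recoveredRate   = (int)(10000000 / avgTimePerFrame);   // back to 30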
Example #6
        private void ConfStreamDimensions(IAMStreamConfig streamConfig)
        {
            AMMediaType media = null;

            DsError.ThrowExceptionForHR(streamConfig.GetFormat(out media));

            try {
                VideoInfoHeader v = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
                if (m_configuration.Size.Width > 0)
                {
                    v.BmiHeader.Width = m_configuration.Size.Width;
                }
                if (m_configuration.Size.Height > 0)
                {
                    v.BmiHeader.Height = m_configuration.Size.Height;
                }
                if (m_configuration.BPP > 0)
                {
                    v.BmiHeader.BitCount = m_configuration.BPP;
                }
                if (m_configuration.MediaSubtype != Guid.Empty)
                {
                    media.subType = m_configuration.MediaSubtype;
                }
                //v.AvgTimePerFrame = 10000000 / 30; // 30 fps. The FPS may be controlled by the camera: in poor lighting, exposure may increase and FPS decrease.

                Marshal.StructureToPtr(v, media.formatPtr, false);
                DsError.ThrowExceptionForHR(streamConfig.SetFormat(media));
            } finally {
                DsUtils.FreeAMMediaType(media);
                media = null;
            }
        }
Example #7
        internal AudioCapabilities(IAMStreamConfig audioStreamConfig)
        {
            if (audioStreamConfig == null)
            {
                throw new ArgumentNullException("audioStreamConfig");
            }
            AMMediaType           mediaType = null;
            AudioStreamConfigCaps caps      = null;
            IntPtr zero = IntPtr.Zero;

            try
            {
                IntPtr ptr2;
                int    num;
                int    num2;
                int    numberOfCapabilities = audioStreamConfig.GetNumberOfCapabilities(out num, out num2);
                if (numberOfCapabilities != 0)
                {
                    Marshal.ThrowExceptionForHR(numberOfCapabilities);
                }
                if (num <= 0)
                {
                    throw new NotSupportedException("This audio device does not report capabilities.");
                }
                if (num2 > Marshal.SizeOf(typeof(AudioStreamConfigCaps)))
                {
                    throw new NotSupportedException("Unable to retrieve audio device capabilities. This audio device requires a larger AudioStreamConfigCaps structure.");
                }
                zero = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(AudioStreamConfigCaps)));
                numberOfCapabilities = audioStreamConfig.GetStreamCaps(0, out ptr2, zero);
                if (numberOfCapabilities != 0)
                {
                    Marshal.ThrowExceptionForHR(numberOfCapabilities);
                }
                mediaType                    = (AMMediaType)Marshal.PtrToStructure(ptr2, typeof(AMMediaType));
                caps                         = (AudioStreamConfigCaps)Marshal.PtrToStructure(zero, typeof(AudioStreamConfigCaps));
                this.MinimumChannels         = caps.MinimumChannels;
                this.MaximumChannels         = caps.MaximumChannels;
                this.ChannelsGranularity     = caps.ChannelsGranularity;
                this.MinimumSampleSize       = caps.MinimumBitsPerSample;
                this.MaximumSampleSize       = caps.MaximumBitsPerSample;
                this.SampleSizeGranularity   = caps.BitsPerSampleGranularity;
                this.MinimumSamplingRate     = caps.MinimumSampleFrequency;
                this.MaximumSamplingRate     = caps.MaximumSampleFrequency;
                this.SamplingRateGranularity = caps.SampleFrequencyGranularity;
            }
            finally
            {
                if (zero != IntPtr.Zero)
                {
                    Marshal.FreeCoTaskMem(zero);
                }
                zero = IntPtr.Zero;
                if (mediaType != null)
                {
                    DsUtils.FreeAMMediaType(mediaType);
                }
                mediaType = null;
            }
        }
Example #8
        /// <summary>
        /// Set video type for the specified pin interface
        /// </summary>
        /// <param name="streamConfig"></param>
        /// <param name="newValue"></param>
        /// <returns></returns>
        public bool setMediaSubType(IAMStreamConfig streamConfig, Guid newValue)
        {
            IntPtr      pmt       = IntPtr.Zero;
            AMMediaType mediaType = new AMMediaType();

            try
            {
                // Get the current format info
                int hr = streamConfig.GetFormat(out pmt);
                if (hr < 0)
                {
                    return(false);
                }
                Marshal.PtrToStructure(pmt, mediaType);

                // Change the media subtype
                // Each enum value has a Guid associated with it
                // We store the Guid as a string in a LabelAttribute
                // applied to each enum value. See the ColorSpaceEnum.
                mediaType.subType = newValue;

                // Save the changes
                hr = streamConfig.SetFormat(mediaType);
                if (hr < 0)
                {
                    return(false);
                }
            }
            finally
            {
                DsUtils.FreeAMMediaType(mediaType);
                Marshal.FreeCoTaskMem(pmt);
            }
            return(true);
        }
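A possible call site for setMediaSubType (a sketch; streamConfig is assumed to have been obtained via FindInterface as in the other examples, and MediaSubType.RGB24 is just one of the subtype GUIDs the library exposes):

        // request an uncompressed RGB24 stream; the method returns false if the device rejects it
        if (!setMediaSubType(streamConfig, MediaSubType.RGB24))
        {
            // keep whatever format the device is currently delivering
        }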
Example #9
        public void SetupCaptureFormat(IBaseFilter filter)
        {
            AppLogger.Message("VideoCaptureDevice:get Video stream control interface (IAMStreamConfig)");
            object o;
            int    hr = _captureGraphBuilder.FindInterface(PinCategory.Preview, null, (IBaseFilter)filter, typeof(IAMStreamConfig).GUID, out o);

            if (hr == 0)
            {
                _interfaceStreamConfigVideoCapture = o as IAMStreamConfig;
                if (_interfaceStreamConfigVideoCapture != null)
                {
                    AppLogger.Message(String.Format("FrameRate before set {0}", FrameRate));
                    FrameRate = 15d;
                    //FrameSize = new Size(720, 576);
                    // Size size = FrameSize;
                    // if (size.Width != 720 || size.Height != 576)
                    //  {
                    //  FrameSize = new Size(640, 480);
                    //   FrameSize = new Size(352, 240);
                    // }
                }
            }
            else
            {
                AppLogger.Message("Failed to find Preview interface on filter");
            }
            AppLogger.Message(String.Format("FrameRate after set {0}", FrameRate));
            return;
        }
Example #10
        // Retrieve capabilities of a video device
        static internal VideoCapabilities[] FromStreamConfig(IAMStreamConfig videoStreamConfig)
        {
            if (videoStreamConfig == null)
            {
                throw new ArgumentNullException("videoStreamConfig");
            }

            // ensure this device reports capabilities
            int count, size;
            int hr = videoStreamConfig.GetNumberOfCapabilities(out count, out size);

            if (hr != 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            if (count <= 0)
            {
                throw new NotSupportedException("This video device does not report capabilities.");
            }

            if (size > Marshal.SizeOf(typeof(VideoStreamConfigCaps)))
            {
                throw new NotSupportedException("Unable to retrieve video device capabilities. This video device requires a larger VideoStreamConfigCaps structure.");
            }

            // group capabilities with similar parameters
            Dictionary <uint, VideoCapabilities> videocapsList = new Dictionary <uint, VideoCapabilities>();

            for (int i = 0; i < count; i++)
            {
                try
                {
                    VideoCapabilities vc = new VideoCapabilities(videoStreamConfig, i);

                    // pack height and width into a 32-bit key (a uint shift by 32 would be masked to zero)
                    uint key = (((uint)vc.FrameSize.Height) << 16) |
                               ((uint)vc.FrameSize.Width);

                    if (!videocapsList.ContainsKey(key))
                    {
                        videocapsList.Add(key, vc);
                    }
                    else
                    {
                        if (vc.BitCount > videocapsList[key].BitCount)
                        {
                            videocapsList[key] = vc;
                        }
                    }
                }
                catch
                {
                }
            }

            VideoCapabilities[] videocaps = new VideoCapabilities[videocapsList.Count];
            videocapsList.Values.CopyTo(videocaps, 0);

            return(videocaps);
        }
Example #11
        /// <summary>
        /// Constructs the _AMMediaType (adds pbFormat to it), sets it, then frees it
        /// </summary>
        public static void SetMediaType(IAMStreamConfig iSC, _AMMediaType mt, object formatBlock)
        {
            System.Diagnostics.Debug.Assert(mt.pbFormat == IntPtr.Zero && mt.cbFormat == 0);

            mt = MediaType.Construct(mt, formatBlock);
            SetMediaType(iSC, ref mt);
        }
Example #12
        protected object setStreamConfigSetting(IAMStreamConfig streamConfig, string fieldName, object newValue)
        {
            if (streamConfig == null)
            {
                throw new NotSupportedException();
            }
            this.assertStopped();
            this.derenderGraph();
            IntPtr      zero      = IntPtr.Zero;
            AMMediaType structure = new AMMediaType();

            try
            {
                object obj2;
                int    format = streamConfig.GetFormat(out zero);
                if (format != 0)
                {
                    Marshal.ThrowExceptionForHR(format);
                }
                Marshal.PtrToStructure(zero, structure);
                if (structure.formatType == FormatType.WaveEx)
                {
                    obj2 = new WaveFormatEx();
                }
                else if (structure.formatType == FormatType.VideoInfo)
                {
                    obj2 = new VideoInfoHeader();
                }
                else
                {
                    if (structure.formatType != FormatType.VideoInfo2)
                    {
                        throw new NotSupportedException("This device does not support a recognized format block.");
                    }
                    obj2 = new VideoInfoHeader2();
                }
                Marshal.PtrToStructure(structure.formatPtr, obj2);
                FieldInfo field = obj2.GetType().GetField(fieldName);
                if (field == null)
                {
                    throw new NotSupportedException("Unable to find the member '" + fieldName + "' in the format block.");
                }
                field.SetValue(obj2, newValue);
                Marshal.StructureToPtr(obj2, structure.formatPtr, false);
                format = streamConfig.SetFormat(structure);
                if (format != 0)
                {
                    Marshal.ThrowExceptionForHR(format);
                }
            }
            finally
            {
                DsUtils.FreeAMMediaType(structure);
                Marshal.FreeCoTaskMem(zero);
            }
            this.renderStream = false;
            this.renderGraph();
            this.startPreviewIfNeeded();
            return(null);
        }
        private void SetPinParameters(IPin pin, int iWidth, int iHeight, short iBPP)
        {
            int             hr;
            IAMStreamConfig videoStreamConfig = pin as IAMStreamConfig;

            hr = videoStreamConfig.GetFormat(out AMMediaType media);
            DsError.ThrowExceptionForHR(hr);
            try
            {
                VideoInfoHeader v = new VideoInfoHeader();
                Marshal.PtrToStructure(media.formatPtr, v);
                if (iWidth > 0)
                {
                    v.BmiHeader.Width = iWidth;
                }
                if (iHeight > 0)
                {
                    v.BmiHeader.Height = iHeight;
                }
                if (iBPP > 0)
                {
                    v.BmiHeader.BitCount = iBPP;
                }
                media.majorType = MediaType.Video;
                media.subType   = MediaSubType.MJPG;
                Marshal.StructureToPtr(v, media.formatPtr, false);
                hr = videoStreamConfig.SetFormat(media);
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                DsUtils.FreeAMMediaType(media);
            }
        }
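setStreamConfigSetting works by reflection, so the field name has to match a member of the format block (WaveFormatEx, VideoInfoHeader or VideoInfoHeader2). A hedged usage sketch from inside the capture class, assuming videoStreamConfig and audioStreamConfig were obtained as in the other examples:

        // request roughly 30 fps by writing AvgTimePerFrame (100-ns units) into the VideoInfoHeader
        setStreamConfigSetting(videoStreamConfig, "AvgTimePerFrame", (long)(10000000 / 30));

        // audio pins expose WaveFormatEx fields instead, e.g. the sample rate
        setStreamConfigSetting(audioStreamConfig, "nSamplesPerSec", 44100);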
Example #14
        private void SetPinVideoImageSize(string pinName)
        {
            int  hr;
            IPin pin = DsFindPin.ByDirection(_videoEncoder, PinDirection.Output, 0);

            if (pin == null)
            {
                AppLogger.Message("VideoCaptureDevice: output pin not found");
                return;
            }
            AppLogger.Message("VideoCaptureDevice: found output pin");

            // get video stream interfaces
            AppLogger.Message("VideoCaptureDevice:get Video stream control interface (IAMStreamConfig)");

            IAMStreamConfig streamConfig = (IAMStreamConfig)pin;
            AMMediaType     media;

            hr = streamConfig.GetFormat(out media);
            DsError.ThrowExceptionForHR(hr);

            // modify the existing format block in place rather than allocating a new one
            VideoInfoHeader v = new VideoInfoHeader();

            Marshal.PtrToStructure(media.formatPtr, v);
            v.BmiHeader.Width  = 320;
            v.BmiHeader.Height = 240;
            Marshal.StructureToPtr(v, media.formatPtr, false);

            hr = streamConfig.SetFormat(media);
            DsError.ThrowExceptionForHR(hr);
            DsUtils.FreeAMMediaType(media);
        }
Example #15
        private void ConfigureCompressor()
        {
            // Have the compressor use the same height and width settings as the device

            // Because these are structs that access their members through properties
            // some of the properties (like BitmapInfo) are copied, so we work on the
            // copy and then restore it at the end
            BITMAPINFOHEADER bmih = cVI.BitmapInfo;

            bmih.Width     = SystemInformation.PrimaryMonitorSize.Width;
            bmih.Height    = SystemInformation.PrimaryMonitorSize.Height;
            bmih.Size      = (uint)Marshal.SizeOf(typeof(BITMAPINFOHEADER));
            bmih.Planes    = 1;
            cVI.BitmapInfo = bmih;

            // Configure the bit rate
            cVI.BitRate = (uint)(hsbBitRate.Value * 1024); // Change to kilobits

            // Update the structure in memory
            Marshal.StructureToPtr(cVI, cMT.pbFormat, false);

            // Allow compressor specific configuration
            // e.g. WM9+ requires extra configuration, others may as well
            ConfigureWMScreenEncoder();
            Console.WriteLine(MediaType.Dump(cMT));

            // Use the structure in the compressor
            IAMStreamConfig iSC = (IAMStreamConfig)cOutputPin;

            iSC.SetFormat(ref cMT);

            IAMVideoCompression iVC = (IAMVideoCompression)cOutputPin;
        }
Example #16
 private void SetResolution(IAMStreamConfig streamConfig, VideoCapabilities resolution)
 {
     if (resolution != null)
     {
         int                   count            = 0;
         int                   size             = 0;
         AMMediaType           mediaType        = null;
         VideoStreamConfigCaps streamConfigCaps = new VideoStreamConfigCaps();
         streamConfig.GetNumberOfCapabilities(out count, out size);
         for (int i = 0; i < count; i++)
         {
             try
             {
                 VideoCapabilities b = new VideoCapabilities(streamConfig, i);
                 if (resolution == b && streamConfig.GetStreamCaps(i, out mediaType, streamConfigCaps) == 0)
                 {
                     break;
                 }
             }
             catch
             {
             }
         }
         if (mediaType != null)
         {
             streamConfig.SetFormat(mediaType);
             mediaType.Dispose();
         }
     }
 }
Example #17
        // Set the Framerate, and video size
        private void SetConfigParms(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, AMMediaType media)
        {
            int    hr;
            object o;

            // Find the stream config interface
            hr = capGraph.FindInterface(
                PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o);

            IAMStreamConfig videoStreamConfig = o as IAMStreamConfig;

            if (videoStreamConfig == null)
            {
                throw new Exception("Failed to get IAMStreamConfig");
            }

            // Set the new format
            try
            {
                hr = videoStreamConfig.SetFormat(media);
                DsError.ThrowExceptionForHR(hr);
            }
            catch { }



            DsUtils.FreeAMMediaType(media);
            media = null;
        }
        /// <summary>
        /// Checks the capabilities of a possibly available stream config interface
        /// </summary>
        /// <param name="graph">The stored graph</param>
        /// <param name="capBuilder">The capture graph builder</param>
        private void CheckCapabilitiesStreamConfig(Graph graph, ICaptureGraphBuilder2 capBuilder)
        {
            DsGuid cat = new DsGuid(PinCategory.Capture);
            Guid   iid = typeof(IAMStreamConfig).GUID;
            object o;
            int    hr = capBuilder.FindInterface(cat, null, _filterVideoCapture, iid, out o);

            if (hr == 0)
            {
                _streamConfig = o as IAMStreamConfig;
                if (_streamConfig == null)
                {
                    _imageWidth = -1;
                    _frameRate  = -1;
                }
            }
            else
            {
                _imageWidth = -1;
                _frameRate  = -1;
            }
            graph.Capture.ImageWidth  = _imageWidth;
            graph.Capture.ImageHeight = _imageHeight;
            graph.Capture.FrameRate   = _frameRate;
        }
Example #19
        private VideoOutPinConfiguration[] GetVideoOutPins(IBaseFilter filter)
        {
            List <VideoOutPinConfiguration> video_out_pins = new List <VideoOutPinConfiguration>();

            IEnumPins iterator;

            IPin[] pins = new IPin[1];
            filter.EnumPins(out iterator);
            while (iterator.Next(1, pins, IntPtr.Zero) == 0)
            {
                PinDirection pin_direction;
                pins[0].QueryDirection(out pin_direction);
                if (pin_direction == PinDirection.Output)
                {
                    int             caps_count;
                    int             caps_size;
                    IAMStreamConfig config = (IAMStreamConfig)pins[0];
                    config.GetNumberOfCapabilities(out caps_count, out caps_size);
                    AMMediaType type   = null;
                    IntPtr      buffer = Marshal.AllocCoTaskMem(caps_size);
                    for (int i = 0; i < caps_count; i++)
                    {
                        config.GetStreamCaps(i, out type, buffer);
                        VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(type.formatPtr, typeof(VideoInfoHeader));
                        if (header.BmiHeader.Width > 0)
                        {
                            video_out_pins.Add(new VideoOutPinConfiguration(filter, pins[0], i, header));
                        }
                        // free each returned media type; freeing only the last one would leak the rest
                        DsUtils.FreeAMMediaType(type);
                    }
                    Marshal.FreeCoTaskMem(buffer);
                }
            }
            return(video_out_pins.ToArray());
        }
Example #20
        private void InitCaptureInterface()
        {
            // release com object (useless here but can't hurt)
            Cleanup(true);

            this.fmc = new FilgraphManagerClass();

            // create the cg object and add the filter graph to it
            Type t = Type.GetTypeFromCLSID(CLSID_CaptureGraphBuilder2);

            this.icgb = (ICaptureGraphBuilder2)Activator.CreateInstance(t);

            t        = Type.GetTypeFromCLSID(CLSID_SampleGrabber);
            this.isg = (ISampleGrabber)Activator.CreateInstance(t);

            // source filter (the capture device)
            this.sf = (IBaseFilter)this.SourceFilterList[this.cbxDevice.SelectedIndex];
            // sample grabber filter
            this.sgf = (IBaseFilter)this.isg;

            object o = null;

            this.icgb.RemoteFindInterface(ref PIN_CATEGORY_CAPTURE, ref MEDIATYPE_Video, sf, ref IID_IAMStreamConfig, out o);
            this.iamsc = (IAMStreamConfig)o;

            // set sample grabber media type
            this.SGMediaType            = new _AMMediaType();
            this.SGMediaType.majortype  = MEDIATYPE_Video;
            this.SGMediaType.subtype    = MEDIASUBTYPE_RGB24;
            this.SGMediaType.formattype = FORMAT_VideoInfo;
            this.isg.SetMediaType(ref SGMediaType);

            this.isg.SetOneShot(0);
            this.isg.SetBufferSamples(1);
        }
        // Retrieve capabilities of a video device
        internal VideoCapabilities(IAMStreamConfig videoStreamConfig, int index)
        {
            AMMediaType           mediaType = null;
            VideoStreamConfigCaps caps      = new VideoStreamConfigCaps( );

            try
            {
                // retrieve capabilities struct at the specified index
                int hr = videoStreamConfig.GetStreamCaps(index, out mediaType, caps);

                if (hr != 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // extract info
                FrameSize    = caps.InputSize;
                MaxFrameRate = (int)(10000000 / caps.MinFrameInterval);
            }
            finally
            {
                if (mediaType != null)
                {
                    mediaType.Dispose( );
                }
            }
        }
Example #22
        bool QueryResolution(IPin sourcePin, out int width, out int height)
        {
            width  = 0;
            height = 0;

            IAMStreamConfig streamConfig = sourcePin as IAMStreamConfig;
            if (streamConfig == null)
            {
                return false;
            }

            AMMediaType format;
            streamConfig.GetFormat(out format);
            try
            {
                var iidVideoInfoHeader  = typeof(VideoInfoHeader).GUID;
                var iidVideoInfoHeader2 = typeof(VideoInfoHeader2).GUID;
                if (format.formatType == iidVideoInfoHeader2)
                {
                    var videoInfo = Marshal.PtrToStructure <VideoInfoHeader2>(format.formatPtr);

                    width  = videoInfo.BmiHeader.Width;
                    height = videoInfo.BmiHeader.Height;
                    return true;
                }
                if (format.formatType == iidVideoInfoHeader)
                {
                    var videoInfo = Marshal.PtrToStructure <VideoInfoHeader>(format.formatPtr);

                    width  = videoInfo.BmiHeader.Width;
                    height = videoInfo.BmiHeader.Height;
                    return true;
                }
                return false;
            }
            finally
            {
                // release the format block returned by GetFormat
                DsUtils.FreeAMMediaType(format);
            }
        }
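A short usage sketch for QueryResolution (sourcePin is assumed to be the capture output pin located elsewhere in these examples):

        if (QueryResolution(sourcePin, out int width, out int height))
        {
            Console.WriteLine($"Current format: {width} x {height}");
        }
        else
        {
            Console.WriteLine("Pin has no IAMStreamConfig or uses an unrecognized format block.");
        }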
        // Retrieve capabilities of a video device
        static internal VideoCapabilities[] FromStreamConfig(IAMStreamConfig videoStreamConfig)
        {
            if (videoStreamConfig == null)
                throw new ArgumentNullException("videoStreamConfig");

            // ensure this device reports capabilities
            int count, size;
            int hr = videoStreamConfig.GetNumberOfCapabilities(out count, out size);

            if (hr != 0)
                Marshal.ThrowExceptionForHR(hr);

            if (count <= 0)
                throw new NotSupportedException("This video device does not report capabilities.");

            if (size > Marshal.SizeOf(typeof(VideoStreamConfigCaps)))
                throw new NotSupportedException("Unable to retrieve video device capabilities. This video device requires a larger VideoStreamConfigCaps structure.");

            var videocapsList = from i in Enumerable.Range(0, count)
                                let vc = new VideoCapabilities(videoStreamConfig, i)
                                // where vc.MediaType.SubType == MediaSubType.RGB24
                                select vc;
            
            return videocapsList.ToArray();
        }
Example #24
        private static void SetSourceParams(IPin pinSourceCapture, Camera_NET.Resolution resolution_desired)
        {
            AMMediaType pmt  = null;
            AMMediaType ppmt = null;
            IntPtr      zero = IntPtr.Zero;
            bool        flag = false;

            try
            {
                IAMStreamConfig config = pinSourceCapture as IAMStreamConfig;
                DsError.ThrowExceptionForHR(config.SetFormat(null));
                int piCount = 0;
                int piSize  = 0;
                DsError.ThrowExceptionForHR(config.GetNumberOfCapabilities(out piCount, out piSize));
                for (int i = 0; i < piCount; i++)
                {
                    zero = Marshal.AllocCoTaskMem(piSize);
                    config.GetStreamCaps(i, out ppmt, zero);
                    FreeSCCMemory(ref zero);
                    bool flag2 = false;
                    bool flag3 = false;
                    bool flag4 = false;
                    AnalyzeMediaType(ppmt, resolution_desired, out flag2, out flag3, out flag4);
                    if (flag2 && flag4)
                    {
                        if (flag3)
                        {
                            DsError.ThrowExceptionForHR(config.SetFormat(ppmt));
                            flag = true;
                            break;
                        }
                        if (pmt == null)
                        {
                            pmt  = ppmt;
                            ppmt = null;
                        }
                    }
                    FreeMediaType(ref ppmt);
                }
                if (!flag)
                {
                    if (pmt == null)
                    {
                        throw new Exception("Camera doesn't support media type with requested resolution and bits per pixel.");
                    }
                    DsError.ThrowExceptionForHR(config.SetFormat(pmt));
                }
            }
            finally
            {
                FreeMediaType(ref ppmt);
                FreeMediaType(ref pmt);
                FreeSCCMemory(ref zero);
            }
        }
 internal AudioCapabilities(IAMStreamConfig audioStreamConfig)
 {
     if (audioStreamConfig == null)
     {
         throw new ArgumentNullException("audioStreamConfig");
     }
     AMMediaType mediaType = null;
     AudioStreamConfigCaps caps = null;
     IntPtr zero = IntPtr.Zero;
     try
     {
         IntPtr ptr2;
         int num;
         int num2;
         int numberOfCapabilities = audioStreamConfig.GetNumberOfCapabilities(out num, out num2);
         if (numberOfCapabilities != 0)
         {
             Marshal.ThrowExceptionForHR(numberOfCapabilities);
         }
         if (num <= 0)
         {
             throw new NotSupportedException("This audio device does not report capabilities.");
         }
         if (num2 > Marshal.SizeOf(typeof(AudioStreamConfigCaps)))
         {
             throw new NotSupportedException("Unable to retrieve audio device capabilities. This audio device requires a larger AudioStreamConfigCaps structure.");
         }
         zero = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(AudioStreamConfigCaps)));
         numberOfCapabilities = audioStreamConfig.GetStreamCaps(0, out ptr2, zero);
         if (numberOfCapabilities != 0)
         {
             Marshal.ThrowExceptionForHR(numberOfCapabilities);
         }
         mediaType = (AMMediaType) Marshal.PtrToStructure(ptr2, typeof(AMMediaType));
         caps = (AudioStreamConfigCaps) Marshal.PtrToStructure(zero, typeof(AudioStreamConfigCaps));
         this.MinimumChannels = caps.MinimumChannels;
         this.MaximumChannels = caps.MaximumChannels;
         this.ChannelsGranularity = caps.ChannelsGranularity;
         this.MinimumSampleSize = caps.MinimumBitsPerSample;
         this.MaximumSampleSize = caps.MaximumBitsPerSample;
         this.SampleSizeGranularity = caps.BitsPerSampleGranularity;
         this.MinimumSamplingRate = caps.MinimumSampleFrequency;
         this.MaximumSamplingRate = caps.MaximumSampleFrequency;
         this.SamplingRateGranularity = caps.SampleFrequencyGranularity;
     }
     finally
     {
         if (zero != IntPtr.Zero)
         {
             Marshal.FreeCoTaskMem(zero);
         }
         zero = IntPtr.Zero;
         if (mediaType != null)
         {
             DsUtils.FreeAMMediaType(mediaType);
         }
         mediaType = null;
     }
 }
Example #26
        // Set frame's size and rate for the specified stream configuration
        private void SetFrameSizeAndRate(IAMStreamConfig streamConfig, Size size, int frameRate)
        {
            bool        sizeSet = false;
            AMMediaType mediaType;

            // get current format
            streamConfig.GetFormat(out mediaType);

            // change frame size if required
            if ((size.Width != 0) && (size.Height != 0))
            {
                // iterate through device's capabilities to find mediaType for desired resolution
                int                   capabilitiesCount = 0, capabilitySize = 0;
                AMMediaType           newMediaType = null;
                VideoStreamConfigCaps caps = new VideoStreamConfigCaps();

                streamConfig.GetNumberOfCapabilities(out capabilitiesCount, out capabilitySize);

                for (int i = 0; i < capabilitiesCount; i++)
                {
                    if (streamConfig.GetStreamCaps(i, out newMediaType, caps) == 0)
                    {
                        if (caps.InputSize == size)
                        {
                            mediaType.Dispose();
                            mediaType = newMediaType;
                            sizeSet   = true;
                            break;
                        }
                        else
                        {
                            newMediaType.Dispose();
                        }
                    }
                }
            }

            VideoInfoHeader infoHeader = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));

            // try changing size manually if failed finding mediaType before
            if ((size.Width != 0) && (size.Height != 0) && (!sizeSet))
            {
                infoHeader.BmiHeader.Width  = size.Width;
                infoHeader.BmiHeader.Height = size.Height;
            }
            // change frame rate if required
            if (frameRate != 0)
            {
                infoHeader.AverageTimePerFrame = 10000000 / frameRate;
            }

            // copy the media structure back
            Marshal.StructureToPtr(infoHeader, mediaType.FormatPtr, false);

            // set the new format
            streamConfig.SetFormat(mediaType);

            mediaType.Dispose();
        }
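A possible call, assuming streamConfig was obtained from the capture pin as in the other examples (Size comes from System.Drawing):

        // ask for 640x480 at 30 fps; pass Size.Empty or 0 to leave either setting unchanged
        SetFrameSizeAndRate(streamConfig, new Size(640, 480), 30);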
Example #27
        /// <summary>
        /// Set the Framerate, and video size
        /// </summary>
        /// <param name="capGraph">The <see cref="ICaptureGraphBuilder2"/> interface.</param>
        /// <param name="capFilter">The <see cref="IBaseFilter"/> of the capture device.</param>
        /// <param name="frameRate">The new framerate to be used.</param>
        /// <param name="width">The new video width to be used.</param>
        /// <param name="height">The new video height to be used.</param>
        private void SetConfigParms(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, int frameRate, int width, int height)
        {
            int         hr;
            object      o;
            AMMediaType media;

            // Find the stream config interface
            hr = this.capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o);

            this.videoControl      = capFilter as IAMVideoControl;
            this.videoStreamConfig = o as IAMStreamConfig;

            if (this.videoStreamConfig == null)
            {
                ErrorLogger.WriteLine("Error in Capture.SetConfigParams(). Failed to get IAMStreamConfig");
            }

            // Get the existing format block
            hr = this.videoStreamConfig.GetFormat(out media);

            if (hr != 0)
            {
                ErrorLogger.WriteLine("Could not SetConfigParms in Camera.Capture. Message: " + DsError.GetErrorText(hr));
            }

            // copy out the videoinfoheader
            VideoInfoHeader v = new VideoInfoHeader();

            Marshal.PtrToStructure(media.formatPtr, v);

            // if overriding set values
            if (frameRate > 0)
            {
                v.AvgTimePerFrame = 10000000 / frameRate;
            }

            if (width > 0)
            {
                v.BmiHeader.Width = width;
            }

            if (height > 0)
            {
                v.BmiHeader.Height = height;
            }

            // Copy the media structure back
            Marshal.StructureToPtr(v, media.formatPtr, true);

            // Set the new format
            hr = this.videoStreamConfig.SetFormat(media);
            if (hr != 0)
            {
                ErrorLogger.WriteLine("Error while setting new camera format (videoStreamConfig) in Camera.Capture. Message: " + DsError.GetErrorText(hr));
            }

            DsUtils.FreeAMMediaType(media);
            media = null;
        }
        internal VideoCapabilities(IAMStreamConfig videoStreamConfig)
        {
            if (videoStreamConfig == null)
            {
                throw new ArgumentNullException("videoStreamConfig");
            }
            AMMediaType           mediaType = null;
            VideoStreamConfigCaps caps      = null;
            IntPtr zero = IntPtr.Zero;

            try
            {
                IntPtr ptr2;
                int    num;
                int    num2;
                int    numberOfCapabilities = videoStreamConfig.GetNumberOfCapabilities(out num, out num2);
                if (numberOfCapabilities != 0)
                {
                    Marshal.ThrowExceptionForHR(numberOfCapabilities);
                }
                if (num <= 0)
                {
                    throw new NotSupportedException("This video device does not report capabilities.");
                }
                if (num2 > Marshal.SizeOf(typeof(VideoStreamConfigCaps)))
                {
                    throw new NotSupportedException("Unable to retrieve video device capabilities. This video device requires a larger VideoStreamConfigCaps structure.");
                }
                zero = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(VideoStreamConfigCaps)));
                numberOfCapabilities = videoStreamConfig.GetStreamCaps(0, out ptr2, zero);
                if (numberOfCapabilities != 0)
                {
                    Marshal.ThrowExceptionForHR(numberOfCapabilities);
                }
                mediaType                  = (AMMediaType)Marshal.PtrToStructure(ptr2, typeof(AMMediaType));
                caps                       = (VideoStreamConfigCaps)Marshal.PtrToStructure(zero, typeof(VideoStreamConfigCaps));
                this.InputSize             = caps.InputSize;
                this.MinFrameSize          = caps.MinOutputSize;
                this.MaxFrameSize          = caps.MaxOutputSize;
                this.FrameSizeGranularityX = caps.OutputGranularityX;
                this.FrameSizeGranularityY = caps.OutputGranularityY;
                this.MinFrameRate          = 10000000.0 / ((double)caps.MaxFrameInterval);
                this.MaxFrameRate          = 10000000.0 / ((double)caps.MinFrameInterval);
            }
            finally
            {
                if (zero != IntPtr.Zero)
                {
                    Marshal.FreeCoTaskMem(zero);
                }
                zero = IntPtr.Zero;
                if (mediaType != null)
                {
                    DsUtils.FreeAMMediaType(mediaType);
                }
                mediaType = null;
            }
        }
Example #29
        // Retrieve capabilities of a video device
        internal VideoCapabilities(IAMStreamConfig videoStreamConfig, int index)
        {
            AMMediaType           mediaType = null;
            VideoStreamConfigCaps caps      = new VideoStreamConfigCaps( );

            try
            {
                // retrieve capabilities struct at the specified index
                int hr = videoStreamConfig.GetStreamCaps(index, out mediaType, caps);

                if (hr != 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                if (mediaType.FormatType == FormatType.VideoInfo)
                {
                    VideoInfoHeader videoInfo = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));

                    MediaType        = GUID.GetNickname(mediaType.SubType);
                    FrameSize        = new Size(videoInfo.BmiHeader.Width, videoInfo.BmiHeader.Height);
                    BitCount         = videoInfo.BmiHeader.BitCount;
                    AverageFrameRate = (int)(10000000 / videoInfo.AverageTimePerFrame);
                    MaximumFrameRate = (int)(10000000 / caps.MinFrameInterval);
                    MinimumFrameRate = (int)(10000000 / caps.MaxFrameInterval);
                }
                else if (mediaType.FormatType == FormatType.VideoInfo2)
                {
                    VideoInfoHeader2 videoInfo = (VideoInfoHeader2)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader2));

                    MediaType        = GUID.GetNickname(mediaType.SubType);
                    FrameSize        = new Size(videoInfo.BmiHeader.Width, videoInfo.BmiHeader.Height);
                    BitCount         = videoInfo.BmiHeader.BitCount;
                    AverageFrameRate = (int)(10000000 / videoInfo.AverageTimePerFrame);
                    MaximumFrameRate = (int)(10000000 / caps.MinFrameInterval);
                    MinimumFrameRate = (int)(10000000 / caps.MaxFrameInterval);
                }
                else
                {
                    throw new ApplicationException("Unsupported format found.");
                }

                // ignore 12 bpp formats for now, since it was noticed they cause issues on Windows 8
                // TODO: proper fix needs to be done so ICaptureGraphBuilder2::RenderStream() does not fail
                // on such formats
                if (BitCount <= 12)
                {
                    //throw new ApplicationException( "Unsupported format found." );
                }
            }
            finally
            {
                if (mediaType != null)
                {
                    mediaType.Dispose( );
                }
            }
        }
Example #30
        // Set the Framerate, and video size
        private bool SetConfigParms(IPin pStill, int iWidth, int iHeight, short iBPP)
        {
            bool            success = true;
            int             hr;
            AMMediaType     media;
            VideoInfoHeader v;

            IAMStreamConfig videoStreamConfig = pStill as IAMStreamConfig;

            // Get the existing format block
            hr = videoStreamConfig.GetFormat(out media);
            DsError.ThrowExceptionForHR(hr);

            try
            {
                // copy out the videoinfoheader
                v = new VideoInfoHeader();
                Marshal.PtrToStructure(media.formatPtr, v);

                // if overriding the width, set the width
                if (iWidth > 0)
                {
                    v.BmiHeader.Width = iWidth;
                }

                // if overriding the Height, set the Height
                if (iHeight > 0)
                {
                    v.BmiHeader.Height = iHeight;
                }

                // if overriding the bits per pixel
                if (iBPP > 0)
                {
                    v.BmiHeader.BitCount = iBPP;
                }

                // Copy the media structure back
                Marshal.StructureToPtr(v, media.formatPtr, false);

                // Set the new format
                try
                {
                    hr = videoStreamConfig.SetFormat(media);
                    //DsError.ThrowExceptionForHR( hr );
                }
                catch
                {
                    success = false;
                }
            }
            finally
            {
                DsUtils.FreeAMMediaType(media);
                media = null;
            }
            return(success);
        }
Example #31
        protected object setStreamConfigSetting(IAMStreamConfig streamConfig, string fieldName, object newValue)
        {
            if (streamConfig == null)
                throw new NotSupportedException();

            object returnValue = null;
            IntPtr pmt = IntPtr.Zero;
            AMMediaType mediaType = new AMMediaType();

            try
            {
                // Get the current format info
                int hr = streamConfig.GetFormat(out pmt);
                if (hr != 0)
                    Marshal.ThrowExceptionForHR(hr);
                Marshal.PtrToStructure(pmt, mediaType);

                // The formatPtr member points to different structures
                // depending on the formatType
                object formatStruct;
                if (mediaType.formatType == FormatType.WaveEx)
                    formatStruct = new WaveFormatEx();
                else if (mediaType.formatType == FormatType.VideoInfo)
                    formatStruct = new VideoInfoHeader();
                else if (mediaType.formatType == FormatType.VideoInfo2)
                    formatStruct = new VideoInfoHeader2();
                else
                    throw new NotSupportedException("This device does not support a recognized format block.");

                // Retrieve the nested structure
                Marshal.PtrToStructure(mediaType.formatPtr, formatStruct);

                // Find the required field
                Type structType = formatStruct.GetType();
                FieldInfo fieldInfo = structType.GetField(fieldName);
                if (fieldInfo == null)
                    throw new NotSupportedException("Unable to find the member '" + fieldName + "' in the format block.");

                // Update the value of the field
                fieldInfo.SetValue(formatStruct, newValue);

                // PtrToStructure copies the data so we need to copy it back
                Marshal.StructureToPtr(formatStruct, mediaType.formatPtr, false);

                // Save the changes
                hr = streamConfig.SetFormat(mediaType);
                if (hr != 0)
                    Marshal.ThrowExceptionForHR(hr);
            }
            finally
            {
                //DsUtils.FreeAMMediaType(mediaType);
                Marshal.FreeCoTaskMem(pmt);
            }

            return (returnValue);
        }
        // Set the sample rate and channel count
        private void SetConfigParms(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, int iSampleRate, int iChannels)
        {
            int         hr;
            object      o;
            AMMediaType media;

            // Find the stream config interface
            hr = capGraph.FindInterface(
                PinCategory.Capture, MediaType.Audio, capFilter, typeof(IAMStreamConfig).GUID, out o);

            IAMStreamConfig audioStreamConfig = o as IAMStreamConfig;

            if (audioStreamConfig == null)
            {
                throw new Exception("Failed to get IAMStreamConfig");
            }

            // Get the existing format block
            hr = audioStreamConfig.GetFormat(out media);
            DsError.ThrowExceptionForHR(hr);

            // copy out the videoinfoheader
            WaveFormatEx i = new WaveFormatEx();

            Marshal.PtrToStructure(media.formatPtr, i);


            i.wFormatTag      = 0x0001;        // WAVE_FORMAT_PCM
            i.wBitsPerSample  = 16;
            i.nSamplesPerSec  = 44100;
            i.nChannels       = m_Channels;
            i.nBlockAlign     = 2;
            i.nAvgBytesPerSec = (i.nSamplesPerSec * i.nBlockAlign);
            i.cbSize          = 0;

            // if overriding the sample rate, set the sample rate
            if (iSampleRate > 0)
            {
                i.nSamplesPerSec = iSampleRate;
            }

            // if overriding the channel count, set the channel count
            if (iChannels > 0)
            {
                i.nChannels = (short)iChannels;
            }

            // keep the derived PCM fields consistent with any overrides above
            i.nBlockAlign     = (short)(i.nChannels * (i.wBitsPerSample / 8));
            i.nAvgBytesPerSec = i.nSamplesPerSec * i.nBlockAlign;

            // Copy the media structure back
            Marshal.StructureToPtr(i, media.formatPtr, false);

            // Set the new format
            hr = audioStreamConfig.SetFormat(media);
            DsError.ThrowExceptionForHR(hr);

            DsUtils.FreeAMMediaType(media);
            media = null;
        }
Example #33
        /// <summary>
        /// Gets iSC's current _AMMediaType, and frees pbFormat
        /// </summary>
        public static void GetMediaType(IAMStreamConfig iSC, out _AMMediaType mt, out object formatBlock)
        {
            IntPtr pmt = IntPtr.Zero;
            iSC.GetFormat(out pmt);

            // Frees pmt and mt.pbFormat
            MediaType.MarshalData(ref pmt, out mt, out formatBlock);

            System.Diagnostics.Debug.Assert(pmt == IntPtr.Zero);
            System.Diagnostics.Debug.Assert(mt.pbFormat == IntPtr.Zero && mt.cbFormat == 0);
        }
Example #34
        /// <summary>
        /// Gets iSC's current _AMMediaType, without freeing pbFormat
        /// Caller should call MediaType.Free(ref _AMMediaType) when done
        /// </summary>
        public static _AMMediaType GetMediaType(IAMStreamConfig iSC)
        {
            IntPtr pmt = IntPtr.Zero;
            iSC.GetFormat(out pmt);

            _AMMediaType mt;
            MediaType.MarshalData(ref pmt, out mt); // Frees pmt
            System.Diagnostics.Debug.Assert(pmt == IntPtr.Zero);

            return mt;
        }
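The two GetMediaType overloads differ only in who frees pbFormat. A sketch of the second variant, following the ownership rule stated in its doc comment (iSC is assumed to be a connected stream-config interface):

        _AMMediaType mt = GetMediaType(iSC);
        try
        {
            // inspect mt.majortype / mt.subtype here
        }
        finally
        {
            MediaType.Free(ref mt);   // frees pbFormat, as the doc comment requires
        }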
        // Retrieve capabilities of a video device
        static internal VideoCapabilities[] FromStreamConfig( IAMStreamConfig videoStreamConfig )
        {
            if ( videoStreamConfig == null )
                throw new ArgumentNullException( "videoStreamConfig" );

            // ensure this device reports capabilities
            int count, size;
            int hr = videoStreamConfig.GetNumberOfCapabilities( out count, out size );

            if ( hr != 0 )
                Marshal.ThrowExceptionForHR( hr );

            if ( count <= 0 )
                throw new NotSupportedException( "This video device does not report capabilities." );

            if ( size > Marshal.SizeOf( typeof( VideoStreamConfigCaps ) ) )
                throw new NotSupportedException( "Unable to retrieve video device capabilities. This video device requires a larger VideoStreamConfigCaps structure." );

            // group capabilities with similar parameters
            Dictionary<ulong, VideoCapabilities> videocapsList = new Dictionary<ulong, VideoCapabilities>();

            for (int i = 0; i < count; i++)
            {
                try
                {
                    VideoCapabilities vc = new VideoCapabilities(videoStreamConfig, i);

                    // cast to ulong before shifting so the 48- and 32-bit shifts are not masked away
                    ulong key = (((ulong)(uint)vc.AverageFrameRate) << 48) |
                                (((ulong)(uint)vc.FrameSize.Height) << 32) |
                                (((ulong)(uint)vc.FrameSize.Width) << 16);

                    if (!videocapsList.ContainsKey(key))
                    {
                        videocapsList.Add(key, vc);
                    }
                    else
                    {
                        if (vc.BitCount > videocapsList[key].BitCount)
                        {
                            videocapsList[key] = vc;
                        }
                    }
                }
                catch
                {
                }
            }


            VideoCapabilities[] videocaps = new VideoCapabilities[videocapsList.Count];
            videocapsList.Values.CopyTo( videocaps, 0 );

            return videocaps;
        }
		// ----------------- Constructor ---------------------

		/// <summary> Retrieve capabilities of an audio device </summary>
		internal AudioCapabilities(IAMStreamConfig audioStreamConfig)
		{
			if ( audioStreamConfig == null ) 
				throw new ArgumentNullException( "audioStreamConfig" );

			AMMediaType mediaType = null;
			AudioStreamConfigCaps caps = null;
			IntPtr pCaps = IntPtr.Zero;
			IntPtr pMediaType = IntPtr.Zero;
			try
			{
				// Ensure this device reports capabilities
				int c, size;
				int hr = audioStreamConfig.GetNumberOfCapabilities( out c, out size );
				if ( hr != 0 ) Marshal.ThrowExceptionForHR( hr );
				if ( c <= 0 ) 
					throw new NotSupportedException( "This audio device does not report capabilities." );
				if ( size > Marshal.SizeOf( typeof( AudioStreamConfigCaps ) ) )
					throw new NotSupportedException( "Unable to retrieve audio device capabilities. This audio device requires a larger AudioStreamConfigCaps structure." );
				if ( c > 1 )
					Debug.WriteLine("WARNING: This audio device supports " + c + " capability structures. Only the first structure will be used." );

				// Alloc memory for structure
				pCaps = Marshal.AllocCoTaskMem( Marshal.SizeOf( typeof( AudioStreamConfigCaps ) ) ); 

				// Retrieve first (and hopefully only) capabilities struct
				hr = audioStreamConfig.GetStreamCaps( 0, out pMediaType, pCaps );
				if ( hr != 0 ) Marshal.ThrowExceptionForHR( hr );

				// Convert pointers to managed structures
				mediaType = (AMMediaType) Marshal.PtrToStructure( pMediaType, typeof( AMMediaType ) );
				caps = (AudioStreamConfigCaps) Marshal.PtrToStructure( pCaps, typeof( AudioStreamConfigCaps ) );

				// Extract info
				MinimumChannels						= caps.MinimumChannels;
				MaximumChannels						= caps.MaximumChannels;
				ChannelsGranularity					= caps.ChannelsGranularity;
				MinimumSampleSize					= caps.MinimumBitsPerSample;
				MaximumSampleSize					= caps.MaximumBitsPerSample;
				SampleSizeGranularity				= caps.BitsPerSampleGranularity;
				MinimumSamplingRate					= caps.MinimumSampleFrequency;
				MaximumSamplingRate					= caps.MaximumSampleFrequency;
				SamplingRateGranularity				= caps.SampleFrequencyGranularity;
				
			}
			finally
			{
				if ( pCaps != IntPtr.Zero )
					Marshal.FreeCoTaskMem( pCaps );
				pCaps = IntPtr.Zero;

				// Also free the AM_MEDIA_TYPE struct allocated by GetStreamCaps
				if ( pMediaType != IntPtr.Zero )
					Marshal.FreeCoTaskMem( pMediaType );
				pMediaType = IntPtr.Zero;

				if ( mediaType != null )
					DsUtils.FreeAMMediaType( mediaType );
				mediaType = null;
			}
		}
		/// <summary> Retrieve capabilities of a video device </summary>
		internal VideoCapabilities(IAMStreamConfig videoStreamConfig)
		{
			if (videoStreamConfig == null)
				throw new ArgumentNullException("videoStreamConfig");

			AMMediaType mediaType = null;
			VideoStreamConfigCaps caps = null;
			IntPtr pCaps = IntPtr.Zero;
			//IntPtr pMediaType;
			try
			{
				// Ensure this device reports capabilities
				int c, size;
				int hr = videoStreamConfig.GetNumberOfCapabilities(out c, out size);
				Marshal.ThrowExceptionForHR(hr);

				if (c <= 0)
					throw new NotSupportedException("This video device does not report capabilities.");
				if (size > Marshal.SizeOf(typeof(VideoStreamConfigCaps)))
					throw new NotSupportedException("Unable to retrieve video device capabilities. This video device requires a larger VideoStreamConfigCaps structure.");
				if (c > 1)
					Debug.WriteLine("This video device supports " + c + " capability structures. Only the first structure will be used.");

				// Alloc memory for structure
				pCaps = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(VideoStreamConfigCaps)));

				// Retrieve first (and hopefully only) capabilities struct
				hr = videoStreamConfig.GetStreamCaps(0, out mediaType, pCaps);
				Marshal.ThrowExceptionForHR(hr);

				// Convert pointers to managed structures
				//mediaType = (AMMediaType)Marshal.PtrToStructure(pMediaType, typeof(AMMediaType));
				caps = (VideoStreamConfigCaps)Marshal.PtrToStructure(pCaps, typeof(VideoStreamConfigCaps));

				// Extract info
				InputSize = caps.InputSize;
				MinFrameSize = caps.MinOutputSize;
				MaxFrameSize = caps.MaxOutputSize;
				FrameSizeGranularityX = caps.OutputGranularityX;
				FrameSizeGranularityY = caps.OutputGranularityY;
				MinFrameRate = (double)10000000 / caps.MaxFrameInterval;
				MaxFrameRate = (double)10000000 / caps.MinFrameInterval;
			}
			finally
			{
				if (pCaps != IntPtr.Zero)
					Marshal.FreeCoTaskMem(pCaps);
				pCaps = IntPtr.Zero;

				if (mediaType != null)
					DsUtils.FreeAMMediaType(mediaType);
				mediaType = null;
			}
		}
Beispiel #38
0
        // Retrieve capabilities of a video device
        internal VideoCapabilities( IAMStreamConfig videoStreamConfig, int index )
        {
            AMMediaType mediaType = null;
            var caps = new VideoStreamConfigCaps( );

            try
            {
                // retrieve capabilities struct at the specified index
                int hr = videoStreamConfig.GetStreamCaps( index, out mediaType, caps );

                if ( hr != 0 )
                    Marshal.ThrowExceptionForHR( hr );

                if ( mediaType.FormatType == FormatType.VideoInfo )
                {
                    var videoInfo = (VideoInfoHeader) Marshal.PtrToStructure( mediaType.FormatPtr, typeof( VideoInfoHeader ) );

                    FrameSize = new Size( videoInfo.BmiHeader.Width, videoInfo.BmiHeader.Height );
                    BitCount = videoInfo.BmiHeader.BitCount;
                    AverageFrameRate = (int) ( 10000000 / videoInfo.AverageTimePerFrame );
                    MaximumFrameRate = (int) ( 10000000 / caps.MinFrameInterval );
                }
                else if ( mediaType.FormatType == FormatType.VideoInfo2 )
                {
                    var videoInfo = (VideoInfoHeader2) Marshal.PtrToStructure( mediaType.FormatPtr, typeof( VideoInfoHeader2 ) );

                    FrameSize = new Size( videoInfo.BmiHeader.Width, videoInfo.BmiHeader.Height );
                    BitCount = videoInfo.BmiHeader.BitCount;
                    AverageFrameRate = (int) ( 10000000 / videoInfo.AverageTimePerFrame );
                    MaximumFrameRate = (int) ( 10000000 / caps.MinFrameInterval );
                }
                else
                {
                    throw new ApplicationException( "Unsupported format found." );
                }

                // ignore 12 bpp formats for now, since it was noticed they cause issues on Windows 8
                // TODO: proper fix needs to be done so ICaptureGraphBuilder2::RenderStream() does not fail
                // on such formats
                if ( BitCount <= 12 )
                {
                    throw new ApplicationException( "Unsupported format found." );
                }
            }
            finally
            {
                if ( mediaType != null )
                    mediaType.Dispose( );
            }
        }
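The frame-rate arithmetic above relies on DirectShow reporting frame intervals in 100-nanosecond units, so fps = 10,000,000 / interval. A worked example with an illustrative value:

                // Illustrative value only: 333333 * 100 ns = 33.3 ms per frame.
                long averageTimePerFrame = 333333;                      // reported by the driver, 100 ns units
                int fps = (int) ( 10000000 / averageTimePerFrame );     // 10,000,000 / 333,333 = 30 fps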
        // Retrieve capabilities of a video device
        internal VideoCapabilities(IAMStreamConfig videoStreamConfig, int index)
        {
            AMMediaType mediaType = null;
            VideoStreamConfigCaps caps = new VideoStreamConfigCaps();

            // retrieve capabilities struct at the specified index
            int hr = videoStreamConfig.GetStreamCaps(index, out mediaType, caps);

            if (hr != 0)
                Marshal.ThrowExceptionForHR(hr);

            // extract info
            MediaType = mediaType;
            FrameSize = caps.InputSize;
            MaxFrameRate = (int)(10000000 / caps.MinFrameInterval);
        }
        // Retrieve capabilities of a video device
        static internal VideoCapabilities[] FromStreamConfig( IAMStreamConfig videoStreamConfig )
        {
            if ( videoStreamConfig == null )
                throw new ArgumentNullException( "videoStreamConfig" );

            // ensure this device reports capabilities
            int count, size;
            int hr = videoStreamConfig.GetNumberOfCapabilities( out count, out size );

            if ( hr != 0 )
                Marshal.ThrowExceptionForHR( hr );

            if ( count <= 0 )
                throw new NotSupportedException( "This video device does not report capabilities." );

            if ( size > Marshal.SizeOf( typeof( VideoStreamConfigCaps ) ) )
                throw new NotSupportedException( "Unable to retrieve video device capabilities. This video device requires a larger VideoStreamConfigCaps structure." );

            Dictionary<uint, VideoCapabilities> videocapsList = new Dictionary<uint, VideoCapabilities>( );

            for ( int i = 0; i < count; i++ )
            {
                // vidcaps[i] = new VideoCapabilities( videoStreamConfig, i );
                VideoCapabilities vc = new VideoCapabilities( videoStreamConfig, i );

                uint key = ( ( (uint) vc.FrameSize.Height ) << 16 ) | (uint) vc.FrameSize.Width;

                if ( !videocapsList.ContainsKey( key ) )
                {
                    videocapsList.Add( key, vc );
                }
            }

            VideoCapabilities[] videocaps = new VideoCapabilities[videocapsList.Count];
            videocapsList.Values.CopyTo( videocaps, 0 );

            return videocaps;
        }
Beispiel #41
0
        private void InitAMStreamConfig(ICaptureGraphBuilder2 captureGraphBuilder2, IBaseFilter aDev)
        {
            Object o;
            if (AMStreamConfig != null)
            {
               // IBaseFilter bf = (IBaseFilter)AMStreamConfig;
              //  RemoveFilter(ref bf);
            }
            var hr = captureGraphBuilder2.FindInterface(PinCategory.Capture, MediaType.Video, aDev, typeof(IAMStreamConfig).GUID, out o);
            DsError.ThrowExceptionForHR(hr);
            AMStreamConfig = o as IAMStreamConfig;

            if (AMStreamConfig == null)
            {
                throw new Exception("Failed to get IAMStreamConfig");
            }
        }
Beispiel #42
0
        // Set resolution for the specified stream configuration
        private static void SetResolution( IAMStreamConfig streamConfig, VideoCapabilities resolution )
        {
            if ( resolution == null )
            {
                return;
            }

            // iterate through device's capabilities to find mediaType for desired resolution
            int capabilitiesCount, capabilitySize;
            AMMediaType newMediaType = null;
            var caps = new VideoStreamConfigCaps( );

            streamConfig.GetNumberOfCapabilities( out capabilitiesCount, out capabilitySize );

            for ( int i = 0; i < capabilitiesCount; i++ )
            {
                try
                {
                    var vc = new VideoCapabilities( streamConfig, i );

                    if ( resolution == vc )
                    {
                        if ( streamConfig.GetStreamCaps( i, out newMediaType, caps ) == 0 )
                        {
                            break;
                        }
                    }
                }
                catch(Exception ex)
                {
                    // ignored
                    Logger.LogExceptionToFile(ex,"SetResolution");
                }
            }

            // set the new format
            if ( newMediaType != null )
            {
                streamConfig.SetFormat( newMediaType );
                newMediaType.Dispose( );
            }
        }
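A hypothetical calling sketch for SetResolution, assuming a FromStreamConfig-style enumerator like the ones shown earlier in this listing; the 640x480 target and the streamConfig variable are illustrative:

            // Sketch only: pick the reported capability that matches the desired frame size.
            VideoCapabilities desired = null;
            foreach ( VideoCapabilities vc in VideoCapabilities.FromStreamConfig( streamConfig ) )
            {
                if ( vc.FrameSize.Width == 640 && vc.FrameSize.Height == 480 )
                {
                    desired = vc;
                    break;
                }
            }
            SetResolution( streamConfig, desired );   // silently returns if nothing matched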
Beispiel #43
0
		private void setMediaSubType( IAMStreamConfig streamConfig, DxUtils.ColorSpaceEnum newValue )
		{
			if(this.dxUtils == null)
			{
				return;
			}

			if(streamConfig == null)
			{
				throw new NotSupportedException();
			}
			assertStopped();
			derenderGraph();

			dxUtils.setMediaSubType(streamConfig, newValue);

			renderGraph();
			startPreviewIfNeeded();
		}
Beispiel #44
0
		private DxUtils.ColorSpaceEnum getMediaSubType( IAMStreamConfig streamConfig )
		{
			if(this.dxUtils == null)
			{
				return DxUtils.ColorSpaceEnum.RGB24;
			}

			// Derender the graph. For some drivers these settings
			// cannot be read while the graph is built
			if ( streamConfig == null )
			{
				throw new NotSupportedException();
			}
			assertStopped();
			derenderGraph();

			DxUtils.ColorSpaceEnum retval = dxUtils.getMediaSubType(streamConfig);

			renderGraph();
			startPreviewIfNeeded();

			return retval;
		}
Beispiel #45
0
		/// <summary>
		/// Set video type for the specified pin interface
		/// </summary>
		/// <param name="streamConfig"></param>
		/// <param name="newValue"></param>
		/// <returns></returns>
		public bool setMediaSubType(IAMStreamConfig streamConfig, Guid newValue)
		{
#if DSHOWNET
			IntPtr pmt = IntPtr.Zero;
#endif
			AMMediaType mediaType = new AMMediaType();

			try 
			{
				// Get the current format info
#if DSHOWNET
				int hr = streamConfig.GetFormat(out pmt);
				if(hr < 0)
				{
					return false;
				}
				Marshal.PtrToStructure(pmt, mediaType);
#else
				int hr = streamConfig.GetFormat(out mediaType);
				if(hr < 0)
				{
					return false;
				}
#endif

				// Change the media subtype
				// Each enum value has a Guid associated with it
				// We store the Guid as a string in a LabelAttribute
				// applied to each enum value. See the ColorSpaceEnum.
				mediaType.subType = newValue;

				// Save the changes
				hr = streamConfig.SetFormat(mediaType);
				if(hr < 0)
				{
					return false;
				}		
			}
			finally
			{
				DsUtils.FreeAMMediaType(mediaType);
#if DSHOWNET
				Marshal.FreeCoTaskMem(pmt);
#endif
			}
			return true;
		}
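A hedged usage sketch for the method above; MediaSubType.RGB24 is the DirectShowLib constant for the RGB24 subtype GUID (under DShowNET the corresponding GUID constant applies), and videoStreamConfig is an assumed capture-pin interface:

			// Sketch only: switch the capture pin to RGB24 and report if the driver refuses.
			if ( !setMediaSubType( videoStreamConfig, MediaSubType.RGB24 ) )
			{
				Debug.WriteLine( "Driver rejected RGB24; keeping the current subtype." );
			}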
Beispiel #46
0
 /// <summary>
 /// Sets the _AMMediaType on the pin, then frees it
 /// </summary>
 public static void SetMediaType(IAMStreamConfig iSC, ref _AMMediaType mt)
 {
     try
     {
         SetMediaType(iSC, mt);
     }
     finally
     {
         MediaType.Free(ref mt);
     }
 }
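The Get/Set helpers in this listing pair up; a minimal round-trip sketch (iSC is an assumed IAMStreamConfig):

     // Sketch only: read the pin's current _AMMediaType and push it straight back.
     // SetMediaType(iSC, ref mt) frees mt afterwards, so no explicit MediaType.Free is needed.
     _AMMediaType mt = GetMediaType(iSC);
     SetMediaType(iSC, ref mt);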
Beispiel #47
0
        // Retrieve capabilities of a video device
        internal VideoCapabilities( IAMStreamConfig videoStreamConfig, int index )
        {
            AMMediaType mediaType = null;
            var caps = new VideoStreamConfigCaps( );

            try
            {
                // retrieve capabilities struct at the specified index
                int hr = videoStreamConfig.GetStreamCaps( index, out mediaType, caps );

                if ( hr != 0 )
                    Marshal.ThrowExceptionForHR( hr );

                if ( mediaType.FormatType == FormatType.VideoInfo )
                {
                    var videoInfo = (VideoInfoHeader) Marshal.PtrToStructure( mediaType.FormatPtr, typeof( VideoInfoHeader ) );

                    FrameSize = new Size( videoInfo.BmiHeader.Width, videoInfo.BmiHeader.Height );
                    BitCount = videoInfo.BmiHeader.BitCount;
                    AverageFrameRate = (int) ( 10000000 / videoInfo.AverageTimePerFrame );
                    MaximumFrameRate = (int) ( 10000000 / caps.MinFrameInterval );
                }
                else if ( mediaType.FormatType == FormatType.VideoInfo2 )
                {
                    var videoInfo = (VideoInfoHeader2) Marshal.PtrToStructure( mediaType.FormatPtr, typeof( VideoInfoHeader2 ) );

                    FrameSize = new Size( videoInfo.BmiHeader.Width, videoInfo.BmiHeader.Height );
                    BitCount = videoInfo.BmiHeader.BitCount;
                    AverageFrameRate = (int) ( 10000000 / videoInfo.AverageTimePerFrame );
                    MaximumFrameRate = (int) ( 10000000 / caps.MinFrameInterval );
                }
                else
                {
                    throw new ApplicationException( "Unsupported format found." );
                }

            }
            finally
            {
                mediaType?.Dispose( );
            }
        }
Beispiel #48
0
		/// <summary>
		/// Find supported media subtypes by trial and error: try every media type
		/// in the list and accept the ones that do not return an error.
		/// This function can be handy when DShowNET is used as the library
		/// instead of DirectShowLib.
		/// It should only be called on a derendered graph, i.e. with the capture
		/// device only and no rendering of audio, video or VBI.
		/// </summary>
		/// <param name="streamConfig"></param>
		/// <returns></returns>
		public bool FindMediaData(IAMStreamConfig streamConfig)
		{
			bool result = false;
			try
			{
				ColorSpaceEnum currentValue = this.getMediaSubType(streamConfig);
				if(this.subTypeList != null)
				{
					this.subTypeList.Clear();
				}

				foreach (object c in Enum.GetValues(typeof(ColorSpaceEnum)))
				{
					Guid subType = new Guid(LabelAttribute.FromMember(c));
					if(this.setMediaSubType(streamConfig, subType))
					{
						if(this.subTypeList == null)
						{
							this.subTypeList = new ArrayList();
						}
						// Check if subtype is already in list,
						// if so then do not add, else add to list
						bool notinlist = true;
						for(int i = 0;(i < this.subTypeList.Count)&&(notinlist); i++)
						{
							if(((Guid)this.subTypeList[i]) == subType)
							{
								notinlist = false;
							}
						}

						if(notinlist)
						{
							this.subTypeList.Add(subType);
							result = true;
						}
					}
				}
				this.setMediaSubType(streamConfig, currentValue);
				return result;
			}
			catch {}
			return result;
		}
Beispiel #49
0
        public static void GetStreamConfigCaps(IAMStreamConfig iSC, 
            out ArrayList mediaTypes, out ArrayList infoHeaders, out ArrayList streamConfigCaps)
        {
            // Initialize return values
            mediaTypes = new ArrayList();
            infoHeaders = new ArrayList();
            streamConfigCaps = new ArrayList();

            // Find out how many capabilities the stream has
            int piCount, piSize;
            iSC.GetNumberOfCapabilities(out piCount, out piSize);

            IntPtr pSCC = Marshal.AllocCoTaskMem(piSize);

            try
            {
                // Iterate through capabilities
                for(int i = 0; i < piCount; i++)
                {
                    IntPtr pmt = IntPtr.Zero;
                    iSC.GetStreamCaps(i, out pmt, pSCC);

                    _AMMediaType mt;
                    object formatBlock;

                    MediaType.MarshalData(ref pmt, out mt, out formatBlock); // Frees pmt

                    mediaTypes.Add(mt);
                    infoHeaders.Add(formatBlock);
                    streamConfigCaps.Add(MarshalStreamConfigCaps(mt.majortype, pSCC));
                }
            }
            finally
            {
                Marshal.FreeCoTaskMem(pSCC);
            }
        }
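A hypothetical consumption sketch for the three parallel lists returned above (iSC is an assumed IAMStreamConfig; the lists are index-aligned):

            // Sketch only: walk the index-aligned lists produced by GetStreamConfigCaps.
            ArrayList mediaTypes, infoHeaders, streamConfigCaps;
            GetStreamConfigCaps(iSC, out mediaTypes, out infoHeaders, out streamConfigCaps);

            for (int i = 0; i < mediaTypes.Count; i++)
            {
                _AMMediaType mt = (_AMMediaType)mediaTypes[i];
                System.Diagnostics.Debug.WriteLine("cap " + i + ": majortype " + mt.majortype);
            }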
Beispiel #50
0
        /// <summary>
        /// Constructs the _AMMediaType (adds pbFormat to it), sets it, then frees it
        /// </summary>
        public static void SetMediaType(IAMStreamConfig iSC, _AMMediaType mt, object formatBlock)
        {
            System.Diagnostics.Debug.Assert(mt.pbFormat == IntPtr.Zero && mt.cbFormat == 0);

            mt = MediaType.Construct(mt, formatBlock);
            SetMediaType(iSC, ref mt);
        }
        private void InitCaptureInterface()
        {
            // release com object (useless here but can't hurt)
            Cleanup(true);

            this.fmc = new FilgraphManagerClass();

            // create the cg object and add the filter graph to it
            Type t = Type.GetTypeFromCLSID(CLSID_CaptureGraphBuilder2);
            this.icgb = (ICaptureGraphBuilder2)Activator.CreateInstance(t);

            t = Type.GetTypeFromCLSID(CLSID_SampleGrabber);
            this.isg = (ISampleGrabber)Activator.CreateInstance(t);

            // source filter (the capture device)
            this.sf = (IBaseFilter)this.SourceFilterList[this.cbxDevice.SelectedIndex];
            // sample grabber filter
            this.sgf = (IBaseFilter)this.isg;

            object o = null;
            this.icgb.RemoteFindInterface(ref PIN_CATEGORY_CAPTURE, ref MEDIATYPE_Video, sf, ref IID_IAMStreamConfig, out o);
            this.iamsc = (IAMStreamConfig)o;

            // set sample grabber media type
            this.SGMediaType = new _AMMediaType();
            this.SGMediaType.majortype = MEDIATYPE_Video;
            this.SGMediaType.subtype = MEDIASUBTYPE_RGB24;
            this.SGMediaType.formattype = FORMAT_VideoInfo;
            this.isg.SetMediaType(ref SGMediaType);

            this.isg.SetOneShot(0);
            this.isg.SetBufferSamples(1);
        }
Beispiel #52
0
		// --------------------- Private Methods -----------------------
		
		/// <summary> 
		///  Create a new filter graph and add filters (devices, compressors, 
		///  misc), but leave the filters unconnected. Call renderGraph()
		///  to connect the filters.
		/// </summary>
		protected void createGraph()
		{
			Guid					cat;
			Guid					med;
			int						hr;

			// Ensure required properties are set
			if ( videoDevice == null && audioDevice == null )
				throw new ArgumentException( "The video and/or audio device have not been set. Please set one or both to valid capture devices.\n" );

			// Skip if we are already created
			if ( (int)graphState < (int)GraphState.Created )
			{
				// Garbage collect, ensure that previous filters are released
				GC.Collect();

				// Make a new filter graph
#if DSHOWNET
                // Make a new filter graph
                graphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.FilterGraph, true));

                // Get the Capture Graph Builder
                Guid clsid = Clsid.CaptureGraphBuilder2;
                Guid riid = typeof(ICaptureGraphBuilder2).GUID;
                captureGraphBuilder = (ICaptureGraphBuilder2)DsBugWO.CreateDsInstance(ref clsid, ref riid);
#else
				FilterGraph graph = new FilterGraph();
				graphBuilder = (IGraphBuilder)graph;

				// Get the Capture Graph Builder
				captureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
#endif

                // Link the CaptureGraphBuilder to the filter graph
                hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
                if (hr < 0) Marshal.ThrowExceptionForHR(hr);

                // Add the graph to the Running Object Table so it can be
                // viewed with GraphEdit
#if DEBUG
#if DSHOWNET
				DsROT.AddGraphToRot(graphBuilder, out rotCookie);
#else
                rotCookie = new DsROTEntry(graphBuilder);
#endif
#endif

                // Get the video device and add it to the filter graph
				if ( VideoDevice != null )
				{
					videoDeviceFilter = (IBaseFilter) Marshal.BindToMoniker( VideoDevice.MonikerString );
					hr = graphBuilder.AddFilter( videoDeviceFilter, "Video Capture Device" );
					if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
				}

				// Get the audio device and add it to the filter graph
				if ( AudioDevice != null )
				{
					audioDeviceFilter = (IBaseFilter) Marshal.BindToMoniker( AudioDevice.MonikerString );
					hr = graphBuilder.AddFilter( audioDeviceFilter, "Audio Capture Device" );
					if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
				}

                // Get the video compressor and add it to the filter graph
				if ( VideoCompressor != null )
				{
					videoCompressorFilter = (IBaseFilter) Marshal.BindToMoniker( VideoCompressor.MonikerString ); 
					hr = graphBuilder.AddFilter( videoCompressorFilter, "Video Compressor" );
					if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
				}

				// Get the audio compressor and add it to the filter graph
				if ( AudioCompressor != null )
				{
					audioCompressorFilter = (IBaseFilter) Marshal.BindToMoniker( AudioCompressor.MonikerString ); 
					hr = graphBuilder.AddFilter( audioCompressorFilter, "Audio Compressor" );
					if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
				}

				// Retrieve the stream control interface for the video device
				// FindInterface will also add any required filters
				// (WDM devices in particular may need additional
				// upstream filters to function).

				// Try looking for an interleaved media type
				object o;
				cat = PinCategory.Capture;
				med = MediaType.Interleaved;
				Guid iid = typeof(IAMStreamConfig).GUID;
#if DSHOWNET
                hr = captureGraphBuilder.FindInterface(
                    ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
				hr = captureGraphBuilder.FindInterface(
					DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif

				if ( hr != 0 )
				{
					// If not found, try looking for a video media type
					med = MediaType.Video;
#if DSHOWNET
                    hr = captureGraphBuilder.FindInterface(
                        ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
					hr = captureGraphBuilder.FindInterface(
						DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif
				
					if ( hr != 0 )
						o = null;
				}
				videoStreamConfig = o as IAMStreamConfig;

// #if NEWCODE
				// Start of new Brian's Low code
				// Retrieve the stream control interface for the video device
				// FindInterface will also add any required filters
				// (WDM devices in particular may need additional
				// upstream filters to function).

				// Try looking for an interleaved media type
				o = null;
				cat = PinCategory.Preview;
				med = MediaType.Interleaved;
				iid = typeof(IAMStreamConfig).GUID;
#if DSHOWNET
				hr = captureGraphBuilder.FindInterface(
					ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
				hr = captureGraphBuilder.FindInterface(
					DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif

				if ( hr != 0 )
				{
					// If not found, try looking for a video media type
					med = MediaType.Video;
#if DSHOWNET
					hr = captureGraphBuilder.FindInterface(
						ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
					hr = captureGraphBuilder.FindInterface(
						DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif
				
					if ( hr != 0 )
						o = null;
				}
				this.previewStreamConfig = o as IAMStreamConfig;
				// End of new Brian's Low code

				if( (this.videoStreamConfig != null)||
					(this.previewStreamConfig != null) )
				{
					this.dxUtils = new DxUtils();
					bool result = this.dxUtils.InitDxUtils(this.videoDeviceFilter);

					if((!result)&&(!this.dxUtils.FindMediaData(this.videoStreamConfig)))
					{
						this.dxUtils.Dispose();
						this.dxUtils = null;
					}
				}
// #endif
				// Retrieve the stream control interface for the audio device
				o = null;
				cat = PinCategory.Capture;
				med = MediaType.Audio ;
				iid = typeof(IAMStreamConfig).GUID;
				if( (this.AudioViaPci)&&
					(audioDeviceFilter == null)&&(videoDeviceFilter != null) )
				{
                    hr = captureGraphBuilder.FindInterface(
#if DSHOWNET
						ref cat, ref med, videoDeviceFilter, ref iid, out o );
#else
                        DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o);
#endif
				}
				else
				{
#if DSHOWNET
                    hr = captureGraphBuilder.FindInterface(
                        ref cat, ref med, audioDeviceFilter, ref iid, out o);
#else
                    hr = captureGraphBuilder.FindInterface(
	    				DsGuid.FromGuid(cat), DsGuid.FromGuid(med), audioDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif
				}

				if (hr != 0)
					o = null;
				audioStreamConfig = o as IAMStreamConfig;

				// Retrieve the media control interface (for starting/stopping graph)
				mediaControl = (IMediaControl) graphBuilder;

				// Reload any video crossbars
				if ( videoSources != null )
					videoSources.Dispose();
				videoSources = null;

				// Reload any audio crossbars
				if ( audioSources != null )
					audioSources.Dispose();
				audioSources = null;
				
				// Reload any property pages exposed by filters
                this.PropertyPages = null;

				// Reload capabilities of video device
				videoCaps = null;
				previewCaps = null;

				// Reload capabilities of audio device
				audioCaps = null;

				// Retrieve TV Tuner if available
				o = null;
				cat = PinCategory.Capture;
				med = MediaType.Interleaved; 
				iid = typeof(IAMTVTuner).GUID;
#if DSHOWNET
                hr = captureGraphBuilder.FindInterface(
                    ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else

				hr = captureGraphBuilder.FindInterface( 
					DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif
				if ( hr != 0 )
				{
					med = MediaType.Video ;
#if DSHOWNET
                    hr = captureGraphBuilder.FindInterface(
                        ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
					hr = captureGraphBuilder.FindInterface( 
						DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif
					if ( hr != 0 )
						o = null;
				}
				IAMTVTuner t = o as IAMTVTuner;
				if ( t != null )
				{
					tuner = new Tuner(t);
					// Do not forget to set proper country code (Netherlands is 31)
				}

				// No check on TV Audio needed, it will show up in the
				// PropertyPages when it is available
				// Code for finding the TV audio interface
				o = null;
				cat = PinCategory.Capture;
				med = MediaType.Interleaved;
				iid = typeof(IAMTVAudio).GUID;
				hr = captureGraphBuilder.FindInterface(
#if DSHOWNET
					ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
                    cat, med, videoDeviceFilter, iid, out o);
#endif
				if ( hr != 0 )
				{
					med = MediaType.Video;
#if DSHOWNET
					hr = captureGraphBuilder.FindInterface(
						ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
				hr = captureGraphBuilder.FindInterface(
					cat, med, videoDeviceFilter, iid, out o);
#endif
					if ( hr != 0 )
					{
						o = null;
					}
				}

				if((o != null)&&(tuner != null))
				{
					IAMTVAudio a = o as IAMTVAudio;
					TvAudio = a;
#if DEBUG
					Debug.WriteLine("FindInterface tuner.TvAudio");
#endif // DEBUG
				}

				/*
							// ----------- VMR 9 -------------------
							//## check out samples\inc\vmrutil.h :: RenderFileToVMR9

							IBaseFilter vmr = null;
							if ( ( VideoDevice != null ) && ( previewWindow != null ) )
							{
								vmr = (IBaseFilter) Activator.CreateInstance( Type.GetTypeFromCLSID( Clsid.VideoMixingRenderer9, true ) ); 
								hr = graphBuilder.AddFilter( vmr, "VMR" );
								if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

								IVMRFilterConfig9 vmrFilterConfig = (IVMRFilterConfig9) vmr;
								hr = vmrFilterConfig.SetRenderingMode( VMRMode9.Windowless );
								if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

								IVMRWindowlessControl9 vmrWindowsless = (IVMRWindowlessControl9) vmr;	
								hr = vmrWindowsless.SetVideoClippingWindow( previewWindow.Handle );
								if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
							}
							//------------------------------------------- 

							// ---------- SmartTee ---------------------

							IBaseFilter smartTeeFilter = (IBaseFilter) Activator.CreateInstance( Type.GetTypeFromCLSID( Clsid.SmartTee, true ) ); 
							hr = graphBuilder.AddFilter( smartTeeFilter, "Video Smart Tee" );
							if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

							// Video -> SmartTee
							cat = PinCategory.Capture;
							med = MediaType.Video;
							hr = captureGraphBuilder.RenderStream( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, null, smartTeeFilter ); 
							if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

							// smarttee -> mux
							cat = PinCategory.Capture;
							med = MediaType.Video;
							hr = captureGraphBuilder.RenderStream( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), smartTeeFilter, null, muxFilter ); 
							if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

							// smarttee -> vmr
							cat = PinCategory.Preview;
							med = MediaType.Video;
							hr = captureGraphBuilder.RenderStream( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), smartTeeFilter, null, vmr ); 
							if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

							// -------------------------------------
				*/		
				// Update the state now that we are done
				graphState = GraphState.Created;
			}
		}
Beispiel #53
0
		/// <summary>
		///  Retrieves the value of one member of the IAMStreamConfig format block.
		///  Helper function for several properties that expose
		///  video/audio settings from IAMStreamConfig.GetFormat().
		///  IAMStreamConfig.GetFormat() returns an AMMediaType struct.
		///  AMMediaType.formatPtr points to a format block structure.
		///  This format block structure may be one of several 
		///  types, the type being determined by AMMediaType.formatType.
		/// </summary>
		protected object getStreamConfigSetting( IAMStreamConfig streamConfig, string fieldName)
		{
			if ( streamConfig == null )
				throw new NotSupportedException();
			assertStopped();

			derenderGraph();

			object returnValue = null;
#if DSHOWNET
			IntPtr pmt = IntPtr.Zero;
#endif
			AMMediaType mediaType = new AMMediaType();

			try 
			{
				// Get the current format info
#if DSHOWNET
                int hr = streamConfig.GetFormat(out pmt);
#else
				int hr = streamConfig.GetFormat(out mediaType);
#endif
				if ( hr != 0 )
					Marshal.ThrowExceptionForHR( hr );

#if DSHOWNET
				Marshal.PtrToStructure( pmt, mediaType );
#endif

				// The formatPtr member points to different structures
				// depending on the formatType
				object formatStruct;
				if ( mediaType.formatType == FormatType.WaveEx )
					formatStruct = new WaveFormatEx();
				else if ( mediaType.formatType == FormatType.VideoInfo )
					formatStruct = new VideoInfoHeader();
				else if ( mediaType.formatType == FormatType.VideoInfo2 )
					formatStruct = new VideoInfoHeader2();
				else
					throw new NotSupportedException( "This device does not support a recognized format block." );

				// Retrieve the nested structure
				Marshal.PtrToStructure( mediaType.formatPtr, formatStruct );

				// Find the required field
				Type structType = formatStruct.GetType();
				FieldInfo fieldInfo = structType.GetField( fieldName );
				if ( fieldInfo == null )
					throw new NotSupportedException( "Unable to find the member '" + fieldName + "' in the format block." );

				// Extract the field's current value
				returnValue = fieldInfo.GetValue( formatStruct ); 
						
			}
			finally
			{
				DsUtils.FreeAMMediaType( mediaType );
#if DSHOWNET
				Marshal.FreeCoTaskMem( pmt );
#endif
			}
			renderGraph();
			startPreviewIfNeeded();

			return( returnValue );
		}
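A hedged usage sketch for the reflection-based getter above. For a video pin the format block is a VideoInfoHeader, whose AvgTimePerFrame member holds the frame interval in 100-nanosecond units; the exact field name depends on the interop definitions in use, so treat it as an assumption.

			// Sketch only: read the current frame interval from the video capture pin.
			long avgTimePerFrame = (long) getStreamConfigSetting( videoStreamConfig, "AvgTimePerFrame" );
			double frameRate = 10000000.0 / avgTimePerFrame;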
Beispiel #54
0
 /// <summary>
 /// Sets the _AMMediaType on the pin, but doesn't free it
 /// </summary>
 public static void SetMediaType(IAMStreamConfig iSC, _AMMediaType mt)
 {
     System.Diagnostics.Debug.Assert(mt.pbFormat != IntPtr.Zero && mt.cbFormat != 0);
     iSC.SetFormat(ref mt);
 }
Beispiel #55
0
 protected object setStreamConfigSetting(IAMStreamConfig streamConfig, string fieldName, object newValue)
 {
     if (streamConfig == null)
     {
         throw new NotSupportedException();
     }
     this.assertStopped();
     this.derenderGraph();
     IntPtr zero = IntPtr.Zero;
     AMMediaType structure = new AMMediaType();
     try
     {
         object obj3;
         int format = streamConfig.GetFormat(out zero);
         if (format != 0)
         {
             Marshal.ThrowExceptionForHR(format);
         }
         Marshal.PtrToStructure(zero, structure);
         if (structure.formatType == FormatType.WaveEx)
         {
             obj3 = new WaveFormatEx();
         }
         else if (structure.formatType == FormatType.VideoInfo)
         {
             obj3 = new VideoInfoHeader();
         }
         else
         {
             if (structure.formatType != FormatType.VideoInfo2)
             {
                 throw new NotSupportedException("This device does not support a recognized format block.");
             }
             obj3 = new VideoInfoHeader2();
         }
         Marshal.PtrToStructure(structure.formatPtr, obj3);
         FieldInfo field = obj3.GetType().GetField(fieldName);
         if (field == null)
         {
             throw new NotSupportedException("Unable to find the member '" + fieldName + "' in the format block.");
         }
         field.SetValue(obj3, newValue);
         Marshal.StructureToPtr(obj3, structure.formatPtr, false);
         format = streamConfig.SetFormat(structure);
         if (format != 0)
         {
             Marshal.ThrowExceptionForHR(format);
         }
     }
     finally
     {
         DsUtils.FreeAMMediaType(structure);
         Marshal.FreeCoTaskMem(zero);
     }
     this.renderGraph();
     this.startPreviewIfNeeded();
     return null;
 }
Beispiel #56
0
		/// <summary>
		/// Get the video type for the specified pin interface
		/// </summary>
		/// <param name="streamConfig"></param>
		/// <returns></returns>
		public ColorSpaceEnum getMediaSubType(IAMStreamConfig streamConfig)
		{
			ColorSpaceEnum retval = ColorSpaceEnum.RGB24;
			bool found;
#if DSHOWNET
			IntPtr pmt = IntPtr.Zero;
#endif
			AMMediaType mediaType = new AMMediaType();

			try 
			{
				// Get the current format info
#if DSHOWNET
				int hr = streamConfig.GetFormat(out pmt);
				if (hr < 0)
				{
					Marshal.ThrowExceptionForHR(hr);
				}
				Marshal.PtrToStructure(pmt, mediaType);
#else
				int hr = streamConfig.GetFormat(out mediaType);
				if (hr < 0)
				{
					Marshal.ThrowExceptionForHR(hr);
				}
#endif

				// Search the Guids to find the correct enum value.
				// Each enum value has a Guid associated with it
				// We store the Guid as a string in a LabelAttribute
				// applied to each enum value. See the ColorSpaceEnum.
				found = false;
				foreach (object c in Enum.GetValues(typeof(ColorSpaceEnum)))
				{
					if (mediaType.subType == new Guid(LabelAttribute.FromMember(c)))
					{
						found = true;
						retval = (ColorSpaceEnum)c;
					}
				}
				if(!found)
				{
#if DEBUG
					String mediaSubType;
					MakeFourCC(mediaType.subType, out mediaSubType);
					Debug.WriteLine("Unknown color space (media subtype=" + mediaSubType + "):" + mediaType.subType.ToString());
#endif
					throw new ApplicationException("Unknown color space (media subtype):" + mediaType.subType.ToString());
				}
			}
			finally
			{
				DsUtils.FreeAMMediaType( mediaType );
#if DSHOWNET
				Marshal.FreeCoTaskMem( pmt );
#endif
			}

			return retval;
		}
Beispiel #57
0
 protected void createGraph()
 {
     if ((this.videoDevice == null) && (this.audioDevice == null))
     {
         throw new ArgumentException("The video and/or audio device have not been set. Please set one or both to valid capture devices.\n");
     }
     if (this.graphState < GraphState.Created)
     {
         object obj2;
         GC.Collect();
         this.graphBuilder = (IGraphBuilder) Activator.CreateInstance(System.Type.GetTypeFromCLSID(Clsid.FilterGraph, true));
         Guid clsid = Clsid.CaptureGraphBuilder2;
         Guid gUID = typeof(ICaptureGraphBuilder2).GUID;
         this.captureGraphBuilder = (ICaptureGraphBuilder2) DsBugWO.CreateDsInstance(ref clsid, ref gUID);
         int errorCode = this.captureGraphBuilder.SetFiltergraph(this.graphBuilder);
         if (errorCode < 0)
         {
             Marshal.ThrowExceptionForHR(errorCode);
         }
         if (this.VideoDevice != null)
         {
             this.videoDeviceFilter = (IBaseFilter) Marshal.BindToMoniker(this.VideoDevice.MonikerString);
             errorCode = this.graphBuilder.AddFilter(this.videoDeviceFilter, "Video Capture Device");
             if (errorCode < 0)
             {
                 Marshal.ThrowExceptionForHR(errorCode);
             }
         }
         if (this.AudioDevice != null)
         {
             this.audioDeviceFilter = (IBaseFilter) Marshal.BindToMoniker(this.AudioDevice.MonikerString);
             errorCode = this.graphBuilder.AddFilter(this.audioDeviceFilter, "Audio Capture Device");
             if (errorCode < 0)
             {
                 Marshal.ThrowExceptionForHR(errorCode);
             }
         }
         if (this.VideoCompressor != null)
         {
             this.videoCompressorFilter = (IBaseFilter) Marshal.BindToMoniker(this.VideoCompressor.MonikerString);
             errorCode = this.graphBuilder.AddFilter(this.videoCompressorFilter, "Video Compressor");
             if (errorCode < 0)
             {
                 Marshal.ThrowExceptionForHR(errorCode);
             }
         }
         if (this.AudioCompressor != null)
         {
             this.audioCompressorFilter = (IBaseFilter) Marshal.BindToMoniker(this.AudioCompressor.MonikerString);
             errorCode = this.graphBuilder.AddFilter(this.audioCompressorFilter, "Audio Compressor");
             if (errorCode < 0)
             {
                 Marshal.ThrowExceptionForHR(errorCode);
             }
         }
         Guid capture = PinCategory.Capture;
         Guid interleaved = MediaType.Interleaved;
         Guid riid = typeof(IAMStreamConfig).GUID;
         if (this.captureGraphBuilder.FindInterface(ref capture, ref interleaved, this.videoDeviceFilter, ref riid, out obj2) != 0)
         {
             interleaved = MediaType.Video;
             if (this.captureGraphBuilder.FindInterface(ref capture, ref interleaved, this.videoDeviceFilter, ref riid, out obj2) != 0)
             {
                 obj2 = null;
             }
         }
         this.videoStreamConfig = obj2 as IAMStreamConfig;
         obj2 = null;
         capture = PinCategory.Capture;
         interleaved = MediaType.Audio;
         riid = typeof(IAMStreamConfig).GUID;
         if (this.captureGraphBuilder.FindInterface(ref capture, ref interleaved, this.audioDeviceFilter, ref riid, out obj2) != 0)
         {
             obj2 = null;
         }
         this.audioStreamConfig = obj2 as IAMStreamConfig;
         this.mediaControl = (IMediaControl) this.graphBuilder;
         if (this.videoSources != null)
         {
             this.videoSources.Dispose();
         }
         this.videoSources = null;
         if (this.audioSources != null)
         {
             this.audioSources.Dispose();
         }
         this.audioSources = null;
         if (this.propertyPages != null)
         {
             this.propertyPages.Dispose();
         }
         this.propertyPages = null;
         this.videoCaps = null;
         this.audioCaps = null;
         obj2 = null;
         capture = PinCategory.Capture;
         interleaved = MediaType.Interleaved;
         riid = typeof(IAMTVTuner).GUID;
         if (this.captureGraphBuilder.FindInterface(ref capture, ref interleaved, this.videoDeviceFilter, ref riid, out obj2) != 0)
         {
             interleaved = MediaType.Video;
             if (this.captureGraphBuilder.FindInterface(ref capture, ref interleaved, this.videoDeviceFilter, ref riid, out obj2) != 0)
             {
                 obj2 = null;
             }
         }
         IAMTVTuner tuner = obj2 as IAMTVTuner;
         if (tuner != null)
         {
             this.tuner = new DirectX.Capture.Tuner(tuner);
         }
         this.graphState = GraphState.Created;
     }
 }
        // Set resolution for the specified stream configuration
        private void SetResolution( IAMStreamConfig streamConfig, VideoCapabilities resolution )
        {
            if ( resolution == null )
            {
                return;
            }

            // iterate through device's capabilities to find mediaType for desired resolution
            int capabilitiesCount = 0, capabilitySize = 0;
            AMMediaType newMediaType = null;
            VideoStreamConfigCaps caps = new VideoStreamConfigCaps( );

            streamConfig.GetNumberOfCapabilities( out capabilitiesCount, out capabilitySize );

            for ( int i = 0; i < capabilitiesCount; i++ )
            {
                try
                {
                    VideoCapabilities vc = new VideoCapabilities( streamConfig, i );

                    if ( resolution == vc )
                    {
                        if ( streamConfig.GetStreamCaps( i, out newMediaType, caps ) == 0 )
                        {
                            break;
                        }
                    }
                }
                catch
                {
                }
            }

            // set the new format
            if ( newMediaType != null )
            {
                streamConfig.SetFormat( newMediaType );
                newMediaType.Dispose( );
            }
        }
        // Set frame's size and rate for the specified stream configuration
        private void SetFrameSizeAndRate( IAMStreamConfig streamConfig, Size size, int frameRate )
        {
            bool sizeSet = false;
            AMMediaType mediaType;

            // get current format
            streamConfig.GetFormat( out mediaType );

            // change frame size if required
            if ( ( size.Width != 0 ) && ( size.Height != 0 ) )
            {
                // iterate through device's capabilities to find mediaType for desired resolution
                int capabilitiesCount = 0, capabilitySize = 0;
                AMMediaType newMediaType = null;
                VideoStreamConfigCaps caps = new VideoStreamConfigCaps( );

                streamConfig.GetNumberOfCapabilities( out capabilitiesCount, out capabilitySize );

                for ( int i = 0; i < capabilitiesCount; i++ )
                {
                    if ( streamConfig.GetStreamCaps( i, out newMediaType, caps ) == 0 )
                    {
                        if ( caps.InputSize == size )
                        {
                            mediaType.Dispose( );
                            mediaType = newMediaType;
                            sizeSet = true;
                            break;
                        }
                        else
                        {
                            newMediaType.Dispose( );
                        }
                    }
                }
            }

            VideoInfoHeader infoHeader = (VideoInfoHeader) Marshal.PtrToStructure( mediaType.FormatPtr, typeof( VideoInfoHeader ) );

            // try changing size manually if failed finding mediaType before
            if ( ( size.Width != 0 ) && ( size.Height != 0 ) && ( !sizeSet ) )
            {
                infoHeader.BmiHeader.Width  = size.Width;
                infoHeader.BmiHeader.Height = size.Height;
            }
            // change frame rate if required
            if ( frameRate != 0 )
            {
                infoHeader.AverageTimePerFrame = 10000000 / frameRate;
            }

            // copy the media structure back
            Marshal.StructureToPtr( infoHeader, mediaType.FormatPtr, false );

            // set the new format
            streamConfig.SetFormat( mediaType );

            mediaType.Dispose( );
        }
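A hypothetical calling sketch for SetFrameSizeAndRate (the values are illustrative): a zero size leaves the frame size untouched and a zero frame rate keeps the current rate.

            // Sketch only: request 640x480 at 30 fps on the capture pin.
            SetFrameSizeAndRate( streamConfig, new Size( 640, 480 ), 30 );

            // Sketch only: keep the current frame size, change only the rate to 25 fps.
            SetFrameSizeAndRate( streamConfig, Size.Empty, 25 );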
Beispiel #60
-1
		/// <summary>
		///  Set the value of one member of the IAMStreamConfig format block.
		///  Helper function for several properties that expose
		///  video/audio settings from IAMStreamConfig.GetFormat().
		///  IAMStreamConfig.GetFormat() returns an AMMediaType struct.
		///  AMMediaType.formatPtr points to a format block structure.
		///  This format block structure may be one of several 
		///  types, the type being determined by AMMediaType.formatType.
		/// </summary>
		protected object setStreamConfigSetting( IAMStreamConfig streamConfig, string fieldName, object newValue)
		{
			if ( streamConfig == null )
				throw new NotSupportedException();
			assertStopped();
			derenderGraph();

			object returnValue = null;
#if DSHOWNET
            IntPtr pmt = IntPtr.Zero;
#endif
            AMMediaType mediaType = new AMMediaType();

			try 
			{
				// Get the current format info
#if DSHOWNET
                int hr = streamConfig.GetFormat(out pmt);
#else
				int hr = streamConfig.GetFormat(out mediaType);
#endif
				if ( hr != 0 )
					Marshal.ThrowExceptionForHR( hr );

#if DSHOWNET
                Marshal.PtrToStructure(pmt, mediaType);
#endif

				// The formatPtr member points to different structures
				// depending on the formatType
				object formatStruct;
				if ( mediaType.formatType == FormatType.WaveEx )
					formatStruct = new WaveFormatEx();
				else if ( mediaType.formatType == FormatType.VideoInfo )
					formatStruct = new VideoInfoHeader();
				else if ( mediaType.formatType == FormatType.VideoInfo2 )
					formatStruct = new VideoInfoHeader2();
				else
					throw new NotSupportedException( "This device does not support a recognized format block." );

				// Retrieve the nested structure
				Marshal.PtrToStructure( mediaType.formatPtr, formatStruct );

				// Find the required field
				Type structType = formatStruct.GetType();
				FieldInfo fieldInfo = structType.GetField( fieldName );
				if ( fieldInfo == null )
					throw new NotSupportedException( "Unable to find the member '" + fieldName + "' in the format block." );

				// Update the value of the field
				fieldInfo.SetValue( formatStruct, newValue );

				// Update fields that may depend on specific values of other attributes
				if (mediaType.formatType == FormatType.WaveEx)
				{
					WaveFormatEx waveFmt = formatStruct as WaveFormatEx;
					waveFmt.nBlockAlign = (short)(waveFmt.nChannels * waveFmt.wBitsPerSample / 8);
					waveFmt.nAvgBytesPerSec = waveFmt.nBlockAlign * waveFmt.nSamplesPerSec;
				}

                // PtrToStructure copies the data so we need to copy it back
				Marshal.StructureToPtr( formatStruct, mediaType.formatPtr, false ); 

				// Save the changes
				hr = streamConfig.SetFormat( mediaType );
				if ( hr != 0 )
					Marshal.ThrowExceptionForHR( hr );
			}
			finally
			{
				DsUtils.FreeAMMediaType( mediaType );
#if DSHOWNET
                Marshal.FreeCoTaskMem(pmt);
#endif
            }
			renderGraph();
			startPreviewIfNeeded();

			return( returnValue );
		}
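Hedged usage sketches for the setter above. The WaveFormatEx field names shown ("nSamplesPerSec", "wBitsPerSample") match the members the method already recomputes; the VideoInfoHeader field name "AvgTimePerFrame" is an assumption based on the usual interop definitions.

		// Sketch only: 44.1 kHz, 16-bit audio (nBlockAlign / nAvgBytesPerSec are recomputed above).
		setStreamConfigSetting( audioStreamConfig, "nSamplesPerSec", 44100 );
		setStreamConfigSetting( audioStreamConfig, "wBitsPerSample", (short) 16 );

		// Sketch only: roughly 30 fps on the video pin (frame interval in 100 ns units).
		setStreamConfigSetting( videoStreamConfig, "AvgTimePerFrame", (long) ( 10000000 / 30 ) );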