private void SetResolution(IAMStreamConfig streamConfig, VideoCapabilities resolution) { if (!(resolution == null)) { int count = 0; int size = 0; AMMediaType mediaType = null; VideoStreamConfigCaps streamConfigCaps = new VideoStreamConfigCaps(); streamConfig.GetNumberOfCapabilities(out count, out size); for (int i = 0; i < count; i++) { try { VideoCapabilities b = new VideoCapabilities(streamConfig, i); if (resolution == b && streamConfig.GetStreamCaps(i, out mediaType, streamConfigCaps) == 0) { break; } } catch { } } if (mediaType != null) { streamConfig.SetFormat(mediaType); mediaType.Dispose(); } } }
// Token: 0x06000040 RID: 64 RVA: 0x000033D0 File Offset: 0x000015D0 private void SetResolution(IAMStreamConfig streamConfig, VideoCapabilities resolution) { if (resolution == null) { return; } int num = 0; int num2 = 0; AMMediaType ammediaType = null; VideoStreamConfigCaps streamConfigCaps = new VideoStreamConfigCaps(); streamConfig.GetNumberOfCapabilities(out num, out num2); for (int i = 0; i < num; i++) { try { VideoCapabilities b = new VideoCapabilities(streamConfig, i); if (resolution == b && streamConfig.GetStreamCaps(i, out ammediaType, streamConfigCaps) == 0) { break; } } catch { } } if (ammediaType != null) { streamConfig.SetFormat(ammediaType); ammediaType.Dispose(); } }
public int GetStreamCaps(int iIndex, out AMMediaType ppmt, out VideoStreamConfigCaps _caps) { #if HAMED_LOG_METHOD_INFO MethodBase method = new StackTrace().GetFrame(0).GetMethod(); Console.WriteLine(this.GetType().FullName + " - " + method.Name + " - " + method.ToString()); #endif ppmt = null; _caps = null; if (iIndex < 0) { return(E_INVALIDARG); } ppmt = new AMMediaType(); HRESULT hr = (HRESULT)GetMediaType(iIndex, ref ppmt); if (FAILED(hr)) { return(hr); } if (hr == VFW_S_NO_MORE_ITEMS) { return(S_FALSE); } hr = (HRESULT)GetDefaultCaps(iIndex, out _caps); return(hr); }
public int GetDefaultCaps(int nIndex, out VideoStreamConfigCaps _caps) { _caps = new VideoStreamConfigCaps(); _caps.guid = FormatType.VideoInfo; _caps.VideoStandard = AnalogVideoStandard.None; _caps.InputSize.Width = c_iDefaultWidth; _caps.InputSize.Height = c_iDefaultHeight; _caps.MinCroppingSize.Width = c_nMinWidth; _caps.MinCroppingSize.Height = c_nMinHeight; _caps.MaxCroppingSize.Width = c_nMaxWidth; _caps.MaxCroppingSize.Height = c_nMaxHeight; _caps.CropGranularityX = c_nGranularityW; _caps.CropGranularityY = c_nGranularityH; _caps.CropAlignX = 0; _caps.CropAlignY = 0; _caps.MinOutputSize.Width = _caps.MinCroppingSize.Width; _caps.MinOutputSize.Height = _caps.MinCroppingSize.Height; _caps.MaxOutputSize.Width = _caps.MaxCroppingSize.Width; _caps.MaxOutputSize.Height = _caps.MaxCroppingSize.Height; _caps.OutputGranularityX = _caps.CropGranularityX; _caps.OutputGranularityY = _caps.CropGranularityY; _caps.StretchTapsX = 0; _caps.StretchTapsY = 0; _caps.ShrinkTapsX = 0; _caps.ShrinkTapsY = 0; _caps.MinFrameInterval = UNITS / c_nMaxFPS; _caps.MaxFrameInterval = UNITS / c_nMinFPS; _caps.MinBitsPerSecond = (_caps.MinOutputSize.Width * _caps.MinOutputSize.Height * c_nDefaultBitCount) * c_nMinFPS; _caps.MaxBitsPerSecond = (_caps.MaxOutputSize.Width * _caps.MaxOutputSize.Height * c_nDefaultBitCount) * c_nMaxFPS; return(NOERROR); }
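The GetDefaultCaps examples above divide UNITS by FPS constants because DirectShow expresses frame intervals in 100-nanosecond reference-time units (UNITS = 10,000,000 per second), so MinFrameInterval corresponds to the maximum frame rate and MaxFrameInterval to the minimum. A minimal sketch of that conversion follows; the helper and its names are illustrative, not part of the snippets above.

// Minimal sketch of the 100-ns <-> FPS conversion used by the caps code above.
// Helper names are illustrative only.
static class FrameRateUnits
{
    // DirectShow reference time: 100-nanosecond units per second.
    public const long UNITS = 10_000_000;

    // Frame interval (REFERENCE_TIME) for a given frame rate.
    public static long IntervalFromFps(int fps) => UNITS / fps;

    // Frame rate for a given frame interval; MinFrameInterval yields the maximum FPS.
    public static double FpsFromInterval(long interval) =>
        interval > 0 ? (double)UNITS / interval : 0.0;
}
// Example: IntervalFromFps(30) == 333333, FpsFromInterval(333333) is roughly 30.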
// Retrieve capabilities of a video device internal VideoCapabilities(IAMStreamConfig videoStreamConfig, int index) { AMMediaType mediaType = null; VideoStreamConfigCaps caps = new VideoStreamConfigCaps( ); try { // retrieve capabilities struct at the specified index int hr = videoStreamConfig.GetStreamCaps(index, out mediaType, caps); if (hr != 0) { Marshal.ThrowExceptionForHR(hr); } // extract info FrameSize = caps.InputSize; MaxFrameRate = (int)(10000000 / caps.MinFrameInterval); } finally { if (mediaType != null) { mediaType.Dispose( ); } } }
// Set frame's size and rate for the specified stream configuration private void SetFrameSizeAndRate(IAMStreamConfig streamConfig, Size size, int frameRate) { bool sizeSet = false; AMMediaType mediaType; // get current format streamConfig.GetFormat(out mediaType); // change frame size if required if ((size.Width != 0) && (size.Height != 0)) { // iterate through device's capabilities to find mediaType for desired resolution int capabilitiesCount = 0, capabilitySize = 0; AMMediaType newMediaType = null; VideoStreamConfigCaps caps = new VideoStreamConfigCaps(); streamConfig.GetNumberOfCapabilities(out capabilitiesCount, out capabilitySize); for (int i = 0; i < capabilitiesCount; i++) { if (streamConfig.GetStreamCaps(i, out newMediaType, caps) == 0) { if (caps.InputSize == size) { mediaType.Dispose(); mediaType = newMediaType; sizeSet = true; break; } else { newMediaType.Dispose(); } } } } VideoInfoHeader infoHeader = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader)); // try changing size manually if failed finding mediaType before if ((size.Width != 0) && (size.Height != 0) && (!sizeSet)) { infoHeader.BmiHeader.Width = size.Width; infoHeader.BmiHeader.Height = size.Height; } // change frame rate if required if (frameRate != 0) { infoHeader.AverageTimePerFrame = 10000000 / frameRate; } // copy the media structure back Marshal.StructureToPtr(infoHeader, mediaType.FormatPtr, false); // set the new format streamConfig.SetFormat(mediaType); mediaType.Dispose(); }
// Retrieve capabilities of a video device internal VideoCapabilities(IAMStreamConfig videoStreamConfig, int index) { AMMediaType mediaType = null; VideoStreamConfigCaps caps = new VideoStreamConfigCaps( ); try { // retrieve capabilities struct at the specified index int hr = videoStreamConfig.GetStreamCaps(index, out mediaType, caps); if (hr != 0) { Marshal.ThrowExceptionForHR(hr); } if (mediaType.FormatType == FormatType.VideoInfo) { VideoInfoHeader videoInfo = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader)); MediaType = GUID.GetNickname(mediaType.SubType); FrameSize = new Size(videoInfo.BmiHeader.Width, videoInfo.BmiHeader.Height); BitCount = videoInfo.BmiHeader.BitCount; AverageFrameRate = (int)(10000000 / videoInfo.AverageTimePerFrame); MaximumFrameRate = (int)(10000000 / caps.MinFrameInterval); MinimumFrameRate = (int)(10000000 / caps.MaxFrameInterval); } else if (mediaType.FormatType == FormatType.VideoInfo2) { VideoInfoHeader2 videoInfo = (VideoInfoHeader2)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader2)); MediaType = GUID.GetNickname(mediaType.SubType); FrameSize = new Size(videoInfo.BmiHeader.Width, videoInfo.BmiHeader.Height); BitCount = videoInfo.BmiHeader.BitCount; AverageFrameRate = (int)(10000000 / videoInfo.AverageTimePerFrame); MaximumFrameRate = (int)(10000000 / caps.MinFrameInterval); MinimumFrameRate = (int)(10000000 / caps.MaxFrameInterval); } else { throw new ApplicationException("Unsupported format found."); } // ignore 12 bpp formats for now, since it was noticed they cause issues on Windows 8 // TODO: proper fix needs to be done so ICaptureGraphBuilder2::RenderStream() does not fail // on such formats if (BitCount <= 12) { //throw new ApplicationException( "Unsupported format found." ); } } finally { if (mediaType != null) { mediaType.Dispose( ); } } }
internal VideoCapabilities(IAMStreamConfig videoStreamConfig) { if (videoStreamConfig == null) { throw new ArgumentNullException("videoStreamConfig"); } AMMediaType mediaType = null; VideoStreamConfigCaps caps = null; IntPtr zero = IntPtr.Zero; try { IntPtr ptr2; int num; int num2; int numberOfCapabilities = videoStreamConfig.GetNumberOfCapabilities(out num, out num2); if (numberOfCapabilities != 0) { Marshal.ThrowExceptionForHR(numberOfCapabilities); } if (num <= 0) { throw new NotSupportedException("This video device does not report capabilities."); } if (num2 > Marshal.SizeOf(typeof(VideoStreamConfigCaps))) { throw new NotSupportedException("Unable to retrieve video device capabilities. This video device requires a larger VideoStreamConfigCaps structure."); } zero = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(VideoStreamConfigCaps))); numberOfCapabilities = videoStreamConfig.GetStreamCaps(0, out ptr2, zero); if (numberOfCapabilities != 0) { Marshal.ThrowExceptionForHR(numberOfCapabilities); } mediaType = (AMMediaType)Marshal.PtrToStructure(ptr2, typeof(AMMediaType)); caps = (VideoStreamConfigCaps)Marshal.PtrToStructure(zero, typeof(VideoStreamConfigCaps)); this.InputSize = caps.InputSize; this.MinFrameSize = caps.MinOutputSize; this.MaxFrameSize = caps.MaxOutputSize; this.FrameSizeGranularityX = caps.OutputGranularityX; this.FrameSizeGranularityY = caps.OutputGranularityY; this.MinFrameRate = 10000000.0 / ((double)caps.MaxFrameInterval); this.MaxFrameRate = 10000000.0 / ((double)caps.MinFrameInterval); } finally { if (zero != IntPtr.Zero) { Marshal.FreeCoTaskMem(zero); } zero = IntPtr.Zero; if (mediaType != null) { DsUtils.FreeAMMediaType(mediaType); } mediaType = null; } }
public void GetCaptureSupportSize(string sFriendlyName) { ICaptureGraphBuilder2 captureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2(); IBaseFilter pbf = this.CreateFilter(FilterCategory.VideoInputDevice, sFriendlyName); object obj; int hr = captureGraphBuilder.FindInterface(PinCategory.Capture, MediaType.Video, pbf, typeof(IAMStreamConfig).GUID, out obj); DsError.ThrowExceptionForHR(hr); IAMStreamConfig iamstreamConfig = obj as IAMStreamConfig; bool flag = iamstreamConfig == null; if (flag) { MessageBox.Show("Failed to get IAMStreamConfig!"); } else { VideoStreamConfigCaps structure = new VideoStreamConfigCaps(); int num; int num2; hr = iamstreamConfig.GetNumberOfCapabilities(out num, out num2); DsError.ThrowExceptionForHR(hr); bool flag2 = Marshal.SizeOf(structure) != num2; if (flag2) { MessageBox.Show("Failed to get the supported resolutions!"); } else { this.cmbResolution.Items.Clear(); for (int i = 0; i < num; i++) { IntPtr intPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(structure)); AMMediaType ammediaType = new AMMediaType(); hr = iamstreamConfig.GetStreamCaps(i, out ammediaType, intPtr); DsError.ThrowExceptionForHR(hr); bool flag3 = ammediaType.majorType == MediaType.Video && ammediaType.formatType == FormatType.VideoInfo; if (flag3) { Marshal.StructureToPtr(structure, intPtr, false); VideoInfoHeader videoInfoHeader = new VideoInfoHeader(); Marshal.PtrToStructure(ammediaType.formatPtr, videoInfoHeader); int width = videoInfoHeader.BmiHeader.Width; int height = videoInfoHeader.BmiHeader.Height; this.cmbResolution.Items.Add(width + "*" + height); } Marshal.FreeCoTaskMem(intPtr); DsUtils.FreeAMMediaType(ammediaType); } this.cmbResolution.SelectedIndex = ((this.cmbResolution.Items.Count > 0) ? 0 : -1); } } }
internal VideoCapabilities(IAMStreamConfig videoStreamConfig, int index) { AMMediaType mediaType = null; VideoStreamConfigCaps caps = new VideoStreamConfigCaps( ); try { int hr = videoStreamConfig.GetStreamCaps(index, out mediaType, caps); if (hr != 0) { Marshal.ThrowExceptionForHR(hr); } if (mediaType.FormatType == FormatType.VideoInfo) { VideoInfoHeader videoInfo = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader)); FrameSize = new Size(videoInfo.BmiHeader.Width, videoInfo.BmiHeader.Height); BitCount = videoInfo.BmiHeader.BitCount; AverageFrameRate = (int)(10000000 / videoInfo.AverageTimePerFrame); MaximumFrameRate = (int)(10000000 / caps.MinFrameInterval); } else if (mediaType.FormatType == FormatType.VideoInfo2) { VideoInfoHeader2 videoInfo = (VideoInfoHeader2)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader2)); FrameSize = new Size(videoInfo.BmiHeader.Width, videoInfo.BmiHeader.Height); BitCount = videoInfo.BmiHeader.BitCount; AverageFrameRate = (int)(10000000 / videoInfo.AverageTimePerFrame); MaximumFrameRate = (int)(10000000 / caps.MinFrameInterval); } else { throw new ApplicationException("Unsupported format found."); } if (BitCount <= 12) { throw new ApplicationException("Unsupported format found."); } } finally { if (mediaType != null) { mediaType.Dispose( ); } } }
// Retrieve capabilities of a video device internal VideoCapabilities( IAMStreamConfig videoStreamConfig, int index ) { AMMediaType mediaType = null; var caps = new VideoStreamConfigCaps( ); try { // retrieve capabilities struct at the specified index int hr = videoStreamConfig.GetStreamCaps( index, out mediaType, caps ); if ( hr != 0 ) Marshal.ThrowExceptionForHR( hr ); if ( mediaType.FormatType == FormatType.VideoInfo ) { var videoInfo = (VideoInfoHeader) Marshal.PtrToStructure( mediaType.FormatPtr, typeof( VideoInfoHeader ) ); FrameSize = new Size( videoInfo.BmiHeader.Width, videoInfo.BmiHeader.Height ); BitCount = videoInfo.BmiHeader.BitCount; AverageFrameRate = (int) ( 10000000 / videoInfo.AverageTimePerFrame ); MaximumFrameRate = (int) ( 10000000 / caps.MinFrameInterval ); } else if ( mediaType.FormatType == FormatType.VideoInfo2 ) { var videoInfo = (VideoInfoHeader2) Marshal.PtrToStructure( mediaType.FormatPtr, typeof( VideoInfoHeader2 ) ); FrameSize = new Size( videoInfo.BmiHeader.Width, videoInfo.BmiHeader.Height ); BitCount = videoInfo.BmiHeader.BitCount; AverageFrameRate = (int) ( 10000000 / videoInfo.AverageTimePerFrame ); MaximumFrameRate = (int) ( 10000000 / caps.MinFrameInterval ); } else { throw new ApplicationException( "Unsupported format found." ); } // ignore 12 bpp formats for now, since it was noticed they cause issues on Windows 8 // TODO: proper fix needs to be done so ICaptureGraphBuilder2::RenderStream() does not fail // on such formats if ( BitCount <= 12 ) { throw new ApplicationException( "Unsupported format found." ); } } finally { if ( mediaType != null ) mediaType.Dispose( ); } }
// Retrieve capabilities of a video device internal VideoCapabilities(IAMStreamConfig videoStreamConfig, int index) { AMMediaType mediaType = null; VideoStreamConfigCaps caps = new VideoStreamConfigCaps(); // retrieve capabilities struct at the specified index int hr = videoStreamConfig.GetStreamCaps(index, out mediaType, caps); if (hr != 0) Marshal.ThrowExceptionForHR(hr); // extract info MediaType = mediaType; FrameSize = caps.InputSize; MaxFrameRate = (int)(10000000 / caps.MinFrameInterval); }
// Token: 0x06000027 RID: 39 RVA: 0x00002718 File Offset: 0x00000918 internal VideoCapabilities(IAMStreamConfig videoStreamConfig, int index) { AMMediaType ammediaType = null; VideoStreamConfigCaps videoStreamConfigCaps = new VideoStreamConfigCaps(); try { int streamCaps = videoStreamConfig.GetStreamCaps(index, out ammediaType, videoStreamConfigCaps); if (streamCaps != 0) { Marshal.ThrowExceptionForHR(streamCaps); } if (ammediaType.FormatType == FormatType.VideoInfo) { VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(ammediaType.FormatPtr, typeof(VideoInfoHeader)); this.FrameSize = new Size(videoInfoHeader.BmiHeader.Width, videoInfoHeader.BmiHeader.Height); this.BitCount = (int)videoInfoHeader.BmiHeader.BitCount; this.AverageFrameRate = (int)(10000000L / videoInfoHeader.AverageTimePerFrame); this.MaximumFrameRate = (int)(10000000L / videoStreamConfigCaps.MinFrameInterval); } else { if (!(ammediaType.FormatType == FormatType.VideoInfo2)) { throw new ApplicationException("Unsupported format found."); } VideoInfoHeader2 videoInfoHeader2 = (VideoInfoHeader2)Marshal.PtrToStructure(ammediaType.FormatPtr, typeof(VideoInfoHeader2)); this.FrameSize = new Size(videoInfoHeader2.BmiHeader.Width, videoInfoHeader2.BmiHeader.Height); this.BitCount = (int)videoInfoHeader2.BmiHeader.BitCount; this.AverageFrameRate = (int)(10000000L / videoInfoHeader2.AverageTimePerFrame); this.MaximumFrameRate = (int)(10000000L / videoStreamConfigCaps.MinFrameInterval); } if (this.BitCount <= 12) { throw new ApplicationException("Unsupported format found."); } } finally { if (ammediaType != null) { ammediaType.Dispose(); } } }
void TestStreamCaps() { int hr; IntPtr pss; AMMediaType pmt; int iCount, iSize; hr = m_asc.GetNumberOfCapabilities(out iCount, out iSize); DsError.ThrowExceptionForHR(hr); pss = Marshal.AllocCoTaskMem(iCount * iSize); IntPtr pssBase = pss; // keep the allocation base so the correct pointer is freed below if (iSize == Marshal.SizeOf(typeof(VideoStreamConfigCaps))) { for (int x = 0; x < iCount; x++) { hr = m_asc.GetStreamCaps(x, out pmt, pss); DsError.ThrowExceptionForHR(hr); VideoStreamConfigCaps vscc = (VideoStreamConfigCaps)Marshal.PtrToStructure(pss, typeof(VideoStreamConfigCaps)); DsUtils.FreeAMMediaType(pmt); pss = (IntPtr)(pss.ToInt64() + Marshal.SizeOf(typeof(VideoStreamConfigCaps))); } } else if (iSize == Marshal.SizeOf(typeof(AudioStreamConfigCaps))) { for (int x = 0; x < iCount; x++) { hr = m_asc.GetStreamCaps(x, out pmt, pss); DsError.ThrowExceptionForHR(hr); AudioStreamConfigCaps ascc = (AudioStreamConfigCaps)Marshal.PtrToStructure(pss, typeof(AudioStreamConfigCaps)); DsUtils.FreeAMMediaType(pmt); pss = (IntPtr)(pss.ToInt64() + Marshal.SizeOf(typeof(AudioStreamConfigCaps))); } } else { Debug.Assert(false, "GetStreamCaps"); } Marshal.FreeCoTaskMem(pssBase); // free the original allocation, not the advanced cursor }
private void SetResolution(IAMStreamConfig streamConfig, VideoCapabilities resolution) { if (resolution == null) { return; } int capabilitiesCount = 0, capabilitySize = 0; AMMediaType newMediaType = null; VideoStreamConfigCaps caps = new VideoStreamConfigCaps( ); streamConfig.GetNumberOfCapabilities(out capabilitiesCount, out capabilitySize); for (int i = 0; i < capabilitiesCount; i++) { try { VideoCapabilities vc = new VideoCapabilities(streamConfig, i); if (resolution == vc) { if (streamConfig.GetStreamCaps(i, out newMediaType, caps) == 0) { break; } } } catch { } } if (newMediaType != null) { streamConfig.SetFormat(newMediaType); newMediaType.Dispose( ); } }
public int GetDefaultCaps(int nIndex, out VideoStreamConfigCaps _caps) { #if HAMED_LOG_METHOD_INFO MethodBase method = new StackTrace().GetFrame(0).GetMethod(); Console.WriteLine(this.GetType().FullName + " - " + method.Name + " - " + method.ToString()); #endif _caps = new VideoStreamConfigCaps(); _caps.guid = FormatType.VideoInfo; _caps.VideoStandard = AnalogVideoStandard.None; _caps.InputSize.Width = c_iDefaultWidth; _caps.InputSize.Height = c_iDefaultHeight; _caps.MinCroppingSize.Width = c_nMinWidth; _caps.MinCroppingSize.Height = c_nMinHeight; _caps.MaxCroppingSize.Width = c_nMaxWidth; _caps.MaxCroppingSize.Height = c_nMaxHeight; _caps.CropGranularityX = c_nGranularityW; _caps.CropGranularityY = c_nGranularityH; _caps.CropAlignX = 0; _caps.CropAlignY = 0; _caps.MinOutputSize.Width = _caps.MinCroppingSize.Width; _caps.MinOutputSize.Height = _caps.MinCroppingSize.Height; _caps.MaxOutputSize.Width = _caps.MaxCroppingSize.Width; _caps.MaxOutputSize.Height = _caps.MaxCroppingSize.Height; _caps.OutputGranularityX = _caps.CropGranularityX; _caps.OutputGranularityY = _caps.CropGranularityY; _caps.StretchTapsX = 0; _caps.StretchTapsY = 0; _caps.ShrinkTapsX = 0; _caps.ShrinkTapsY = 0; _caps.MinFrameInterval = UNITS / c_nMaxFPS; _caps.MaxFrameInterval = UNITS / c_nMinFPS; _caps.MinBitsPerSecond = (_caps.MinOutputSize.Width * _caps.MinOutputSize.Height * c_nDefaultBitCount) * c_nMinFPS; _caps.MaxBitsPerSecond = (_caps.MaxOutputSize.Width * _caps.MaxOutputSize.Height * c_nDefaultBitCount) * c_nMaxFPS; return(NOERROR); }
public int GetStreamCaps(int iIndex, out AMMediaType ppmt, out VideoStreamConfigCaps _caps) { ppmt = null; _caps = null; if (iIndex < 0) { return(E_INVALIDARG); } ppmt = new AMMediaType(); HRESULT hr = (HRESULT)GetMediaType(iIndex, ref ppmt); if (FAILED(hr)) { return(hr); } if (hr == VFW_S_NO_MORE_ITEMS) { return(S_FALSE); } hr = (HRESULT)GetDefaultCaps(iIndex, out _caps); return(hr); }
// Token: 0x0600037A RID: 890 RVA: 0x00014720 File Offset: 0x00012920 internal VideoCapabilities(IAMStreamConfig videoStreamConfig) { AMMediaType ammediaType = null; IntPtr intPtr = IntPtr.Zero; try { int num2; int num3; int num = videoStreamConfig.GetNumberOfCapabilities(ref num2, ref num3); intPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(VideoStreamConfigCaps))); IntPtr ptr; num = videoStreamConfig.GetStreamCaps(0, out ptr, intPtr); ammediaType = (AMMediaType)Marshal.PtrToStructure(ptr, typeof(AMMediaType)); VideoStreamConfigCaps videoStreamConfigCaps = (VideoStreamConfigCaps)Marshal.PtrToStructure(intPtr, typeof(VideoStreamConfigCaps)); this.InputSize = videoStreamConfigCaps.InputSize; this.MinFrameSize = videoStreamConfigCaps.MinOutputSize; this.MaxFrameSize = videoStreamConfigCaps.MaxOutputSize; this.FrameSizeGranularityX = videoStreamConfigCaps.OutputGranularityX; this.FrameSizeGranularityY = videoStreamConfigCaps.OutputGranularityY; this.MinFrameRate = 10000000.0 / (double)videoStreamConfigCaps.MaxFrameInterval; this.MaxFrameRate = 10000000.0 / (double)videoStreamConfigCaps.MinFrameInterval; } finally { if (intPtr != IntPtr.Zero) { Marshal.FreeCoTaskMem(intPtr); } intPtr = IntPtr.Zero; if (ammediaType != null) { DsUtils.FreeAMMediaType(ammediaType); } ammediaType = null; } }
/// <summary> /// Retrieve capabilities of a video device /// </summary> /// <param name="videoStreamConfig">The video stream configuration.</param> internal VideoCapabilities(IAMStreamConfig videoStreamConfig) { if (videoStreamConfig == null) { throw new ArgumentNullException("videoStreamConfig"); } AMMediaType mediaType = null; VideoStreamConfigCaps caps = null; IntPtr pCaps = IntPtr.Zero; try { // Ensure this device reports capabilities int c, size; int hr = videoStreamConfig.GetNumberOfCapabilities(out c, out size); if (hr != 0) { Marshal.ThrowExceptionForHR(hr); } if (c <= 0) { throw new NotSupportedException("This video device does not report capabilities."); } if (size > Marshal.SizeOf(typeof(VideoStreamConfigCaps))) { throw new NotSupportedException("Unable to retrieve video device capabilities. This video device requires a larger VideoStreamConfigCaps structure."); } // Alloc memory for structure pCaps = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(VideoStreamConfigCaps))); // Retrieve first (and hopefully only) capabilities struct hr = videoStreamConfig.GetStreamCaps(0, out mediaType, pCaps); if (hr != 0) { Marshal.ThrowExceptionForHR(hr); } // Convert pointers to managed structures caps = (VideoStreamConfigCaps)Marshal.PtrToStructure(pCaps, typeof(VideoStreamConfigCaps)); // Extract info InputSize = caps.InputSize; MinFrameSize = caps.MinOutputSize; MaxFrameSize = caps.MaxOutputSize; FrameSizeGranularityX = caps.OutputGranularityX; FrameSizeGranularityY = caps.OutputGranularityY; MinFrameRate = (double)10000000 / caps.MaxFrameInterval; MaxFrameRate = (double)10000000 / caps.MinFrameInterval; } finally { if (pCaps != IntPtr.Zero) { Marshal.FreeCoTaskMem(pCaps); } pCaps = IntPtr.Zero; if (mediaType != null) { DsUtils.FreeAMMediaType(mediaType); } mediaType = null; } }
/// <summary> /// This method returns the capabilities of the currently selected video device. /// That is a list of video sizes and a list of frame rates. /// </summary> /// <param name="videoDevice">[in] An <see cref="IBaseFilter"/> whose properties should be retrieved.</param> /// <param name="videoSizes">[out] A <see cref="List{Size}"/> with valid video sizes.</param> /// <param name="frameRates">[out] A <see cref="List{Int32}"/> with valid frame rates.</param> /// <returns><strong>True</strong>, if parsing was successful, otherwise <strong>false</strong></returns> public static bool GetVideoCaps(IBaseFilter videoDevice, out List <Size> videoSizes, out List <int> frameRates) { int hr; object o; int pinCount; int pinSize; videoSizes = new List <Size>(); frameRates = new List <int>(); if (videoDevice == null) { return(false); } // Create the Graph IGraphBuilder localGraphBuilder = (IGraphBuilder) new FilterGraph(); // Create the Capture Graph Builder ICaptureGraphBuilder2 captureGraphBuilder = null; captureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2(); try { // Attach the filter graph to the capture graph hr = captureGraphBuilder.SetFiltergraph(localGraphBuilder); DsError.ThrowExceptionForHR(hr); // Add the Video input device to the graph hr = localGraphBuilder.AddFilter(videoDevice, "video source filter"); DsError.ThrowExceptionForHR(hr); // Find the stream config interface hr = captureGraphBuilder.FindInterface( PinCategory.Capture, MediaType.Video, videoDevice, typeof(IAMStreamConfig).GUID, out o); DsError.ThrowExceptionForHR(hr); IAMStreamConfig videoStreamConfig = o as IAMStreamConfig; if (videoStreamConfig == null) { throw new Exception("Failed to get IAMStreamConfig"); } hr = videoStreamConfig.GetNumberOfCapabilities(out pinCount, out pinSize); DsError.ThrowExceptionForHR(hr); AMMediaType media; // copy out the videoinfoheader VideoStreamConfigCaps caps = new VideoStreamConfigCaps(); IntPtr capsPtr = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(VideoStreamConfigCaps))); Marshal.StructureToPtr(caps, capsPtr, false); for (int i = 0; i < pinCount; i++) { hr = videoStreamConfig.GetStreamCaps(i, out media, capsPtr); DsError.ThrowExceptionForHR(hr); Marshal.PtrToStructure(capsPtr, caps); // Get valid framerates int maxRate = (int)(10000000f / caps.MinFrameInterval); int minRate = (int)(10000000f / caps.MaxFrameInterval); // Paranoia check for wrongly initialized web cams // which don't use nanosecond units and instead report real frame rates if (caps.MinFrameInterval < 100) { minRate = (int)caps.MinFrameInterval; maxRate = (int)caps.MaxFrameInterval; } for (int j = minRate; j <= maxRate; j++) { if (!frameRates.Contains(j)) { frameRates.Add(j); } } // Get valid video sizes if (caps.MinOutputSize != caps.MaxOutputSize && caps.OutputGranularityX != 0) { int count = (caps.MaxOutputSize.Width - caps.MinOutputSize.Width) / caps.OutputGranularityX; for (int j = 0; j <= count; j++) { Size newSize = caps.MinOutputSize; newSize.Width += caps.OutputGranularityX * j; newSize.Height += caps.OutputGranularityY * j; if (!videoSizes.Contains(newSize)) { videoSizes.Add(newSize); } } } else { if (!videoSizes.Contains(caps.MinOutputSize)) { videoSizes.Add(caps.MinOutputSize); } } DsUtils.FreeAMMediaType(media); } Marshal.FreeHGlobal(capsPtr); } catch (Exception ex) { MessageBox.Show(ex.Message); return(false); } finally { if (localGraphBuilder != null) { Marshal.ReleaseComObject(localGraphBuilder); localGraphBuilder = null; } if (captureGraphBuilder != null) { Marshal.ReleaseComObject(captureGraphBuilder); captureGraphBuilder = null; } } return(true); }
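A hedged sketch of how GetVideoCaps above might be called. It assumes DirectShowLib is referenced and that GetVideoCaps is accessible from the calling class; the device-to-filter binding shown here is an illustration, not part of the original example.

// Hypothetical caller for GetVideoCaps (assumes DirectShowLib; device index 0 is arbitrary).
DsDevice[] cameras = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
if (cameras.Length > 0)
{
    object source;
    Guid iid = typeof(IBaseFilter).GUID;
    cameras[0].Mon.BindToObject(null, null, ref iid, out source);
    IBaseFilter videoDevice = (IBaseFilter)source;

    List<Size> sizes;
    List<int> rates;
    if (GetVideoCaps(videoDevice, out sizes, out rates))
    {
        foreach (Size s in sizes)
            Console.WriteLine("{0} x {1}", s.Width, s.Height);
        Console.WriteLine("Frame rates: {0}", string.Join(", ", rates));
    }

    Marshal.ReleaseComObject(videoDevice);
}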
// ----------------- Constructor --------------------- /// <summary> Retrieve capabilities of a video device </summary> internal VideoCapabilities(IAMStreamConfig videoStreamConfig) { if ( videoStreamConfig == null ) throw new ArgumentNullException( "videoStreamConfig" ); AMMediaType mediaType = null; VideoStreamConfigCaps caps = null; IntPtr pCaps = IntPtr.Zero; IntPtr pMediaType; try { // Ensure this device reports capabilities int c, size; int hr = videoStreamConfig.GetNumberOfCapabilities( out c, out size ); if ( hr != 0 ) Marshal.ThrowExceptionForHR( hr ); if ( c <= 0 ) throw new NotSupportedException( "This video device does not report capabilities." ); if ( size > Marshal.SizeOf( typeof( VideoStreamConfigCaps ) ) ) throw new NotSupportedException( "Unable to retrieve video device capabilities. This video device requires a larger VideoStreamConfigCaps structure." ); if ( c > 1 ) Debug.WriteLine("This video device supports " + c + " capability structures. Only the first structure will be used." ); // Alloc memory for structure pCaps = Marshal.AllocCoTaskMem( Marshal.SizeOf( typeof( VideoStreamConfigCaps ) ) ); #if DEBUG for(int i = c - 1; i >= 0; i--) { hr = videoStreamConfig.GetStreamCaps(i, out pMediaType, pCaps); #else // Retrieve first (and hopefully only) capabilities struct hr = videoStreamConfig.GetStreamCaps( 0, out pMediaType, pCaps ); #endif if ( hr != 0 ) Marshal.ThrowExceptionForHR( hr ); // Convert pointers to managed structures mediaType = (AMMediaType)Marshal.PtrToStructure(pMediaType, typeof(AMMediaType)); // Convert pointers to managed structures caps = (VideoStreamConfigCaps) Marshal.PtrToStructure(pCaps, typeof(VideoStreamConfigCaps)); // Extract info InputSize = caps.InputSize; MinFrameSize = caps.MinOutputSize; MaxFrameSize = caps.MaxOutputSize; FrameSizeGranularityX = caps.OutputGranularityX; FrameSizeGranularityY = caps.OutputGranularityY; MinFrameRate = (double)10000000 / caps.MaxFrameInterval; MaxFrameRate = (double)10000000 / caps.MinFrameInterval; #if DEBUG if (caps.VideoStandard > AnalogVideoStandard.None) { Debug.WriteLine("Caps=" + caps.InputSize + " " + caps.MinOutputSize + " " + caps.MaxOutputSize + " " + MinFrameRate + "-" + MaxFrameRate + " " + caps.VideoStandard); Debug.WriteLine("MediaType=" + mediaType.majorType + " " + mediaType.subType + " " + mediaType.formatType + " " + mediaType.formatSize + " " + mediaType.fixedSizeSamples + " " + mediaType.sampleSize + " " + mediaType.temporalCompression); } } #endif } finally { if ( pCaps != IntPtr.Zero ) Marshal.FreeCoTaskMem( pCaps ); pCaps = IntPtr.Zero; if ( mediaType != null ) DsUtils.FreeAMMediaType( mediaType ); mediaType = null; } }
/// <summary> /// Set the video capabilities. /// </summary> /// <param name="bldr">Specifies the capture builder</param> /// <param name="flt">Specifies the video filter.</param> /// <param name="vidCap">Specifies the desired capabilities.</param> /// <returns><i>true</i> is returned if set, otherwise <i>false</i>.</returns> /// <remarks> /// @see http://blog.dvdbuilder.com/setting-video-capture-format-directshow-net /// </remarks> private bool setVideoCapabilities(ICaptureGraphBuilder2 bldr, IBaseFilter flt, VideoCapability vidCap) { int hr; Guid cat = PinCategory.Capture; Guid type = MediaType.Interleaved; Guid iid = typeof(IAMStreamConfig).GUID; object comObj = null; IntPtr pSC = IntPtr.Zero; AMMediaType mt = null; try { hr = bldr.FindInterface(ref cat, ref type, flt, ref iid, out comObj); if (hr != 0) { type = MediaType.Video; hr = bldr.FindInterface(ref cat, ref type, flt, ref iid, out comObj); } if (hr < 0) { Marshal.ThrowExceptionForHR(hr); } IAMStreamConfig cfg = comObj as IAMStreamConfig; int nCount; int nSize; hr = cfg.GetNumberOfCapabilities(out nCount, out nSize); if (hr < 0) { Marshal.ThrowExceptionForHR(hr); } VideoInfoHeader vih = new VideoInfoHeader(); VideoStreamConfigCaps vsc = new VideoStreamConfigCaps(); pSC = Marshal.AllocCoTaskMem(nSize); for (int i = 0; i < nCount; i++) { mt = null; IntPtr pMT; hr = cfg.GetStreamCaps(i, out pMT, pSC); if (hr == 0) { mt = Marshal.PtrToStructure <AMMediaType>(pMT); Marshal.PtrToStructure(mt.formatPtr, vih); Marshal.PtrToStructure(pSC, vsc); int nMinFps = (int)(10000000 / vsc.MaxFrameInterval); int nMaxFps = (int)(10000000 / vsc.MinFrameInterval); if ((vih.BmiHeader.Width == vidCap.Width || vidCap.Width == 0) && (vih.BmiHeader.Height == vidCap.Height || vidCap.Height == 0) && ((nMinFps <= vidCap.TargetFPS && nMaxFps >= vidCap.TargetFPS) || vidCap.TargetFPS == 0)) { break; } } if (mt != null) { Marshal.FreeCoTaskMem(mt.formatPtr); mt = null; } } if (mt == null) { return(false); } cfg.SetFormat(mt); } catch (Exception excpt) { return(false); } finally { if (comObj != null) { Marshal.ReleaseComObject(comObj); } if (pSC != IntPtr.Zero) { Marshal.FreeCoTaskMem(pSC); } if (mt != null) { Marshal.FreeCoTaskMem(mt.formatPtr); } } return(true); }
/// <summary> /// Returns the video capabilities of the video device. /// </summary> /// <param name="filter">Specifies the video device.</param> /// <returns>A collection of video capabilities is returned for the device.</returns> public VideoCapabilityCollection GetVideoCapatiblities(Filter filter) { int hr; IFilterGraph2 grph = null; IBaseFilter camFltr = null; ICaptureGraphBuilder2 bldr = null; object comObj = null; AMMediaType mt = null; IntPtr pSC = IntPtr.Zero; VideoCapabilityCollection colCap = new VideoCapabilityCollection(); try { if (filter == null) { return(colCap); } grph = (IFilterGraph2)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.FilterGraph, true)); IMoniker moniker = filter.CreateMoniker(); grph.AddSourceFilterForMoniker(moniker, null, filter.Name, out camFltr); Marshal.ReleaseComObject(moniker); bldr = (ICaptureGraphBuilder2)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.CaptureGraphBuilder2, true)); hr = bldr.SetFiltergraph(grph as IGraphBuilder); if (hr < 0) { Marshal.ThrowExceptionForHR(hr); } // Add the web-cam filter to the graph. hr = grph.AddFilter(camFltr, filter.Name); if (hr < 0) { Marshal.ThrowExceptionForHR(hr); } // Get the IAMStreamConfig interface. Guid cat = PinCategory.Capture; Guid type = MediaType.Interleaved; Guid iid = typeof(IAMStreamConfig).GUID; hr = bldr.FindInterface(ref cat, ref type, camFltr, ref iid, out comObj); if (hr < 0) { type = MediaType.Video; hr = bldr.FindInterface(ref cat, ref type, camFltr, ref iid, out comObj); } if (hr < 0) { Marshal.ThrowExceptionForHR(hr); } IAMStreamConfig cfg = comObj as IAMStreamConfig; // Enumerate the video capabilities. int nCount; int nSize; hr = cfg.GetNumberOfCapabilities(out nCount, out nSize); if (hr < 0) { Marshal.ThrowExceptionForHR(hr); } VideoInfoHeader vih = new VideoInfoHeader(); VideoStreamConfigCaps vsc = new VideoStreamConfigCaps(); pSC = Marshal.AllocCoTaskMem(nSize); for (int i = 0; i < nCount; i++) { IntPtr pMT; hr = cfg.GetStreamCaps(i, out pMT, pSC); if (hr < 0) { Marshal.ThrowExceptionForHR(hr); } mt = Marshal.PtrToStructure <AMMediaType>(pMT); Marshal.PtrToStructure(mt.formatPtr, vih); Marshal.PtrToStructure(pSC, vsc); int nWidth = vih.BmiHeader.Width; int nHeight = vih.BmiHeader.Height; int nFpsMin = (int)(10000000 / vsc.MaxFrameInterval); int nFpsMax = (int)(10000000 / vsc.MinFrameInterval); colCap.Add(new VideoCapability(nWidth, nHeight, nFpsMin, nFpsMax)); if (mt != null) { Marshal.FreeCoTaskMem(mt.formatPtr); mt = null; } } } catch (Exception excpt) { throw excpt; } finally { if (mt != null) { Marshal.FreeCoTaskMem(mt.formatPtr); } if (comObj != null) { Marshal.ReleaseComObject(comObj); } if (pSC != IntPtr.Zero) { Marshal.FreeCoTaskMem(pSC); } if (bldr != null) { Marshal.ReleaseComObject(bldr); } if (camFltr != null) { Marshal.ReleaseComObject(camFltr); } if (grph != null) { Marshal.ReleaseComObject(grph); } } return(colCap); }
private void SetCaptureResolution(ChannelAnalogic.CaptureFormat captureFormat) //Size captureResolution) { object o = null; int hr = this.captureGraphBuilder.FindInterface(null, //PinCategory.Preview, // Preview pin. MediaType.Video, //null, // Any media type. this.videoCaptureFilter, // Pointer to the capture filter. typeof(IAMStreamConfig).GUID, out o); if (hr >= 0) { IAMStreamConfig amStreamConfig = o as IAMStreamConfig; AMMediaType mediaType; hr = amStreamConfig.GetFormat(out mediaType); if (hr >= 0) { if ((mediaType.majorType == MediaType.Video) && (mediaType.formatType == FormatType.VideoInfo) && (mediaType.formatSize >= Marshal.SizeOf(typeof(VideoInfoHeader))) && (mediaType.formatPtr != IntPtr.Zero)) { VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.formatPtr, typeof(VideoInfoHeader)); Size resolution = new Size(videoInfoHeader.BmiHeader.Width, videoInfoHeader.BmiHeader.Height); int framePerSecond = (int)(10000000.0 / videoInfoHeader.AvgTimePerFrame); string mediaSubType = (string)DeviceEnumerator.MediaSubTypeByGUID[mediaType.subType]; if (captureFormat.Resolution == resolution && captureFormat.FramePerSecond == framePerSecond && captureFormat.MediaSubType == mediaSubType) { return; } } DsUtils.FreeAMMediaType(mediaType); } int iCount = 0, iSize = 0; hr = amStreamConfig.GetNumberOfCapabilities(out iCount, out iSize); // Check the size to make sure we pass in the correct structure. if (iSize == Marshal.SizeOf(typeof(VideoStreamConfigCaps))) { // Use the video capabilities structure. VideoStreamConfigCaps scc = new VideoStreamConfigCaps(); GCHandle gchScc = GCHandle.Alloc(scc, GCHandleType.Pinned); IntPtr pScc = gchScc.AddrOfPinnedObject(); for (int iFormat = 0; iFormat < iCount; iFormat++) { hr = amStreamConfig.GetStreamCaps(iFormat, out mediaType, pScc); if (hr >= 0) { if (mediaType != null && mediaType.majorType == MediaType.Video && mediaType.formatType == FormatType.VideoInfo && mediaType.formatSize >= Marshal.SizeOf(typeof(VideoInfoHeader)) && mediaType.formatPtr != IntPtr.Zero) { VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.formatPtr, typeof(VideoInfoHeader)); Size resolution = new Size(videoInfoHeader.BmiHeader.Width, videoInfoHeader.BmiHeader.Height); int framePerSecond = (int)(10000000.0 / videoInfoHeader.AvgTimePerFrame); string mediaSubType = (string)DeviceEnumerator.MediaSubTypeByGUID[mediaType.subType]; if (captureFormat.Resolution == resolution && captureFormat.FramePerSecond == framePerSecond && captureFormat.MediaSubType == mediaSubType) { StopGraph(); hr = amStreamConfig.SetFormat(mediaType); break; } DsUtils.FreeAMMediaType(mediaType); } } } gchScc.Free(); } } }
// Retrieve capabilities of a video device internal VideoCapabilities( IAMStreamConfig videoStreamConfig, int index ) { AMMediaType mediaType = null; var caps = new VideoStreamConfigCaps( ); try { // retrieve capabilities struct at the specified index int hr = videoStreamConfig.GetStreamCaps( index, out mediaType, caps ); if ( hr != 0 ) Marshal.ThrowExceptionForHR( hr ); if ( mediaType.FormatType == FormatType.VideoInfo ) { var videoInfo = (VideoInfoHeader) Marshal.PtrToStructure( mediaType.FormatPtr, typeof( VideoInfoHeader ) ); FrameSize = new Size( videoInfo.BmiHeader.Width, videoInfo.BmiHeader.Height ); BitCount = videoInfo.BmiHeader.BitCount; AverageFrameRate = (int) ( 10000000 / videoInfo.AverageTimePerFrame ); MaximumFrameRate = (int) ( 10000000 / caps.MinFrameInterval ); } else if ( mediaType.FormatType == FormatType.VideoInfo2 ) { var videoInfo = (VideoInfoHeader2) Marshal.PtrToStructure( mediaType.FormatPtr, typeof( VideoInfoHeader2 ) ); FrameSize = new Size( videoInfo.BmiHeader.Width, videoInfo.BmiHeader.Height ); BitCount = videoInfo.BmiHeader.BitCount; AverageFrameRate = (int) ( 10000000 / videoInfo.AverageTimePerFrame ); MaximumFrameRate = (int) ( 10000000 / caps.MinFrameInterval ); } else { throw new ApplicationException( "Unsupported format found." ); } } finally { mediaType?.Dispose( ); } }
private (LocalVideoSourceCapability[] caps, InputDeviceState state) GetCapabilities(DsDevice device) { if (_initialLogging) { Log.Information($"Caps {device.Name}: getting"); } var list = new List <LocalVideoSourceCapability>(); IntPtr pCaps = IntPtr.Zero; IFilterGraph2 filterGraph2 = null; IBaseFilter sourceFilter = null; IAMStreamConfig streamConfig = null; object pin = null; InputDeviceState state = InputDeviceState.Ready; try { filterGraph2 = new FilterGraph() as IFilterGraph2; if (filterGraph2 == null) { throw new NotSupportedException("filter2 is null"); } LocalVideoSourceManager.AddCaptureFilter(filterGraph2, device, out sourceFilter); pin = DsFindPin.ByCategory(sourceFilter, PinCategory.Capture, 0); if (pin == null) { pin = sourceFilter; } streamConfig = pin as IAMStreamConfig; if (streamConfig == null) { throw new NotSupportedException("pin is null"); } int count = 0; int size = 0; Checked(() => streamConfig.GetNumberOfCapabilities(out count, out size), "GetNumberOfCapabilities", null); if (count <= 0) { throw new NotSupportedException("This video source does not report capabilities."); } if (size != Marshal.SizeOf(typeof(VideoStreamConfigCaps))) { throw new NotSupportedException("Unable to retrieve video source capabilities. This video source requires a larger VideoStreamConfigCaps structure."); } // Alloc memory for structure pCaps = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(VideoStreamConfigCaps))); for (int i = 0; i < count; i++) { AMMediaType mediaType = null; Checked(() => streamConfig.GetStreamCaps(i, out mediaType, pCaps), "GetStreamCaps", null); VideoStreamConfigCaps caps = (VideoStreamConfigCaps)Marshal.PtrToStructure(pCaps, typeof(VideoStreamConfigCaps)); var format = GetMediaTypeInfo(mediaType, out var height, out var width, out var compression, out var videoInfoHeader, out var videoInfoHeader2); var result = new LocalVideoSourceCapability() { MaxF = GetFps(caps.MinFrameInterval), MinF = GetFps(caps.MaxFrameInterval), Fmt = format, W = width, H = height, }; list.Add(result); } } catch (UnauthorizedAccessException e) { Log.Warning(e, $"Error while retrieving caps for '{device.Name}' (Locked)"); state = InputDeviceState.Locked; } catch (Exception e) { Log.Error(e, $"Error while retrieving caps for '{device.Name}'"); state = InputDeviceState.Failed; } finally { if (pCaps != IntPtr.Zero) { Marshal.FreeCoTaskMem(pCaps); } } try { ReleaseComObject(sourceFilter); ReleaseComObject(filterGraph2); ReleaseComObject(streamConfig); ReleaseComObject(pin); } catch (Exception e) { Log.Error(e, $"ReleaseComObject('{device.Name}') failed"); } if (_initialLogging) { Log.Information($"Caps {device.Name}: {string.Join("; ", list.Select(s => s.ToString()))}"); } return(list.ToArray(), state); }
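The example above calls a GetFps helper that is not shown. A plausible stand-in, assuming the same 100-ns frame-interval convention as the other snippets, might look like the following; this is a guess for illustration, not the project's actual implementation.

// Hypothetical GetFps helper (assumption): converts a 100-ns frame interval to FPS.
private static double GetFps(long frameInterval)
{
    return frameInterval > 0 ? 10000000.0 / frameInterval : 0.0;
}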
private void InitFormatsList() { int hr = 0; if (null == VideoConfig) { throw new Exception("VideoConfig not set"); } // enumerate the capabilities of the video capture device int capsCount, capSize; hr = VideoConfig.GetNumberOfCapabilities(out capsCount, out capSize); DsError.ThrowExceptionForHR(hr); VideoInfoHeader vih = new VideoInfoHeader(); VideoStreamConfigCaps vsc = new VideoStreamConfigCaps(); IntPtr pSC = Marshal.AllocHGlobal(capSize); try { int videoFormatIndex = -1; int minFps = -1; int maxFps = -1; int currentWidth = 0; int currentHeight = 0; Guid currentSubType; int currentFps = 0; { AMMediaType mt = null; hr = VideoConfig.GetFormat(out mt); Marshal.PtrToStructure(mt.formatPtr, vih); DsError.ThrowExceptionForHR(hr); currentFps = (int)(10000000.0 / vih.AvgTimePerFrame); currentWidth = vih.BmiHeader.Width; currentHeight = vih.BmiHeader.Height; currentSubType = mt.subType; DsUtils.FreeAMMediaType(mt); } for (int i = 0; i < capsCount; ++i) { AMMediaType mt = null; // the video format is described in AMMediaType and VideoStreamConfigCaps hr = VideoConfig.GetStreamCaps(i, out mt, pSC); DsError.ThrowExceptionForHR(hr); if (mt.formatType == DirectShowLib.FormatType.VideoInfo) { string formatName = GetSubtypeString(mt.subType); if (!string.IsNullOrEmpty(formatName)) { // copy the unmanaged structures to managed in order to check the format Marshal.PtrToStructure(mt.formatPtr, vih); Marshal.PtrToStructure(pSC, vsc); int fps = (int)(10000000.0 / vsc.MaxFrameInterval); if ((minFps < 0) || (minFps > fps)) { minFps = fps; } fps = (int)(10000000.0 / vsc.MinFrameInterval); if ((maxFps < 0) || (maxFps < fps)) { maxFps = fps; } string capline = String.Format("{0} x {1}, min fps {2:0.}, max fps {3:0.}, {4}", vih.BmiHeader.Width, vih.BmiHeader.Height, 10000000.0 / vsc.MaxFrameInterval, 10000000.0 / vsc.MinFrameInterval, formatName); if ((vih.BmiHeader.Width == currentWidth) && (vih.BmiHeader.Height == currentHeight) && (mt.subType == currentSubType)) { videoFormatIndex = comboBoxFormats.Items.Count; } ComboboxItem item = new ComboboxItem(); item.Text = capline; item.Value = i; comboBoxFormats.Items.Add(item); } } DsUtils.FreeAMMediaType(mt); } if (videoFormatIndex >= 0) { comboBoxFormats.SelectedIndex = videoFormatIndex; } if ((minFps >= 0) && (maxFps >= 0)) { for (int i = minFps; i <= maxFps; i++) { ComboboxItem item = new ComboboxItem(); item.Text = i.ToString(); item.Value = i; comboBoxFrameRate.Items.Add(item); if (currentFps == i) { comboBoxFrameRate.SelectedIndex = comboBoxFrameRate.Items.Count - 1; } } } } finally { Marshal.FreeHGlobal(pSC); } }
// Set resolution for the specified stream configuration private void SetResolution( IAMStreamConfig streamConfig, VideoCapabilities resolution ) { if ( resolution == null ) { return; } // iterate through device's capabilities to find mediaType for desired resolution int capabilitiesCount, capabilitySize; AMMediaType newMediaType = null; var caps = new VideoStreamConfigCaps( ); streamConfig.GetNumberOfCapabilities( out capabilitiesCount, out capabilitySize ); for ( int i = 0; i < capabilitiesCount; i++ ) { try { var vc = new VideoCapabilities( streamConfig, i ); if ( resolution == vc ) { if ( streamConfig.GetStreamCaps( i, out newMediaType, caps ) == 0 ) { break; } } } catch { } } // set the new format if ( newMediaType != null ) { streamConfig.SetFormat( newMediaType ); newMediaType.Dispose( ); } }
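The SetResolution and VideoCapabilities snippets above appear to be the internals behind AForge.Video.DirectShow's public VideoCaptureDevice API. A hedged usage sketch follows, assuming the AForge.Video.DirectShow package is referenced; device index 0 and the "largest frame area" selection rule are illustrative choices.

// Hedged usage sketch: selecting a capability through the public AForge API that the
// internal SetResolution/VideoCapabilities code above supports.
var devices = new FilterInfoCollection(FilterCategory.VideoInputDevice);
if (devices.Count > 0)
{
    var camera = new VideoCaptureDevice(devices[0].MonikerString);

    // Pick the capability with the largest frame area; the library applies it to the
    // capture pin's IAMStreamConfig (via code like SetResolution above) when it starts.
    VideoCapabilities best = null;
    foreach (VideoCapabilities cap in camera.VideoCapabilities)
    {
        if (best == null ||
            cap.FrameSize.Width * cap.FrameSize.Height >
            best.FrameSize.Width * best.FrameSize.Height)
        {
            best = cap;
        }
    }

    if (best != null)
    {
        camera.VideoResolution = best;
    }

    camera.NewFrame += (sender, args) => { /* process args.Frame */ };
    camera.Start();
}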