/// <summary>
/// Initializes a new instance of the <see cref="CaptureDeviceChooser"/> class.
/// Enumerates all video capture devices currently available through Media Foundation
/// and wraps each one in a <see cref="CaptureDeviceInfo"/>.
/// </summary>
public CaptureDeviceChooser()
{
    MF.Activate[] deviceActivates;
    using (MF.MediaAttributes enumAttributes = new MF.MediaAttributes())
    {
        // Restrict enumeration to video capture sources.
        // Guid value taken from mfidl.h (presumably MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID)
        enumAttributes.Set<Guid>(
            MF.CaptureDeviceAttributeKeys.SourceType,
            new Guid("8ac3587a-4ae7-42d8-99e0-0a6013eef90f"));

        // Query for all devices matching the attributes
        deviceActivates = MF.MediaFactory.EnumDeviceSources(enumAttributes);
    }

    // Guard against a null result so the loop below always sees a valid array
    if (deviceActivates == null)
    {
        deviceActivates = new MF.Activate[0];
    }

    // Wrap every activation object in an info object
    m_captureDeviceInfos = new CaptureDeviceInfo[deviceActivates.Length];
    for (int actIndex = 0; actIndex < deviceActivates.Length; actIndex++)
    {
        m_captureDeviceInfos[actIndex] = new CaptureDeviceInfo(deviceActivates[actIndex]);
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="MediaFoundationVideoReader"/> class.
/// Creates a Media Foundation source reader for the given capture device, forces
/// RGB32 output on the first video stream, and caches the source's frame size
/// and characteristics.
/// </summary>
/// <param name="captureDevice">The capture device.</param>
public MediaFoundationVideoReader(CaptureDeviceInfo captureDevice)
{
    captureDevice.EnsureNotNullOrDisposed(nameof(captureDevice));

    try
    {
        // Create the source reader
        using (MF.MediaAttributes mediaAttributes = new MF.MediaAttributes(1))
        {
            // We need the 'EnableVideoProcessing' attribute because of the RGB32 format
            // see (lowest post): http://msdn.developer-works.com/article/11388495/How+to+use+SourceReader+(for+H.264+to+RGB+conversion)%3F
            mediaAttributes.Set(MF.SourceReaderAttributeKeys.EnableVideoProcessing, 1);
            // NOTE(review): DXVA hardware acceleration is explicitly disabled here —
            // presumably required for the software RGB32 conversion path; confirm.
            mediaAttributes.Set(MF.SourceReaderAttributeKeys.DisableDxva, 1);
            // Ensure the media source is shut down together with the reader
            mediaAttributes.Set(MF.SourceReaderAttributeKeys.DisconnectMediasourceOnShutdown, 1);

            // Create the MediaSource object by given capture device.
            // The MediaSource is disposed here because the SourceReader holds its
            // own reference after construction.
            using (MF.MediaSource mediaSource = captureDevice.CreateMediaSource())
            {
                // Create the source reader
                m_sourceReader = new MF.SourceReader(mediaSource, mediaAttributes);
            }
        }

        // Apply source configuration: request RGB32 video frames on the first
        // video stream and make sure that stream is selected for reading.
        using (MF.MediaType mediaType = new MF.MediaType())
        {
            mediaType.Set(MF.MediaTypeAttributeKeys.MajorType, MF.MediaTypeGuids.Video);
            mediaType.Set(MF.MediaTypeAttributeKeys.Subtype, MF.VideoFormatGuids.Rgb32);
            m_sourceReader.SetCurrentMediaType(
                MF.SourceReaderIndex.FirstVideoStream,
                mediaType);
            m_sourceReader.SetStreamSelection(
                MF.SourceReaderIndex.FirstVideoStream,
                new SharpDX.Mathematics.Interop.RawBool(true));
        }

        // Read some information about the source: the frame size is packed into
        // a single 64-bit value (width/height as two MF-encoded 32-bit ints).
        using (MF.MediaType mediaType = m_sourceReader.GetCurrentMediaType(MF.SourceReaderIndex.FirstVideoStream))
        {
            long frameSizeLong = mediaType.Get(MF.MediaTypeAttributeKeys.FrameSize);
            m_frameSize = new Size2(MFHelper.GetValuesByMFEncodedInts(frameSizeLong));
        }

        // Get additional properties.
        // NOTE(review): duration is fixed at 0 here — presumably because a live
        // capture device has no finite duration; confirm against other ctors.
        m_durationLong = 0;
        m_characteristics = (MediaSourceCharacteristics_Internal)m_sourceReader.GetPresentationAttribute(
            MF.SourceReaderIndex.MediaSource, MF.SourceReaderAttributeKeys.MediaSourceCharacteristics);
    }
    catch (Exception)
    {
        // Release any native resources acquired so far, then rethrow
        // (preserving the original stack trace).
        this.Dispose();
        throw;
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="AsyncRealtimeVideoReader"/> class.
/// </summary>
/// <param name="captureDevice">The capture device.</param>
/// <param name="immediateStart">True to start video reading immediately.</param>
public AsyncRealtimeVideoReader(CaptureDeviceInfo captureDevice, bool immediateStart = true)
    : base(captureDevice)
{
    // No frame has been received yet: buffer state starts out empty.
    m_currentBufferLock = new object();
    m_currentBufferTimestamp = DateTime.MinValue;
    m_currentBuffer = null;

    // Kick off background reading right away when requested
    if (immediateStart)
    {
        Start();
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="FrameByFrameVideoReader"/> class.
/// All setup is performed by the base class constructor.
/// </summary>
/// <param name="captureDevice">The capture device.</param>
public FrameByFrameVideoReader(CaptureDeviceInfo captureDevice)
    : base(captureDevice)
{

}