/// <summary>
/// Initializes a new instance of the <see cref="MediaFoundationVideoReader"/> class
/// reading from the given capture device.
/// </summary>
/// <param name="captureDevice">The capture device.</param>
public MediaFoundationVideoReader(CaptureDeviceInfo captureDevice)
{
    captureDevice.EnsureNotNullOrDisposed(nameof(captureDevice));

    try
    {
        // Build the SourceReader for the capture device
        using (MF.MediaAttributes readerAttributes = new MF.MediaAttributes(1))
        {
            // 'EnableVideoProcessing' is required for the RGB32 output format,
            // see (lowest post): http://msdn.developer-works.com/article/11388495/How+to+use+SourceReader+(for+H.264+to+RGB+conversion)%3F
            readerAttributes.Set(MF.SourceReaderAttributeKeys.EnableVideoProcessing, 1);
            readerAttributes.Set(MF.SourceReaderAttributeKeys.DisableDxva, 1);
            readerAttributes.Set(MF.SourceReaderAttributeKeys.DisconnectMediasourceOnShutdown, 1);

            // The MediaSource is only needed while constructing the reader,
            // so it is disposed again right afterwards
            using (MF.MediaSource captureSource = captureDevice.CreateMediaSource())
            {
                m_sourceReader = new MF.SourceReader(captureSource, readerAttributes);
            }
        }

        // Request RGB32 output on the first video stream
        using (MF.MediaType requestedType = new MF.MediaType())
        {
            requestedType.Set(MF.MediaTypeAttributeKeys.MajorType, MF.MediaTypeGuids.Video);
            requestedType.Set(MF.MediaTypeAttributeKeys.Subtype, MF.VideoFormatGuids.Rgb32);
            m_sourceReader.SetCurrentMediaType(
                MF.SourceReaderIndex.FirstVideoStream,
                requestedType);
            m_sourceReader.SetStreamSelection(
                MF.SourceReaderIndex.FirstVideoStream,
                new SharpDX.Mathematics.Interop.RawBool(true));
        }

        // Query the negotiated media type to find out the actual frame size
        using (MF.MediaType negotiatedType = m_sourceReader.GetCurrentMediaType(MF.SourceReaderIndex.FirstVideoStream))
        {
            long packedFrameSize = negotiatedType.Get(MF.MediaTypeAttributeKeys.FrameSize);
            m_frameSize = new Size2(MFHelper.GetValuesByMFEncodedInts(packedFrameSize));
        }

        // No duration is read for a capture device, default it to zero
        m_durationLong = 0;
        m_characteristics = (MediaSourceCharacteristics_Internal)m_sourceReader.GetPresentationAttribute(
            MF.SourceReaderIndex.MediaSource, MF.SourceReaderAttributeKeys.MediaSourceCharacteristics);
    }
    catch (Exception)
    {
        // Release anything that was created before the failure, then rethrow
        this.Dispose();
        throw;
    }
}
/// <summary>
/// Gets the decoded PCM samples. See remarks.
/// </summary>
/// <param name="startingPositionInSeconds">The starting position in seconds.</param>
/// <returns>An enumerator of pointer to PCM decoded data with the same format as returned by <see cref="WaveFormat"/>.</returns>
/// <remarks>
/// This method is only working as a single enumerator at a time.
/// The <see cref="SetSourceStream(System.IO.Stream)"/> must be set before calling <see cref="GetSamples()"/>.
/// Each yielded <see cref="DataPointer"/> is only valid until the next MoveNext call
/// (the underlying media buffer is unlocked and disposed when iteration resumes).
/// </remarks>
public IEnumerable <DataPointer> GetSamples(TimeSpan startingPositionInSeconds)
{
    // A new reader is setup, so initialize it.
    // The swap happens under the lock so a concurrent source change cannot
    // race with picking up the pending reader.
    lock (sourceReaderLock)
    {
        // If the reader was changed
        if (nextSourceReader != null)
        {
            if (sourceReader != null)
            {
                sourceReader.Dispose();
            }
            sourceReader = nextSourceReader;
            nextSourceReader = null;
        }
    }

    // Make sure that any prior call's sample/buffer fields are released
    // before starting a new enumeration.
    CleanupAndDispose();

    CheckIfDisposed();

    // Set the position
    // NOTE(review): the value passed is TimeSpan.Ticks (100-ns units) despite the
    // parameter name saying "seconds" — presumably the reader expects 100-ns units;
    // confirm against the SetCurrentPosition contract.
    sourceReader.SetCurrentPosition((long)(startingPositionInSeconds.Ticks));

    while (true)
    {
        int streamIndex;
        SourceReaderFlags flags;
        long time;

        CheckIfDisposed();

        // currentSample is deliberately a field (not a local): if the consumer
        // disposes this object while suspended at the yield below, the owner can
        // release the sample from outside this iterator.
        using (currentSample = sourceReader.ReadSample(SourceReaderIndex.FirstAudioStream, SourceReaderControlFlags.None, out streamIndex, out flags, out time))
        {
            // Stop enumerating once the reader signals end-of-stream.
            if ((flags & SourceReaderFlags.Endofstream) != 0)
            {
                break;
            }

            CheckIfDisposed();

            // Same field-based pattern as currentSample, for the same reason.
            using (currentBuffer = currentSample.ConvertToContiguousBuffer())
            {
                int bufferMaxLength;
                int bufferCurrentLength;

                CheckIfDisposed();

                var ptr = currentBuffer.Lock(out bufferMaxLength, out bufferCurrentLength);

                yield return(new DataPointer(ptr, bufferCurrentLength));

                // Warning, because the yield could never return here, currentBuffer and currentSample should be disposed when disposing this object or when
                // calling it again on the GetSamples method.

                // In case a Dispose occurred while decoding
                // (the field was nulled externally, so there is nothing left to unlock)
                if (currentBuffer == null)
                {
                    break;
                }

                currentBuffer.Unlock();
            }
        }
    }

    // They have been disposed, so we can just clear them
    currentBuffer = null;
    currentSample = null;
}
/// <summary>
/// Initializes a new instance of the <see cref="MediaFoundationVideoReader"/> class
/// reading from the given video file.
/// </summary>
/// <param name="videoSource">The source video file.</param>
public MediaFoundationVideoReader(ResourceLink videoSource)
{
    videoSource.EnsureNotNull(nameof(videoSource));

    try
    {
        m_videoSource = videoSource;

        // Build the SourceReader for the given file
        using (MF.MediaAttributes readerAttributes = new MF.MediaAttributes(1))
        {
            // 'EnableVideoProcessing' is required for the RGB32 output format,
            // see (lowest post): http://msdn.developer-works.com/article/11388495/How+to+use+SourceReader+(for+H.264+to+RGB+conversion)%3F
            readerAttributes.Set(MF.SourceReaderAttributeKeys.EnableVideoProcessing, 1);
            readerAttributes.Set(MF.SourceReaderAttributeKeys.DisableDxva, 1);

            // Wrap the .net stream into a MediaFoundation ByteStream
            m_videoSourceStreamNet = m_videoSource.OpenInputStream();
            m_videoSourceStream = new MF.ByteStream(m_videoSourceStreamNet);

            // Give the byte stream a dummy origin name carrying the real file
            // extension so MediaFoundation can pick the right decoder
            try
            {
                using (MF.MediaAttributes streamAttributes = m_videoSourceStream.QueryInterface<MF.MediaAttributes>())
                {
                    streamAttributes.Set(
                        MF.ByteStreamAttributeKeys.OriginName,
                        "Dummy." + videoSource.FileExtension);
                }
            }
            catch (SharpDXException)
            {
                // The interface MF.MediaAttributes is not available on some platforms
                // (occured during tests on Windows 7 without Platform Update)
            }

            // Create the SourceReader through a custom native call
            // (needed because of the ByteStream argument)
            IntPtr readerNativePointer = IntPtr.Zero;
            SharpDX.Result creationResult = NativeMethods.MFCreateSourceReaderFromByteStream_Native(
                m_videoSourceStream.NativePointer,
                readerAttributes.NativePointer,
                out readerNativePointer);
            creationResult.CheckError();

            m_sourceReader = new MF.SourceReader(readerNativePointer);
        }

        // Request RGB32 output on the first video stream
        using (MF.MediaType requestedType = new MF.MediaType())
        {
            requestedType.Set(MF.MediaTypeAttributeKeys.MajorType, MF.MediaTypeGuids.Video);
            requestedType.Set(MF.MediaTypeAttributeKeys.Subtype, MF.VideoFormatGuids.Rgb32);
            m_sourceReader.SetCurrentMediaType(
                MF.SourceReaderIndex.FirstVideoStream,
                requestedType);
            m_sourceReader.SetStreamSelection(
                MF.SourceReaderIndex.FirstVideoStream,
                new SharpDX.Mathematics.Interop.RawBool(true));
        }

        // Query the negotiated media type to find out the actual frame size
        using (MF.MediaType negotiatedType = m_sourceReader.GetCurrentMediaType(MF.SourceReaderIndex.FirstVideoStream))
        {
            long packedFrameSize = negotiatedType.Get(MF.MediaTypeAttributeKeys.FrameSize);
            m_frameSize = new Size2(MFHelper.GetValuesByMFEncodedInts(packedFrameSize));
        }

        // Read duration and characteristics of the media source
        m_durationLong = m_sourceReader.GetPresentationAttribute(
            MF.SourceReaderIndex.MediaSource, MF.PresentationDescriptionAttributeKeys.Duration);
        m_characteristics = (MediaSourceCharacteristics_Internal)m_sourceReader.GetPresentationAttribute(
            MF.SourceReaderIndex.MediaSource, MF.SourceReaderAttributeKeys.MediaSourceCharacteristics);
    }
    catch (Exception)
    {
        // Release anything that was created before the failure, then rethrow
        this.Dispose();
        throw;
    }
}