/// <summary>
        /// Initializes a new instance of the <see cref="MediaFoundationVideoReader"/> class.
        /// </summary>
        /// <param name="captureDevice">The capture device.</param>
        public MediaFoundationVideoReader(CaptureDeviceInfo captureDevice)
        {
            captureDevice.EnsureNotNullOrDisposed(nameof(captureDevice));

            try
            {
                // Create the source reader
                using (MF.MediaAttributes mediaAttributes = new MF.MediaAttributes(1))
                {
                    // We need the 'EnableVideoProcessing' attribute because of the RGB32 format
                    // see (lowest post): http://msdn.developer-works.com/article/11388495/How+to+use+SourceReader+(for+H.264+to+RGB+conversion)%3F
                    mediaAttributes.Set(MF.SourceReaderAttributeKeys.EnableVideoProcessing, 1);
                    mediaAttributes.Set(MF.SourceReaderAttributeKeys.DisableDxva, 1);
                    mediaAttributes.Set(MF.SourceReaderAttributeKeys.DisconnectMediasourceOnShutdown, 1);

                    // Create the MediaSource object by given capture device
                    using (MF.MediaSource mediaSource = captureDevice.CreateMediaSource())
                    {
                        // Create the source reader
                        m_sourceReader = new MF.SourceReader(mediaSource, mediaAttributes);
                    }
                }


                // Apply source configuration
                using (MF.MediaType mediaType = new MF.MediaType())
                {
                    mediaType.Set(MF.MediaTypeAttributeKeys.MajorType, MF.MediaTypeGuids.Video);
                    mediaType.Set(MF.MediaTypeAttributeKeys.Subtype, MF.VideoFormatGuids.Rgb32);
                    m_sourceReader.SetCurrentMediaType(
                        MF.SourceReaderIndex.FirstVideoStream,
                        mediaType);
                    m_sourceReader.SetStreamSelection(MF.SourceReaderIndex.FirstVideoStream, new SharpDX.Mathematics.Interop.RawBool(true));
                }
                // Read some information about the source
                using (MF.MediaType mediaType = m_sourceReader.GetCurrentMediaType(MF.SourceReaderIndex.FirstVideoStream))
                {
                    long frameSizeLong = mediaType.Get(MF.MediaTypeAttributeKeys.FrameSize);
                    m_frameSize = new Size2(MFHelper.GetValuesByMFEncodedInts(frameSizeLong));
                }

                // Get additional properties
                m_durationLong    = 0;
                m_characteristics = (MediaSourceCharacteristics_Internal)m_sourceReader.GetPresentationAttribute(
                    MF.SourceReaderIndex.MediaSource, MF.SourceReaderAttributeKeys.MediaSourceCharacteristics);
            }
            catch (Exception)
            {
                this.Dispose();
                throw;
            }
        }
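
The frame size stored under MF.MediaTypeAttributeKeys.FrameSize is packed into a single 64-bit value. Below is a minimal sketch of what a helper such as MFHelper.GetValuesByMFEncodedInts is expected to do, assuming the standard Media Foundation packing (width in the upper 32 bits, height in the lower 32 bits); the method name is illustrative only:

        // Sketch only: split an MF-encoded 64-bit attribute value into its two 32-bit parts.
        // For MF_MT_FRAME_SIZE the upper half is the width and the lower half is the height.
        private static Tuple<int, int> DecodeMFEncodedInts(long encodedValue)
        {
            int upper = (int)(encodedValue >> 32);            // frame width
            int lower = (int)(encodedValue & 0xFFFFFFFFL);    // frame height
            return Tuple.Create(upper, lower);
        }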
Example #2
        /// <summary>
        /// Opens the given video file and plays it directly.
        /// </summary>
        /// <param name="videoLink">The link to the video file.</param>
        public async Task OpenAndShowVideoFileAsync(ResourceLink videoLink)
        {
            // Check for correct state
            if (this.State != MediaPlayerState.NothingToDo)
            {
                throw new InvalidOperationException("Unable to open video file as long as there is another video playing!");
            }

            // Apply new state
            this.State = MediaPlayerState.Opening;

            try
            {
                // Create media session and a corresponding event listener object for async events
                MF.MediaFactory.CreateMediaSession(null, out m_mediaSession);
                m_sessionEventHandler = MFSessionEventListener.AttachTo(m_mediaSession);
                m_sessionEventHandler.EndOfPresentation += OnSessionEventHandlerEndOfPresentationReached;

                // Create source object
                MF.SourceResolver sourceResolver = new MF.SourceResolver();
                MF.ObjectType     objType        = MF.ObjectType.Invalid;
                m_videoSourceStreamNet = videoLink.OpenInputStream();
                m_videoSourceStream    = new MF.ByteStream(m_videoSourceStreamNet);
                SharpDX.ComObject objSource = sourceResolver.CreateObjectFromStream(
                    m_videoSourceStream,
                    "Dummy." + videoLink.FileExtension,
                    MF.SourceResolverFlags.MediaSource,
                    out objType);
                using (MF.MediaSource mediaSource = objSource.QueryInterface<MF.MediaSource>())
                {
                    GraphicsHelper.SafeDispose(ref objSource);
                    GraphicsHelper.SafeDispose(ref sourceResolver);

                    await ShowVideoAsync(mediaSource);
                }

                // Video opened successfully
                m_currentVideoLink     = videoLink;
                m_currentCaptureDevice = null;
                this.State             = MediaPlayerState.Playing;
            }
            catch (Exception)
            {
                // Unload all resources in case of an exception
                DisposeResources();

                throw;
            }
        }
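
A hypothetical usage sketch for the method above; the m_mediaPlayer field and the way the ResourceLink is obtained are assumptions and depend on how the surrounding application wires up the MediaPlayerComponent:

        // Sketch only: open a video file and start playback. Assumes 'm_mediaPlayer' is a
        // MediaPlayerComponent that is already bound to a target control and currently idle.
        private async Task PlayVideoAsync(ResourceLink videoLink)
        {
            await m_mediaPlayer.OpenAndShowVideoFileAsync(videoLink);

            // On success the component is now in MediaPlayerState.Playing.
        }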
Example #3
        public async Task ShowCaptureDeviceAsync(CaptureDeviceInfo captureDevice)
        {
            // Check for correct state
            if (this.State != MediaPlayerState.NothingToDo)
            {
                throw new InvalidOperationException("Unable to open video file as long as there is another video playing!");
            }

            // Apply new state
            this.State = MediaPlayerState.Opening;

            try
            {
                // Create media session and a corresponding event listener object for async events
                MF.MediaFactory.CreateMediaSession(null, out m_mediaSession);
                m_sessionEventHandler = MFSessionEventListener.AttachTo(m_mediaSession);
                m_sessionEventHandler.EndOfPresentation += OnSessionEventHandlerEndOfPresentationReached;

                // Create the media source
                using (MF.MediaSource mediaSource = captureDevice.CreateMediaSource())
                {
                    // Show the video
                    await ShowVideoAsync(mediaSource);
                }

                // Video opened successfully
                m_currentVideoLink     = null;
                m_currentCaptureDevice = captureDevice;
                this.State             = MediaPlayerState.Playing;
            }
            catch (Exception)
            {
                // Unload all resources in case of an exception
                DisposeResources();

                throw;
            }
        }
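
OpenAndShowVideoFileAsync and ShowCaptureDeviceAsync begin with the same state check; a small helper could centralize that guard. This is a sketch only, and the method name is an assumption:

        // Sketch only: shared guard for the two opening methods above.
        private void EnsureNoOtherVideoPlaying()
        {
            if (this.State != MediaPlayerState.NothingToDo)
            {
                throw new InvalidOperationException(
                    "Unable to open a new media source as long as there is another video playing!");
            }
        }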
Example #4
        private async Task ShowVideoAsync(MF.MediaSource mediaSource)
        {
            MF.Topology topology;
            MF.PresentationDescriptor presentationDescriptor;
            bool containsVideoStream = false;
            bool containsAudioStream = false;

            lock (m_mfResourceLock)
            {
                // Create topology
                MF.MediaFactory.CreateTopology(out topology);
                mediaSource.CreatePresentationDescriptor(out presentationDescriptor);
                int streamDescriptorCount = presentationDescriptor.StreamDescriptorCount;
                for (int loop = 0; loop < streamDescriptorCount; loop++)
                {
                    SharpDX.Mathematics.Interop.RawBool selected = false;
                    MF.StreamDescriptor streamDescriptor;
                    presentationDescriptor.GetStreamDescriptorByIndex(loop, out selected, out streamDescriptor);
                    if (selected)
                    {
                        // Create source node
                        MF.TopologyNode sourceNode = null;
                        MF.MediaFactory.CreateTopologyNode(MF.TopologyType.SourceStreamNode, out sourceNode);
                        sourceNode.Set(MF.TopologyNodeAttributeKeys.Source, mediaSource);
                        sourceNode.Set(MF.TopologyNodeAttributeKeys.PresentationDescriptor, presentationDescriptor);
                        sourceNode.Set(MF.TopologyNodeAttributeKeys.StreamDescriptor, streamDescriptor);

                        // Create output node
                        MF.TopologyNode     outputNode       = null;
                        MF.MediaTypeHandler mediaTypeHandler = streamDescriptor.MediaTypeHandler;
                        Guid majorType = mediaTypeHandler.MajorType;
                        MF.MediaFactory.CreateTopologyNode(MF.TopologyType.OutputNode, out outputNode);
                        if (MF.MediaTypeGuids.Audio == majorType)
                        {
                            containsAudioStream = true;
                            MF.Activate audioRenderer;
                            MF.MediaFactory.CreateAudioRendererActivate(out audioRenderer);
                            outputNode.Object = audioRenderer;
                            GraphicsHelper.SafeDispose(ref audioRenderer);
                        }
                        else if (MF.MediaTypeGuids.Video == majorType)
                        {
                            if (m_targetControl == null)
                            {
                                throw new SeeingSharpException("Unable to display vido when MediaPlayerComponent is not bound to a target control!");
                            }

                            containsVideoStream = true;
                            MF.Activate videoRenderer;
                            MF.MediaFactory.CreateVideoRendererActivate(
                                m_targetControl.Handle,
                                out videoRenderer);
                            outputNode.Object = videoRenderer;
                            GraphicsHelper.SafeDispose(ref videoRenderer);
                        }

                        // Append nodes to topology
                        topology.AddNode(sourceNode);
                        topology.AddNode(outputNode);
                        sourceNode.ConnectOutput(0, outputNode, 0);

                        // Clear COM references
                        GraphicsHelper.SafeDispose(ref sourceNode);
                        GraphicsHelper.SafeDispose(ref outputNode);
                        GraphicsHelper.SafeDispose(ref mediaTypeHandler);
                    }

                    // Clear COM references
                    GraphicsHelper.SafeDispose(ref streamDescriptor);
                }

                // Get the total duration of the video
                long durationLong = 0;
                try
                {
                    durationLong           = presentationDescriptor.Get<long>(MF.PresentationDescriptionAttributeKeys.Duration);
                    m_currentVideoDuration = TimeSpan.FromTicks(durationLong);
                }
                catch (SharpDX.SharpDXException)
                {
                    m_currentVideoDuration = TimeSpan.MaxValue;
                }
            }

            // Dispose reference to the presentation descriptor
            GraphicsHelper.SafeDispose(ref presentationDescriptor);

            // Apply build topology to the session
            Task<MF.MediaEvent> topologyReadyWaiter = m_sessionEventHandler.WaitForEventAsync(
                MF.MediaEventTypes.SessionTopologyStatus,
                (eventData) => eventData.Get<MF.TopologyStatus>(MF.EventAttributeKeys.TopologyStatus) == MF.TopologyStatus.Ready,
                CancellationToken.None);

            m_mediaSession.SetTopology(MF.SessionSetTopologyFlags.None, topology);
            await topologyReadyWaiter;

            // Clear reference to the topology
            GraphicsHelper.SafeDispose(ref topology);

            lock (m_mfResourceLock)
            {
                using (MF.ServiceProvider serviceProvider = m_mediaSession.QueryInterface<MF.ServiceProvider>())
                {
                    // Query for display control service
                    if (containsVideoStream)
                    {
                        m_displayControl = serviceProvider.GetService<MF.VideoDisplayControl>(
                            new Guid("{0x1092a86c, 0xab1a, 0x459a,{0xa3, 0x36, 0x83, 0x1f, 0xbc, 0x4d, 0x11, 0xff}}"));
                    }

                    // Query for volume control service
                    if (containsAudioStream)
                    {
                        m_audioStreamVolume = serviceProvider.GetService<MF.AudioStreamVolume>(
                            MF.MediaServiceKeys.StreamVolume);

                        // Set initial volume
                        this.AudioVolume = m_audioVolume;
                    }
                }
            }

            // Start playing the video
            await StartSessionInternalAsync(true);
        }
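
The GUID passed to GetService for the video display control is MR_VIDEO_RENDER_SERVICE (the service identifier for IMFVideoDisplayControl). Naming it once keeps the call site readable; the field name below is a suggestion, not part of the original code:

        // Sketch only: MR_VIDEO_RENDER_SERVICE, used to query MF.VideoDisplayControl from the session.
        private static readonly Guid ServiceVideoRender = new Guid("1092a86c-ab1a-459a-a336-831fbc4d11ff");

        // The call inside ShowVideoAsync could then read:
        //     m_displayControl = serviceProvider.GetService<MF.VideoDisplayControl>(ServiceVideoRender);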