/// <summary>
/// Opens the given video file and plays it directly.
/// </summary>
/// <param name="videoLink">The link to the video file.</param>
/// <exception cref="InvalidOperationException">
/// Thrown when the player is not idle (state is not <see cref="MediaPlayerState.NothingToDo"/>).
/// </exception>
public async Task OpenAndShowVideoFileAsync(ResourceLink videoLink)
{
    // Check for correct state: only one video may be open at a time.
    if (this.State != MediaPlayerState.NothingToDo)
    {
        throw new InvalidOperationException("Unable to open video file as long as there is another video playing!");
    }

    // Apply new state before any native resource is created so a concurrent
    // caller hitting the guard above is rejected.
    this.State = MediaPlayerState.Opening;

    try
    {
        // Create media session and a corresponding event listener object for async events.
        MF.MediaFactory.CreateMediaSession(null, out m_mediaSession);
        m_sessionEventHandler = MFSessionEventListener.AttachTo(m_mediaSession);
        m_sessionEventHandler.EndOfPresentation += OnSessionEventHandlerEndOfPresentationReached;

        // Create the Media Foundation source object from the .NET input stream.
        // The "Dummy." pseudo file name exists only to carry the real file
        // extension so the source resolver can pick a matching handler.
        MF.SourceResolver sourceResolver = new MF.SourceResolver();
        MF.ObjectType objType = MF.ObjectType.Invalid;
        m_videoSourceStreamNet = videoLink.OpenInputStream();
        m_videoSourceStream = new MF.ByteStream(m_videoSourceStreamNet);
        SharpDX.ComObject objSource = sourceResolver.CreateObjectFromStream(
            m_videoSourceStream,
            "Dummy." + videoLink.FileExtension,
            MF.SourceResolverFlags.MediaSource,
            out objType);
        using (MF.MediaSource mediaSource = objSource.QueryInterface<MF.MediaSource>())
        {
            // The raw source object and the resolver are no longer needed once
            // the MediaSource interface has been queried; release them early.
            GraphicsHelper.SafeDispose(ref objSource);
            GraphicsHelper.SafeDispose(ref sourceResolver);

            await ShowVideoAsync(mediaSource);
        }

        // Video opened successfully: remember the source and switch to playing.
        m_currentVideoLink = videoLink;
        m_currentCaptureDevice = null;
        this.State = MediaPlayerState.Playing;
    }
    catch (Exception)
    {
        // Unload all resources in case of an exception, then rethrow
        // (preserves the original stack trace).
        DisposeResources();
        throw;
    }
}
/// <summary>
/// Create a topology to be played with a MediaSession from the given audio byte stream.
/// </summary>
/// <param name="mediaInputStream">Byte stream containing the audio data.</param>
/// <param name="mediaSource">
/// Receives the media source backing the topology. It is not disposed here;
/// presumably the caller owns it — TODO confirm against call sites.
/// </param>
/// <returns>The created playback topology.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when the stream format is unsupported, a selected stream is not audio,
/// or an available media type has more than 2 channels.
/// </exception>
internal static Topology CreateTopology(ByteStream mediaInputStream, out MediaSource mediaSource)
{
    // collector to dispose all the created Media Foundation native objects.
    var collector = new ObjectCollector();

    // Get the MediaSource object.
    var sourceResolver = new SourceResolver();
    collector.Add(sourceResolver);
    ComObject mediaSourceObject;

    // Try to load music.
    try
    {
        mediaSourceObject = sourceResolver.CreateObjectFromStream(
            mediaInputStream,
            null,
            SourceResolverFlags.MediaSource | SourceResolverFlags.ContentDoesNotHaveToMatchExtensionOrMimeType);
    }
    catch (SharpDXException)
    {
        // Resolver could not interpret the stream; release what was created so far.
        collector.Dispose();
        throw new InvalidOperationException("Music stream format not supported");
    }

    Topology retTopo;
    try
    {
        mediaSource = mediaSourceObject.QueryInterface<MediaSource>();
        collector.Add(mediaSourceObject);

        // Get the PresentationDescriptor.
        PresentationDescriptor presDesc;
        mediaSource.CreatePresentationDescriptor(out presDesc);
        collector.Add(presDesc);

        // Create the topology.
        MediaFactory.CreateTopology(out retTopo);
        for (var i = 0; i < presDesc.StreamDescriptorCount; i++)
        {
            Bool selected;
            StreamDescriptor desc;
            presDesc.GetStreamDescriptorByIndex(i, out selected, out desc);
            collector.Add(desc);

            if (selected)
            {
                // Test that the audio file data is valid and supported.
                var typeHandler = desc.MediaTypeHandler;
                collector.Add(typeHandler);
                var majorType = typeHandler.MajorType;
                if (majorType != MediaTypeGuids.Audio)
                    throw new InvalidOperationException("The music stream is not a valid audio stream.");

                // Reject sources advertising any media type with more than 2 channels.
                for (int mType = 0; mType < typeHandler.MediaTypeCount; mType++)
                {
                    MediaType type;
                    typeHandler.GetMediaTypeByIndex(mType, out type);
                    collector.Add(type);
                    var nbChannels = type.Get(MediaTypeAttributeKeys.AudioNumChannels);
                    if (nbChannels > 2)
                        throw new InvalidOperationException("The provided audio stream has more than 2 channels.");
                }

                // Create the topology nodes: source stream node -> audio renderer output node.
                TopologyNode sourceNode;
                MediaFactory.CreateTopologyNode(TopologyType.SourceStreamNode, out sourceNode);
                collector.Add(sourceNode);
                sourceNode.Set(TopologyNodeAttributeKeys.Source, mediaSource);
                sourceNode.Set(TopologyNodeAttributeKeys.PresentationDescriptor, presDesc);
                sourceNode.Set(TopologyNodeAttributeKeys.StreamDescriptor, desc);

                TopologyNode outputNode;
                MediaFactory.CreateTopologyNode(TopologyType.OutputNode, out outputNode);
                collector.Add(outputNode);
                Activate activate;
                MediaFactory.CreateAudioRendererActivate(out activate);
                collector.Add(activate);
                outputNode.Object = activate;

                retTopo.AddNode(sourceNode);
                retTopo.AddNode(outputNode);
                sourceNode.ConnectOutput(0, outputNode, 0);
            }
        }
    }
    finally
    {
        // Release every intermediate native object; the topology itself (and the
        // out mediaSource interface) intentionally survive this cleanup.
        collector.Dispose();
    }

    return retTopo;
}
/// <summary>
/// Builds the playback topology for <c>FileName</c>: a sample-grabber sink is
/// attached to the video stream and the default audio renderer to the audio
/// stream. Does nothing when a topology has already been created.
/// </summary>
private void PlatformInitialize()
{
    if (Topology != null)
    {
        return;
    }

    MediaManagerState.CheckStartup();
    MediaFactory.CreateTopology(out _topology);

    // Resolve the media source from the file name, then drop the intermediate objects.
    SharpDX.MediaFoundation.MediaSource mediaSource;
    {
        SourceResolver sourceResolver = new SourceResolver();
        ObjectType objectType;
        ComObject sourceObject = sourceResolver.CreateObjectFromURL(FileName, SourceResolverFlags.MediaSource, null, out objectType);
        mediaSource = sourceObject.QueryInterface<SharpDX.MediaFoundation.MediaSource>();
        sourceResolver.Dispose();
        sourceObject.Dispose();
    }

    PresentationDescriptor presentationDescriptor;
    mediaSource.CreatePresentationDescriptor(out presentationDescriptor);

    // Wire every selected stream into the topology.
    for (var streamIndex = 0; streamIndex < presentationDescriptor.StreamDescriptorCount; streamIndex++)
    {
        Bool isSelected;
        StreamDescriptor streamDescriptor;
        presentationDescriptor.GetStreamDescriptorByIndex(streamIndex, out isSelected, out streamDescriptor);

        if (isSelected)
        {
            // Source node feeding this stream into the topology.
            TopologyNode inputNode;
            MediaFactory.CreateTopologyNode(TopologyType.SourceStreamNode, out inputNode);
            inputNode.Set(TopologyNodeAttributeKeys.Source, mediaSource);
            inputNode.Set(TopologyNodeAttributeKeys.PresentationDescriptor, presentationDescriptor);
            inputNode.Set(TopologyNodeAttributeKeys.StreamDescriptor, streamDescriptor);

            // Output node: sample grabber for video, default renderer for audio.
            TopologyNode renderNode;
            MediaFactory.CreateTopologyNode(TopologyType.OutputNode, out renderNode);

            var streamMajorType = streamDescriptor.MediaTypeHandler.MajorType;
            if (streamMajorType == MediaTypeGuids.Video)
            {
                SampleGrabber = new VideoSampleGrabber();
                _mediaType = new MediaType();
                _mediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);

                // Specify that we want the data to come in as RGB32.
                _mediaType.Set(MediaTypeAttributeKeys.Subtype, new Guid("00000016-0000-0010-8000-00AA00389B71"));

                Activate grabberActivate;
                MediaFactory.CreateSampleGrabberSinkActivate(_mediaType, SampleGrabber, out grabberActivate);
                renderNode.Object = grabberActivate;
            }
            else if (streamMajorType == MediaTypeGuids.Audio)
            {
                Activate rendererActivate;
                MediaFactory.CreateAudioRendererActivate(out rendererActivate);
                renderNode.Object = rendererActivate;
            }

            _topology.AddNode(inputNode);
            _topology.AddNode(renderNode);
            inputNode.ConnectOutput(0, renderNode, 0);

            inputNode.Dispose();
            renderNode.Dispose();
        }

        streamDescriptor.Dispose();
    }

    presentationDescriptor.Dispose();
    mediaSource.Dispose();
}
/// <summary>
/// Builds the playback topology for <c>FileName</c>: the video stream is routed
/// through a sample-grabber sink (RGB32) and the audio stream through the
/// default audio renderer. Also reads the video frame size, the presentation
/// duration, and allocates the BGRA8 target texture. No-op when a topology
/// already exists.
/// </summary>
private void PlatformInitialize()
{
    if (Topology != null)
        return;

    //MediaManagerState.CheckStartup();
    MediaFactory.CreateTopology(out _topology);

    // Resolve the media source from the file name.
    SharpDX.MediaFoundation.MediaSource mediaSource;
    {
        SourceResolver resolver = new SourceResolver();
        ObjectType otype;
        ComObject source = resolver.CreateObjectFromURL(FileName, SourceResolverFlags.MediaSource, null, out otype);
        mediaSource = source.QueryInterface<SharpDX.MediaFoundation.MediaSource>();
        resolver.Dispose();
        source.Dispose();
    }

    PresentationDescriptor presDesc;
    mediaSource.CreatePresentationDescriptor(out presDesc);

    for (var i = 0; i < presDesc.StreamDescriptorCount; i++)
    {
        RawBool selected = false;
        StreamDescriptor desc;
        presDesc.GetStreamDescriptorByIndex(i, out selected, out desc);

        if (selected)
        {
            // Source node feeding this stream into the topology.
            TopologyNode sourceNode;
            MediaFactory.CreateTopologyNode(TopologyType.SourceStreamNode, out sourceNode);
            sourceNode.Set(TopologyNodeAttributeKeys.Source, mediaSource);
            sourceNode.Set(TopologyNodeAttributeKeys.PresentationDescriptor, presDesc);
            sourceNode.Set(TopologyNodeAttributeKeys.StreamDescriptor, desc);

            TopologyNode outputNode;
            MediaFactory.CreateTopologyNode(TopologyType.OutputNode, out outputNode);

            var majorType = desc.MediaTypeHandler.MajorType;
            if (majorType == MediaTypeGuids.Video)
            {
                Activate activate;
                SampleGrabber = new VideoSampleGrabber();
                _mediaType = new MediaType();
                _mediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);

                // Specify that we want the data to come in as RGB32.
                _mediaType.Set(MediaTypeAttributeKeys.Subtype, new Guid("00000016-0000-0010-8000-00AA00389B71"));

                MediaFactory.CreateSampleGrabberSinkActivate(_mediaType, SampleGrabber, out activate);
                outputNode.Object = activate;

                // MF_MT_FRAME_SIZE packs width in the upper 32 bits and height in
                // the lower 32 bits of a 64-bit value. The previous mask
                // (0x0000FFFF) truncated the height to 16 bits; use the full
                // lower 32 bits instead.
                long frameSize = desc.MediaTypeHandler.CurrentMediaType.Get<long>(MediaTypeAttributeKeys.FrameSize);
                Width = (int)(frameSize >> 32);
                Height = (int)(frameSize & 0xFFFFFFFF);
            }

            if (majorType == MediaTypeGuids.Audio)
            {
                Activate activate;
                MediaFactory.CreateAudioRendererActivate(out activate);
                outputNode.Object = activate;
            }

            _topology.AddNode(sourceNode);
            _topology.AddNode(outputNode);
            sourceNode.ConnectOutput(0, outputNode, 0);

            // Total presentation duration in 100-ns ticks, matching TimeSpan's unit.
            Duration = new TimeSpan(presDesc.Get<long>(PresentationDescriptionAttributeKeys.Duration));

            sourceNode.Dispose();
            outputNode.Dispose();
        }

        desc.Dispose();
    }

    presDesc.Dispose();
    mediaSource.Dispose();

    // Target texture sized from the video stream parsed above.
    VideoFrame = new Texture2D(Game.Instance.GraphicsDevice, Width, Height, ColorFormat.Bgra8, false);
}
/// <summary>
/// Builds the playback topology from whichever source is provided: raw bytes,
/// a stream, or a URL (checked in that priority order — the last non-null
/// argument wins). Video is routed through a sample-grabber sink (RGB32),
/// audio through the default renderer; frame size, duration, and the target
/// texture are initialized from the video stream.
/// </summary>
/// <param name="bytes">In-memory media data, or null.</param>
/// <param name="stream">Media data stream, or null.</param>
/// <param name="url">Media URL/path, or null.</param>
/// <exception cref="ArgumentException">Thrown when all three sources are null.</exception>
private void PlatformInitialize( byte[] bytes, Stream stream, string url )
{
    if (Topology != null)
    {
        return;
    }

    MediaFactory.CreateTopology(out _topology);

    SharpDX.MediaFoundation.MediaSource mediaSource;
    {
        SourceResolver resolver = new SourceResolver();
        ObjectType otype;
        ComObject source = null;

        if (url!=null)
        {
            source = resolver.CreateObjectFromURL(url, SourceResolverFlags.MediaSource, null, out otype);
        }

        // NOTE(review): the ByteStream wrappers below are never disposed; the
        // media source keeps reading from them, so disposing here would break
        // playback — confirm they are released with the media source.
        if (stream!=null)
        {
            var bs = new ByteStream( stream );
            source = resolver.CreateObjectFromStream(bs, null, SourceResolverFlags.MediaSource, null, out otype);
        }

        if (bytes!=null)
        {
            var bs = new ByteStream( bytes );
            source = resolver.CreateObjectFromStream(bs, null, SourceResolverFlags.MediaSource|SourceResolverFlags.ContentDoesNotHaveToMatchExtensionOrMimeType, null, out otype);
        }

        if (source==null)
        {
            // Previous message omitted 'bytes' even though it is also accepted.
            throw new ArgumentException("'bytes', 'stream' and 'url' are all null!");
        }

        mediaSource = source.QueryInterface<SharpDX.MediaFoundation.MediaSource>();
        resolver.Dispose();
        source.Dispose();
    }

    PresentationDescriptor presDesc;
    mediaSource.CreatePresentationDescriptor(out presDesc);

    for (var i = 0; i < presDesc.StreamDescriptorCount; i++)
    {
        RawBool selected = false;
        StreamDescriptor desc;
        presDesc.GetStreamDescriptorByIndex(i, out selected, out desc);

        if (selected)
        {
            // Source node feeding this stream into the topology.
            TopologyNode sourceNode;
            MediaFactory.CreateTopologyNode(TopologyType.SourceStreamNode, out sourceNode);
            sourceNode.Set(TopologyNodeAttributeKeys.Source, mediaSource);
            sourceNode.Set(TopologyNodeAttributeKeys.PresentationDescriptor, presDesc);
            sourceNode.Set(TopologyNodeAttributeKeys.StreamDescriptor, desc);

            TopologyNode outputNode;
            MediaFactory.CreateTopologyNode(TopologyType.OutputNode, out outputNode);

            var majorType = desc.MediaTypeHandler.MajorType;
            if (majorType == MediaTypeGuids.Video)
            {
                Activate activate;
                // NOTE(review): assigns the 'sampleGrabber' field but passes the
                // 'SampleGrabber' member below — looks like a property over the
                // same field; confirm they refer to the same instance.
                sampleGrabber = new VideoSampleGrabber();
                _mediaType = new MediaType();
                _mediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);

                // Specify that we want the data to come in as RGB32.
                _mediaType.Set(MediaTypeAttributeKeys.Subtype, new Guid("00000016-0000-0010-8000-00AA00389B71"));

                MediaFactory.CreateSampleGrabberSinkActivate(_mediaType, SampleGrabber, out activate);
                outputNode.Object = activate;

                // MF_MT_FRAME_SIZE packs width in the upper 32 bits and height in
                // the lower 32 bits. The previous mask (0x0000FFFF) truncated the
                // height to 16 bits; use the full lower 32 bits instead.
                long frameSize = desc.MediaTypeHandler.CurrentMediaType.Get<long>(MediaTypeAttributeKeys.FrameSize);
                Width = (int)(frameSize >> 32);
                Height = (int)(frameSize & 0xFFFFFFFF);
            }

            if (majorType == MediaTypeGuids.Audio)
            {
                Activate activate;
                MediaFactory.CreateAudioRendererActivate(out activate);
                outputNode.Object = activate;
            }

            _topology.AddNode(sourceNode);
            _topology.AddNode(outputNode);
            sourceNode.ConnectOutput(0, outputNode, 0);

            // Total presentation duration in 100-ns ticks, matching TimeSpan's unit.
            Duration = new TimeSpan(presDesc.Get<long>(PresentationDescriptionAttributeKeys.Duration));

            sourceNode.Dispose();
            outputNode.Dispose();
        }

        desc.Dispose();
    }

    presDesc.Dispose();
    mediaSource.Dispose();

    // Target texture sized from the video stream parsed above.
    videoFrame = new DynamicTexture(Game.Instance.RenderSystem, Width, Height, typeof(ColorBGRA), false, false);
}