/// <summary>
/// Creates the <see cref="IModuleConfiguration"/> instance declared by the module
/// and invokes <see cref="IModuleConfiguration.Initialize"/> on it.
/// </summary>
/// <param name="module">The module whose configuration type is resolved.</param>
/// <param name="assembly">The assembly that contains the module configuration type.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="module"/> or <paramref name="assembly"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown when the configuration type cannot be resolved or does not implement <see cref="IModuleConfiguration"/>.</exception>
public static void Initialize(ModuleInfo module, Assembly assembly)
{
    if (module == null)
    {
        throw new ArgumentNullException(nameof(module));
    }
    if (assembly == null)
    {
        throw new ArgumentNullException(nameof(assembly));
    }

    var moduleConfigurationType = assembly.GetType(module.ModuleConfigurationFullName);
    if (moduleConfigurationType == null)
    {
        throw new ArgumentException($"Unable to resolve the type for '{module.ModuleConfigurationFullName}'");
    }

    var instance = SourceResolver.CreateInstance(moduleConfigurationType);
    // Pattern matching narrows the instance in one step instead of the 'as' + null-check pair.
    if (instance is not IModuleConfiguration moduleConfiguration)
    {
        throw new ArgumentException($"The module configuration '{module.ModuleConfigurationFullName}' ('{module.ModuleName}') has to implement IModuleConfiguration interface");
    }
    moduleConfiguration.Initialize();
}
/// <summary>
/// Verifies that the default factory creates an instance of exactly the requested type.
/// </summary>
public void Create_Instance_With_Default_Factory_Works()
{
    var created = SourceResolver.CreateInstance(typeof(MyViewA));

    Assert.IsNotNull(created);
    Assert.AreEqual(typeof(MyViewA), created.GetType());
}
/// <summary>
/// Registers this module's services with the injector and exposes ViewC to navigation.
/// </summary>
public void Initialize()
{
    // Service registrations for this module.
    injector.RegisterType<IMyService, MyService>();
    // injector.RegisterType<IMySharedService, MySharedService>();

    // Make ViewC discoverable by the navigation service.
    SourceResolver.RegisterTypeForNavigation<ViewC>();
}
/// <summary>
/// Resolves a Media Foundation media source from the given URL.
/// </summary>
/// <param name="sURL">The URL (or file path) of the media to open.</param>
/// <returns>The resolved <see cref="MediaSource"/>; the caller owns it and must dispose it.</returns>
private MediaSource CreateMediaSource(string sURL)
{
    // Dispose the resolver and the intermediate COM object so that only the
    // queried MediaSource interface stays alive (the original leaked both).
    using (SourceResolver sourceResolver = new SourceResolver())
    using (ComObject comObject = sourceResolver.CreateObjectFromURL(
               sURL,
               SourceResolverFlags.MediaSource | SourceResolverFlags.ContentDoesNotHaveToMatchExtensionOrMimeType))
    {
        return comObject.QueryInterface<MediaSource>();
    }
}
/// <summary>
/// Creates a Media Foundation playback session for the supplied in-memory media data.
/// </summary>
/// <param name="mediaData">Raw bytes of the media to play.</param>
/// <param name="session">The created media session, or null when setup fails.</param>
/// <param name="windowHandle">Optional window handle used as the video render target; IntPtr.Zero when absent.</param>
/// <param name="isStartUpMediaManager">When true, calls MediaManager.Startup() before anything else.</param>
/// <returns>true when the session was created and its topology was set; otherwise false.</returns>
public static bool CreateMediaSession(byte[] mediaData, out MediaSession? session, IntPtr? windowHandle = null, bool isStartUpMediaManager = false)
{
    PresentationDescriptor? pd = null;
    Topology? topology = null;
    try
    {
        if (isStartUpMediaManager)
        {
            MediaManager.Startup();
        }

        // NOTE(review): attributes, resolver and byteStream are never disposed —
        // confirm whether these COM wrappers are released elsewhere or leak.
        var attributes = new MediaAttributes(mediaData.Length);
        MediaFactory.CreateMediaSession(attributes, out session);
        var resolver = new SourceResolver();
        var byteStream = new ByteStream(mediaData);
        resolver.CreateObjectFromByteStream(byteStream, null, (int)SourceResolverFlags.ByteStream, null, out var objType, out var videoObject);
        GetMediaSource(videoObject, out var source);
        if (source != null)
        {
            MediaFactory.CreateTopology(out topology);
            source.CreatePresentationDescriptor(out pd);
            // Build the playback topology; fall back to IntPtr.Zero when no window handle was given.
            var r1 = CreatePlaybackTopology(source, pd, windowHandle.HasValue ? windowHandle.Value : IntPtr.Zero, out topology);
            if (r1.Success)
            {
                session.SetTopology(0, topology);
                return (true);
            }
            else
            {
                // NOTE(review): the already-created session is dropped here without
                // Dispose/Shutdown — verify this is intentional.
                session = null;
                topology = null;
                return (false);
            }
        }
        else
        {
            session = null;
            topology = null;
            return (false);
        }
    }
    catch (SharpDXException ex)
    {
        Debug.Print(ex.ToString());
        session = null;
        return (false);
    }
    finally
    {
        // Release the locally held descriptor/topology references on every path.
        pd?.Dispose();
        topology?.Dispose();
    }
}
/// <summary>
/// Checks that navigation registrations accumulate per key and can be cleared.
/// </summary>
public void Registers_Type_For_Navigation()
{
    // Start from an empty registry.
    SourceResolver.ClearTypesForNavigation();
    Assert.AreEqual(0, SourceResolver.TypesForNavigation.Count);

    // First registration is stored under its key.
    SourceResolver.RegisterTypeForNavigation<MyNavViewA>("A");
    Assert.AreEqual(1, SourceResolver.TypesForNavigation.Count);
    Assert.AreEqual(typeof(MyNavViewA), SourceResolver.TypesForNavigation["A"]);

    // A second registration leaves the first one intact.
    SourceResolver.RegisterTypeForNavigation<MyNavViewB>("B");
    Assert.AreEqual(2, SourceResolver.TypesForNavigation.Count);
    Assert.AreEqual(typeof(MyNavViewA), SourceResolver.TypesForNavigation["A"]);
    Assert.AreEqual(typeof(MyNavViewB), SourceResolver.TypesForNavigation["B"]);

    // Leave the registry clean for other tests.
    SourceResolver.ClearTypesForNavigation();
}
/// <summary>
/// Verifies that a custom source factory can be installed and later reset to the default.
/// </summary>
public void Change_And_Reset_The_Factory()
{
    var factory = new MySourceFactory();
    Assert.AreEqual(false, factory.IsCalled);

    // With the custom factory installed, creation must flow through it.
    SourceResolver.SetFactory(factory.CreateInstance);
    var first = SourceResolver.CreateInstance(typeof(MyViewB));
    Assert.IsNotNull(first);
    Assert.AreEqual(typeof(MyViewB), first.GetType());
    Assert.AreEqual(true, factory.IsCalled);

    // After resetting, the custom factory must no longer be invoked.
    SourceResolver.SetFactoryToDefault();
    factory.IsCalled = false;
    var second = SourceResolver.CreateInstance(typeof(MyViewB));
    Assert.IsNotNull(second);
    Assert.AreEqual(typeof(MyViewB), second.GetType());
    Assert.AreEqual(false, factory.IsCalled);
}
/// <summary>
/// Opens the given media file, wraps it in a SourceReader and forces the
/// decoded video stream to NV12 output, logging the media types before and after.
/// </summary>
/// <param name="fileName">Path or URL of the media file.</param>
/// <param name="devMan">Optional D3D device manager; when supplied it is handed to the reader (enables DXVA decoding).</param>
public void Setup(string fileName, Direct3DDeviceManager devMan = null)
{
    logger.Debug("VideoFileSource::Setup()");

    // Resolve the file into a media source.
    using (var sourceResolver = new SourceResolver())
    {
        var unkObj = sourceResolver.CreateObjectFromURL(fileName, SourceResolverFlags.MediaSource);
        var guid = typeof(MediaSource).GUID;
        unkObj.QueryInterface(ref guid, out var pUnk);
        mediaSource = new MediaSource(pUnk);
    }

    // Build the reader attributes; the commented lines are kept as a record of
    // previously tried configurations.
    using (var mediaAttributes = new MediaAttributes(IntPtr.Zero))
    {
        MediaFactory.CreateAttributes(mediaAttributes, 5);
        //mediaAttributes.Set(SourceReaderAttributeKeys.EnableVideoProcessing, 1);
        if (devMan != null)
        {
            //mediaAttributes.Set(SourceReaderAttributeKeys.DisableDxva, 0);
            mediaAttributes.Set(SourceReaderAttributeKeys.D3DManager, devMan);
        }
        //mediaAttributes.Set(CodecApiPropertyKeys.AVLowLatencyMode, false);
        sourceReader = new SourceReader(mediaSource, mediaAttributes);
    }

    var charact = mediaSource.Characteristics;
    Console.WriteLine(MfTool.LogEnumFlags((MediaSourceCharacteristics)charact));
    Console.WriteLine("------------------CurrentMediaType-------------------");

    int videoStreamIndex = (int)SourceReaderIndex.FirstVideoStream;
    using (var currentMediaType = sourceReader.GetCurrentMediaType(videoStreamIndex))
    {
        Console.WriteLine(MfTool.LogMediaType(currentMediaType));

        // Carry the source's frame size and rate over to the requested output type.
        var frameSize = currentMediaType.Get(MediaTypeAttributeKeys.FrameSize);
        var frameRate = currentMediaType.Get(MediaTypeAttributeKeys.FrameRate);

        OutputMediaType = new MediaType();
        OutputMediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
        OutputMediaType.Set(MediaTypeAttributeKeys.Subtype, VideoFormatGuids.NV12); // VideoFormatGuids.Yv12);
        OutputMediaType.Set(MediaTypeAttributeKeys.FrameSize, frameSize);
        OutputMediaType.Set(MediaTypeAttributeKeys.FrameRate, frameRate);
        OutputMediaType.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
        OutputMediaType.Set(MediaTypeAttributeKeys.AllSamplesIndependent, 1);
        sourceReader.SetCurrentMediaType(videoStreamIndex, OutputMediaType);

        Console.WriteLine("------------------NEW MediaType-------------------");
        Console.WriteLine(MfTool.LogMediaType(OutputMediaType));
    }
}
/// <summary>
/// Create a topology to be played with a MediaSession from a filepath.
/// </summary>
/// <param name="mediaInputStream">Byte stream containing the music data.</param>
/// <param name="mediaSource">Receives the media source resolved from the stream; the caller owns it.</param>
/// <returns>The topology connecting the selected audio stream(s) to the default audio renderer.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when the stream format is unsupported, is not audio, or has more than 2 channels.
/// </exception>
internal static Topology CreateTopology(ByteStream mediaInputStream, out MediaSource mediaSource)
{
    // collector to dispose all the created Media Foundation native objects.
    var collector = new ObjectCollector();

    // Get the MediaSource object.
    var sourceResolver = new SourceResolver();
    collector.Add(sourceResolver);
    ComObject mediaSourceObject;

    // Try to load music
    try
    {
        mediaSourceObject = sourceResolver.CreateObjectFromStream(mediaInputStream, null, SourceResolverFlags.MediaSource | SourceResolverFlags.ContentDoesNotHaveToMatchExtensionOrMimeType);
    }
    catch (SharpDXException)
    {
        collector.Dispose();
        throw new InvalidOperationException("Music stream format not supported");
    }

    Topology retTopo;
    try
    {
        mediaSource = mediaSourceObject.QueryInterface<MediaSource>();
        collector.Add(mediaSourceObject);

        // Get the PresentationDescriptor
        PresentationDescriptor presDesc;
        mediaSource.CreatePresentationDescriptor(out presDesc);
        collector.Add(presDesc);

        // Create the topology
        MediaFactory.CreateTopology(out retTopo);
        for (var i = 0; i < presDesc.StreamDescriptorCount; i++)
        {
            RawBool selected;
            StreamDescriptor desc;
            presDesc.GetStreamDescriptorByIndex(i, out selected, out desc);
            collector.Add(desc);
            if (selected)
            {
                // Test that the audio file data is valid and supported.
                var typeHandler = desc.MediaTypeHandler;
                collector.Add(typeHandler);
                var majorType = typeHandler.MajorType;
                if (majorType != MediaTypeGuids.Audio)
                {
                    throw new InvalidOperationException("The music stream is not a valid audio stream.");
                }
                // Reject any media type advertising more than stereo.
                for (int mType = 0; mType < typeHandler.MediaTypeCount; mType++)
                {
                    MediaType type;
                    typeHandler.GetMediaTypeByIndex(mType, out type);
                    collector.Add(type);
                    var nbChannels = type.Get(MediaTypeAttributeKeys.AudioNumChannels);
                    if (nbChannels > 2)
                    {
                        throw new InvalidOperationException("The provided audio stream has more than 2 channels.");
                    }
                }

                // create the topology (source,...)
                TopologyNode sourceNode;
                MediaFactory.CreateTopologyNode(TopologyType.SourceStreamNode, out sourceNode);
                collector.Add(sourceNode);
                sourceNode.Set(TopologyNodeAttributeKeys.Source, mediaSource);
                sourceNode.Set(TopologyNodeAttributeKeys.PresentationDescriptor, presDesc);
                sourceNode.Set(TopologyNodeAttributeKeys.StreamDescriptor, desc);

                // Output node: route the stream to the default audio renderer.
                TopologyNode outputNode;
                MediaFactory.CreateTopologyNode(TopologyType.OutputNode, out outputNode);
                collector.Add(outputNode);
                Activate activate;
                MediaFactory.CreateAudioRendererActivate(out activate);
                collector.Add(activate);
                outputNode.Object = activate;

                retTopo.AddNode(sourceNode);
                retTopo.AddNode(outputNode);
                sourceNode.ConnectOutput(0, outputNode, 0);
            }
        }
    }
    finally
    {
        // All intermediate native objects are released here, on success or failure.
        collector.Dispose();
    }

    return (retTopo);
}
/// <summary>
/// Builds a Media Foundation topology for the given file and starts playback.
/// Video streams are routed to a sample-grabber sink (RGB32); audio streams to
/// the default audio renderer.
/// </summary>
/// <param name="filename">Path or URL of the media file to play.</param>
public override void PlayFile(string filename)
{
    //Load the file
    MediaSource mediaSource;
    {
        var resolver = new SourceResolver();
        ObjectType otype;
        var source = new ComObject(resolver.CreateObjectFromURL(filename, SourceResolverFlags.MediaSource, null, out otype));
        try
        {
            // Sometimes throws HRESULT: [0x80004002], Module: [General], ApiCode: [E_NOINTERFACE/No such interface supported], Message: No such interface supported. Bug?
            mediaSource = source.QueryInterface<MediaSource>();
        }
        catch (SharpDXException)
        {
            mediaSource = null;
            FLLog.Error("VideoPlayerWMF", "QueryInterface failed on Media Foundation");
        }
        resolver.Dispose();
        source.Dispose();
    }
    // Give up silently when the file could not be opened as a media source.
    if (mediaSource is null)
    {
        return;
    }

    PresentationDescriptor presDesc;
    mediaSource.CreatePresentationDescriptor(out presDesc);
    for (int i = 0; i < presDesc.StreamDescriptorCount; i++)
    {
        SharpDX.Mathematics.Interop.RawBool selected;
        StreamDescriptor desc;
        presDesc.GetStreamDescriptorByIndex(i, out selected, out desc);
        if (selected)
        {
            // Source node for the selected stream.
            TopologyNode sourceNode;
            MediaFactory.CreateTopologyNode(TopologyType.SourceStreamNode, out sourceNode);
            sourceNode.Set(TopologyNodeAttributeKeys.Source, mediaSource);
            sourceNode.Set(TopologyNodeAttributeKeys.PresentationDescriptor, presDesc);
            sourceNode.Set(TopologyNodeAttributeKeys.StreamDescriptor, desc);

            TopologyNode outputNode;
            MediaFactory.CreateTopologyNode(TopologyType.OutputNode, out outputNode);

            var majorType = desc.MediaTypeHandler.MajorType;
            if (majorType == MediaTypeGuids.Video)
            {
                Activate activate;
                videoSampler = new MFSamples();
                //retrieve size of video
                // The GUID is MF_MT_FRAME_SIZE: width sits in the upper 32 bits,
                // height in the lower 32 bits (matching the shift/mask below).
                long sz = desc.MediaTypeHandler.CurrentMediaType.Get<long>(new Guid("{1652c33d-d6b2-4012-b834-72030849a37d}"));
                int height = (int)(sz & uint.MaxValue), width = (int)(sz >> 32);
                _texture = new Texture2D(width, height, false, SurfaceFormat.Color);
                mt = new MediaType();
                mt.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
                // Specify that we want the data to come in as RGB32.
                mt.Set(MediaTypeAttributeKeys.Subtype, new Guid("00000016-0000-0010-8000-00AA00389B71"));
                GetMethods();
                MFCreateSampleGrabberSinkActivate(mt, videoSampler, out activate);
                outputNode.Object = activate;
            }
            if (majorType == MediaTypeGuids.Audio)
            {
                Activate activate;
                MediaFactory.CreateAudioRendererActivate(out activate);
                outputNode.Object = activate;
            }

            topology.AddNode(sourceNode);
            topology.AddNode(outputNode);
            sourceNode.ConnectOutput(0, outputNode, 0);
            sourceNode.Dispose();
            outputNode.Dispose();
        }
        desc.Dispose();
    }
    presDesc.Dispose();
    mediaSource.Dispose();

    //Play the file
    cb = new MFCallback(this, session);
    session.BeginGetEvent(cb, null);
    session.SetTopology(SessionSetTopologyFlags.Immediate, topology);
    // Get the clock
    clock = session.Clock.QueryInterface<PresentationClock>();
    // Start playing.
    Playing = true;
}
/// <summary>
/// Creates streaming sources from a Nico Nico Douga video: logs in, resolves the
/// stream URL via the getflv API, wraps the HTTP download in an IMFByteStream and
/// builds a SourceReaderEx plus the output WaveFormat.
/// </summary>
/// <param name="user_id">Nico Nico account mail address.</param>
/// <param name="password">Nico Nico account password.</param>
/// <param name="video_id">Identifier of the video to stream.</param>
/// <param name="soundDeviceFormat">Output device format; its sample rate and channel count are reused.</param>
public static MediaFoundationStreamingSources CreateFromニコ動(string user_id, string password, string video_id, WaveFormat soundDeviceFormat)
{
    var sources = new MediaFoundationStreamingSources();

    #region " ニコ動から SourceReaderEx を生成する。"
    //----------------
    if (null == _HttpClient)
    {
        _HttpClient = new HttpClient();
    }

    // Log in.
    var content = new FormUrlEncodedContent(new Dictionary<string, string> {
        { "mail", user_id },
        { "password", password },
        { "next_url", string.Empty },
    });
    using (var responseLogin = _HttpClient.PostAsync("https://secure.nicovideo.jp/secure/login?site=niconico", content).Result)
    {
    }

    // Access the video watch page (must happen before the getflv call).
    var responseWatch = _HttpClient.GetStringAsync($"http://www.nicovideo.jp/watch/{video_id}").Result;

    // Fetch the video information and extract the stream URL.
    var responseGetFlv = _HttpClient.GetStringAsync($"http://flapi.nicovideo.jp/api/getflv/{video_id}").Result;
    var flvmap = HttpUtility.ParseQueryString(responseGetFlv);
    var flvurl = flvmap["url"];

    // Get the video's length in bytes and its content type from the response
    // headers only (the body is not downloaded here).
    ulong 長さbyte = 0;
    string contentType = "";
    using (var requestMovie = new HttpRequestMessage(HttpMethod.Get, flvurl))
    using (var responseMovie = _HttpClient.SendAsync(requestMovie, HttpCompletionOption.ResponseHeadersRead).Result)
    {
        長さbyte = (ulong)(responseMovie.Content.Headers.ContentLength);
        contentType = responseMovie.Content.Headers.ContentType.MediaType;
    }

    // Create an IMFByteStream over an HTTP random-access stream.
    sources._ByteStream = new ByteStream(IntPtr.Zero);
    sources._HttpRandomAccessStream = new HttpRandomAccessStream(_HttpClient, 長さbyte, flvurl);
    sources._unkHttpRandomAccessStream = new ComObject(Marshal.GetIUnknownForObject(sources._HttpRandomAccessStream));
    MediaFactory.CreateMFByteStreamOnStreamEx(sources._unkHttpRandomAccessStream, sources._ByteStream);
    using (var 属性 = sources._ByteStream.QueryInterfaceOrNull<MediaAttributes>())
    {
        // Set the content type on the byte stream attributes.
        属性.Set(ByteStreamAttributeKeys.ContentType, contentType);
    }

    // Use the SourceResolver to obtain a MediaSource from the IMFByteStream.
    using (var sourceResolver = new SourceResolver())
    using (var unkMediaSource = sourceResolver.CreateObjectFromStream(sources._ByteStream, null, SourceResolverFlags.MediaSource))
    {
        sources._MediaSource = unkMediaSource.QueryInterface<MediaSource>();

        // Create the SourceReaderEx from the MediaSource.
        using (var 属性 = new MediaAttributes())
        {
            // If the GPU supports DXVA, tell the reader to use it for decoding.
            属性.Set(SourceReaderAttributeKeys.D3DManager, グラフィックデバイス.Instance.MFDXGIDeviceManager);

            // Enable advanced video processing. (This key takes a bool...)
            属性.Set(SourceReaderAttributeKeys.EnableAdvancedVideoProcessing, true);

            // With advanced video processing enabled, this one is disabled instead. (...while this one takes an int.)
            属性.Set(SinkWriterAttributeKeys.ReadwriteDisableConverters, 0);

            // Create the SourceReaderEx using the attributes above.
            using (var sourceReader = new SourceReader(sources._MediaSource, 属性))
            {
                sources._SourceReaderEx = sourceReader.QueryInterfaceOrNull<SourceReaderEx>();
            }
        }
    }
    //----------------
    #endregion

    #region " WaveFormat を生成。"
    //----------------
    // Build a 32-bit IEEE-float WaveFormat matching the device's rate and channel count.
    sources._Audioのフォーマット = new WaveFormat(
        soundDeviceFormat.SampleRate,
        32,
        soundDeviceFormat.Channels,
        AudioEncoding.IeeeFloat);
    //----------------
    #endregion

    sources._SourceReaderEx生成後の初期化();

    return (sources);
}
/// <summary>
/// Builds the Media Foundation playback topology from one of: a byte array, a
/// stream, or a URL. Video streams are routed to a sample-grabber sink (RGB32);
/// audio streams to the default audio renderer.
/// </summary>
/// <param name="bytes">In-memory media data, or null.</param>
/// <param name="stream">Media stream, or null.</param>
/// <param name="url">Media URL or file path, or null.</param>
/// <exception cref="ArgumentException">Thrown when all inputs are null.</exception>
private void PlatformInitialize(byte[] bytes, Stream stream, string url)
{
    if (Topology != null)
    {
        return;
    }

    MediaFactory.CreateTopology(out _topology);

    // Resolve whichever input was supplied into a MediaSource
    // (priority when several are given: bytes over stream over url).
    SharpDX.MediaFoundation.MediaSource mediaSource;
    {
        SourceResolver resolver = new SourceResolver();
        ObjectType otype;
        ComObject source = null;
        if (url != null)
        {
            source = resolver.CreateObjectFromURL(url, SourceResolverFlags.MediaSource, null, out otype);
        }
        if (stream != null)
        {
            var bs = new ByteStream(stream);
            source = resolver.CreateObjectFromStream(bs, null, SourceResolverFlags.MediaSource, null, out otype);
        }
        if (bytes != null)
        {
            var bs = new ByteStream(bytes);
            source = resolver.CreateObjectFromStream(bs, null, SourceResolverFlags.MediaSource | SourceResolverFlags.ContentDoesNotHaveToMatchExtensionOrMimeType, null, out otype);
        }
        if (source == null)
        {
            throw new ArgumentException("'stream' and 'url' are null!");
        }
        mediaSource = source.QueryInterface<SharpDX.MediaFoundation.MediaSource>();
        resolver.Dispose();
        source.Dispose();
    }

    PresentationDescriptor presDesc;
    mediaSource.CreatePresentationDescriptor(out presDesc);

    for (var i = 0; i < presDesc.StreamDescriptorCount; i++)
    {
        RawBool selected = false;
        StreamDescriptor desc;
        presDesc.GetStreamDescriptorByIndex(i, out selected, out desc);
        if (selected)
        {
            TopologyNode sourceNode;
            MediaFactory.CreateTopologyNode(TopologyType.SourceStreamNode, out sourceNode);
            sourceNode.Set(TopologyNodeAttributeKeys.Source, mediaSource);
            sourceNode.Set(TopologyNodeAttributeKeys.PresentationDescriptor, presDesc);
            sourceNode.Set(TopologyNodeAttributeKeys.StreamDescriptor, desc);

            TopologyNode outputNode;
            MediaFactory.CreateTopologyNode(TopologyType.OutputNode, out outputNode);

            var majorType = desc.MediaTypeHandler.MajorType;
            if (majorType == MediaTypeGuids.Video)
            {
                Activate activate;
                sampleGrabber = new VideoSampleGrabber();
                _mediaType = new MediaType();
                _mediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
                // Specify that we want the data to come in as RGB32.
                _mediaType.Set(MediaTypeAttributeKeys.Subtype, new Guid("00000016-0000-0010-8000-00AA00389B71"));
                MediaFactory.CreateSampleGrabberSinkActivate(_mediaType, SampleGrabber, out activate);
                outputNode.Object = activate;

                // MF_MT_FRAME_SIZE packs width in the upper 32 bits and height in
                // the lower 32 bits. The previous mask (0x0000FFFF) kept only 16
                // of the 32 height bits; mask the full lower 32 bits instead.
                long frameSize = desc.MediaTypeHandler.CurrentMediaType.Get<long>(MediaTypeAttributeKeys.FrameSize);
                Width = (int)(frameSize >> 32);
                Height = (int)(frameSize & 0xFFFFFFFF);
            }
            if (majorType == MediaTypeGuids.Audio)
            {
                Activate activate;
                MediaFactory.CreateAudioRendererActivate(out activate);
                outputNode.Object = activate;
            }

            _topology.AddNode(sourceNode);
            _topology.AddNode(outputNode);
            sourceNode.ConnectOutput(0, outputNode, 0);

            Duration = new TimeSpan(presDesc.Get<long>(PresentationDescriptionAttributeKeys.Duration));

            sourceNode.Dispose();
            outputNode.Dispose();
        }
        desc.Dispose();
    }

    presDesc.Dispose();
    mediaSource.Dispose();

    videoFrame = new DynamicTexture(Game.Instance.RenderSystem, Width, Height, typeof(ColorBGRA), false, false);
}
/// <summary>
/// Builds an audio-only Media Foundation topology for the song.
/// Note: the media is opened from <c>FilePath</c>; the <paramref name="fileName"/>
/// parameter is not used inside this method.
/// </summary>
/// <param name="fileName">Unused here; see <c>FilePath</c>.</param>
/// <exception cref="NotSupportedException">Thrown when a selected stream is not audio.</exception>
private void PlatformInitialize(string fileName)
{
    if (_topology != null)
    {
        return;
    }

    MediaManagerState.CheckStartup();
    MediaFactory.CreateTopology(out _topology);

    // Resolve the file into a media source.
    SharpDX.MediaFoundation.MediaSource mediaSource;
    {
        SourceResolver resolver = new SourceResolver();
        ComObject source = resolver.CreateObjectFromURL(FilePath, SourceResolverFlags.MediaSource);
        mediaSource = source.QueryInterface<SharpDX.MediaFoundation.MediaSource>();
        resolver.Dispose();
        source.Dispose();
    }

    mediaSource.CreatePresentationDescriptor(out PresentationDescriptor presDesc);

    for (var i = 0; i < presDesc.StreamDescriptorCount; i++)
    {
        presDesc.GetStreamDescriptorByIndex(i, out SharpDX.Mathematics.Interop.RawBool selected, out StreamDescriptor desc);
        if (selected)
        {
            // Source node for the selected stream.
            MediaFactory.CreateTopologyNode(TopologyType.SourceStreamNode, out TopologyNode sourceNode);
            sourceNode.Set(TopologyNodeAttributeKeys.Source, mediaSource);
            sourceNode.Set(TopologyNodeAttributeKeys.PresentationDescriptor, presDesc);
            sourceNode.Set(TopologyNodeAttributeKeys.StreamDescriptor, desc);

            MediaFactory.CreateTopologyNode(TopologyType.OutputNode, out TopologyNode outputNode);

            // Songs must be pure audio; anything else is rejected.
            var typeHandler = desc.MediaTypeHandler;
            var majorType = typeHandler.MajorType;
            if (majorType != MediaTypeGuids.Audio)
            {
                throw new NotSupportedException("The song contains video data!");
            }

            // Route the stream to the default audio renderer.
            MediaFactory.CreateAudioRendererActivate(out Activate activate);
            outputNode.Object = activate;

            _topology.AddNode(sourceNode);
            _topology.AddNode(outputNode);
            sourceNode.ConnectOutput(0, outputNode, 0);

            sourceNode.Dispose();
            outputNode.Dispose();
            typeHandler.Dispose();
            activate.Dispose();
        }
        desc.Dispose();
    }

    presDesc.Dispose();
    mediaSource.Dispose();
}
/// <summary>
/// Resolves the <see cref="SourceText"/> described by the given info via the source resolver.
/// </summary>
/// <param name="sourceTextInfo">Describes the source text to resolve.</param>
/// <returns>The resolved source text.</returns>
public SourceText ResolveSourceText(SourceTextInfo sourceTextInfo)
{
    return SourceResolver.ResolveSource(sourceTextInfo);
}
/// <summary>
/// Routes source creation through the IoC container so every resolved
/// view/view-model is a fresh container-managed instance.
/// </summary>
protected override void SetViewFactory()
{
    SourceResolver.SetFactory(sourceType => container.GetNewInstance(sourceType));
}
/// <summary>
/// Builds the Media Foundation playback topology for <c>FileName</c>. Video
/// streams are routed to a sample-grabber sink (RGB32); audio streams to the
/// default audio renderer. Also reads the frame size and total duration.
/// </summary>
private void PlatformInitialize()
{
    if (Topology != null)
    {
        return;
    }

    //MediaManagerState.CheckStartup();
    MediaFactory.CreateTopology(out _topology);

    // Resolve the file into a media source.
    SharpDX.MediaFoundation.MediaSource mediaSource;
    {
        SourceResolver resolver = new SourceResolver();
        ObjectType otype;
        ComObject source = resolver.CreateObjectFromURL(FileName, SourceResolverFlags.MediaSource, null, out otype);
        mediaSource = source.QueryInterface<SharpDX.MediaFoundation.MediaSource>();
        resolver.Dispose();
        source.Dispose();
    }

    PresentationDescriptor presDesc;
    mediaSource.CreatePresentationDescriptor(out presDesc);

    for (var i = 0; i < presDesc.StreamDescriptorCount; i++)
    {
        RawBool selected = false;
        StreamDescriptor desc;
        presDesc.GetStreamDescriptorByIndex(i, out selected, out desc);
        if (selected)
        {
            TopologyNode sourceNode;
            MediaFactory.CreateTopologyNode(TopologyType.SourceStreamNode, out sourceNode);
            sourceNode.Set(TopologyNodeAttributeKeys.Source, mediaSource);
            sourceNode.Set(TopologyNodeAttributeKeys.PresentationDescriptor, presDesc);
            sourceNode.Set(TopologyNodeAttributeKeys.StreamDescriptor, desc);

            TopologyNode outputNode;
            MediaFactory.CreateTopologyNode(TopologyType.OutputNode, out outputNode);

            var majorType = desc.MediaTypeHandler.MajorType;
            if (majorType == MediaTypeGuids.Video)
            {
                Activate activate;
                SampleGrabber = new VideoSampleGrabber();
                _mediaType = new MediaType();
                _mediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
                // Specify that we want the data to come in as RGB32.
                _mediaType.Set(MediaTypeAttributeKeys.Subtype, new Guid("00000016-0000-0010-8000-00AA00389B71"));
                MediaFactory.CreateSampleGrabberSinkActivate(_mediaType, SampleGrabber, out activate);
                outputNode.Object = activate;

                // MF_MT_FRAME_SIZE packs width in the upper 32 bits and height in
                // the lower 32 bits. The previous mask (0x0000FFFF) kept only 16
                // of the 32 height bits; mask the full lower 32 bits instead.
                long frameSize = desc.MediaTypeHandler.CurrentMediaType.Get<long>(MediaTypeAttributeKeys.FrameSize);
                Width = (int)(frameSize >> 32);
                Height = (int)(frameSize & 0xFFFFFFFF);
            }
            if (majorType == MediaTypeGuids.Audio)
            {
                Activate activate;
                MediaFactory.CreateAudioRendererActivate(out activate);
                outputNode.Object = activate;
            }

            _topology.AddNode(sourceNode);
            _topology.AddNode(outputNode);
            sourceNode.ConnectOutput(0, outputNode, 0);

            Duration = new TimeSpan(presDesc.Get<long>(PresentationDescriptionAttributeKeys.Duration));

            sourceNode.Dispose();
            outputNode.Dispose();
        }
        desc.Dispose();
    }

    presDesc.Dispose();
    mediaSource.Dispose();

    VideoFrame = new Texture2D(Game.Instance.GraphicsDevice, Width, Height, ColorFormat.Bgra8, false);
}
/// <summary>
/// Builds the Media Foundation playback topology for <c>FileName</c>. Video
/// streams are routed to a sample-grabber sink (RGB32); audio streams to the
/// default audio renderer.
/// </summary>
private void PlatformInitialize()
{
    if (Topology != null)
    {
        return;
    }

    MediaManagerState.CheckStartup();
    MediaFactory.CreateTopology(out _topology);

    // Resolve the file into a media source.
    SharpDX.MediaFoundation.MediaSource mediaSource;
    {
        SourceResolver resolver = new SourceResolver();
        ObjectType otype;
        ComObject source = resolver.CreateObjectFromURL(FileName, SourceResolverFlags.MediaSource, null, out otype);
        mediaSource = source.QueryInterface<SharpDX.MediaFoundation.MediaSource>();
        resolver.Dispose();
        source.Dispose();
    }

    PresentationDescriptor presDesc;
    mediaSource.CreatePresentationDescriptor(out presDesc);

    for (var i = 0; i < presDesc.StreamDescriptorCount; i++)
    {
        SharpDX.Mathematics.Interop.RawBool selected;
        StreamDescriptor desc;
        presDesc.GetStreamDescriptorByIndex(i, out selected, out desc);
        if (selected)
        {
            // Source node for the selected stream.
            TopologyNode sourceNode;
            MediaFactory.CreateTopologyNode(TopologyType.SourceStreamNode, out sourceNode);
            sourceNode.Set(TopologyNodeAttributeKeys.Source, mediaSource);
            sourceNode.Set(TopologyNodeAttributeKeys.PresentationDescriptor, presDesc);
            sourceNode.Set(TopologyNodeAttributeKeys.StreamDescriptor, desc);

            TopologyNode outputNode;
            MediaFactory.CreateTopologyNode(TopologyType.OutputNode, out outputNode);

            var majorType = desc.MediaTypeHandler.MajorType;
            if (majorType == MediaTypeGuids.Video)
            {
                Activate activate;
                SampleGrabber = new VideoSampleGrabber();
                _mediaType = new MediaType();
                _mediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
                // Specify that we want the data to come in as RGB32.
                _mediaType.Set(MediaTypeAttributeKeys.Subtype, new Guid("00000016-0000-0010-8000-00AA00389B71"));
                MediaFactory.CreateSampleGrabberSinkActivate(_mediaType, SampleGrabber, out activate);
                outputNode.Object = activate;
            }
            if (majorType == MediaTypeGuids.Audio)
            {
                Activate activate;
                MediaFactory.CreateAudioRendererActivate(out activate);
                outputNode.Object = activate;
            }

            _topology.AddNode(sourceNode);
            _topology.AddNode(outputNode);
            sourceNode.ConnectOutput(0, outputNode, 0);
            sourceNode.Dispose();
            outputNode.Dispose();
        }
        desc.Dispose();
    }

    presDesc.Dispose();
    mediaSource.Dispose();
}
/// <summary>
/// Registers the navigable sources for this module.
/// </summary>
public void Initialize()
{
    // ViewA is registered by its view type.
    SourceResolver.RegisterTypeForNavigation<ViewA>(); // With View
    // ViewB is registered by its ViewModel (resolved through a DataTemplate).
    SourceResolver.RegisterTypeForNavigation<ViewBViewModel>("ViewB"); // With ViewModel (+ DataTemplate)
}