public MediaTypeItems()
{
    // A single debug logger feeds the media factory used by this item source.
    var log = new Gnosis.Utilities.DebugLogger();

    logger = log;
    characterSetFactory = new CharacterSetFactory();
    mediaFactory = new MediaFactory(log);
}
public MainWindow()
{
    InitializeComponent();

    // Logging must exist before anything else; if it cannot be created the
    // application cannot continue, so fail loudly with the original cause.
    try
    {
        this.logger = Gnosis.Utilities.Log4NetLogger.GetDefaultLogger(typeof(MainWindow));
    }
    catch (Exception loggerEx)
    {
        throw new ApplicationException("Could not initialize logger", loggerEx);
    }

    try
    {
        logger.Info("Initializing Alexandria");

        // Core factories and security context (order matters: the
        // repositories and controllers below take these as dependencies).
        mediaFactory = new MediaFactory(logger);
        securityContext = new SecurityContext(mediaFactory);
        tagTypeFactory = new TagTypeFactory();

        // SQLite-backed repositories; each is Initialize()d before use.
        mediaRepository = new SQLiteMediaRepository(logger, mediaFactory);
        mediaRepository.Initialize();
        linkRepository = new SQLiteLinkRepository(logger);
        linkRepository.Initialize();
        tagRepository = new SQLiteTagRepository(logger, tagTypeFactory);
        tagRepository.Initialize();
        metadataRepository = new SQLiteMetadataRepository(logger, securityContext, mediaFactory);
        metadataRepository.Initialize();
        marqueeRepository = new SQLiteMarqueeRepository(logger);
        // NOTE(review): marqueeRepository is never Initialize()d here, unlike
        // every other repository — confirm whether that is intentional.

        audioStreamFactory = new AudioStreamFactory();

        // VLC-based video player; the lambda defers host resolution until the
        // player actually needs a window to render into.
        videoPlayer = new Gnosis.Video.Vlc.VideoPlayerControl();
        videoPlayer.Initialize(logger, () => GetVideoHost());

        // Controllers coordinate the repositories/factories created above.
        catalogController = new CatalogController(logger, securityContext, mediaFactory, mediaRepository, linkRepository, tagRepository, metadataRepository, audioStreamFactory);
        spiderFactory = new SpiderFactory(logger, securityContext, mediaFactory, linkRepository, tagRepository, mediaRepository, metadataRepository, audioStreamFactory);
        metadataController = new MediaItemController(logger, securityContext, mediaFactory, linkRepository, tagRepository, metadataRepository);
        taskController = new TaskController(logger, mediaFactory, videoPlayer, spiderFactory, metadataController, marqueeRepository, metadataRepository);
        tagController = new TagController(logger, tagRepository);
        commandController = new CommandController(logger);

        // Views come last since they depend on the controllers.
        taskResultView.Initialize(logger, securityContext, mediaFactory, metadataController, taskController, tagController, videoPlayer);
        //taskManagerView.Initialize(logger, taskController, taskResultView);
        searchView.Initialize(logger, taskController, taskResultView);
        commandView.Initialize(logger, commandController, taskController, taskResultView);

        // Keep the screen saver from interrupting media playback.
        ScreenSaver.Disable();
    }
    catch (Exception ex)
    {
        // Initialization failures are logged rather than rethrown so the
        // window still opens (possibly in a degraded state).
        logger.Error("MainWindow.ctor", ex);
    }
}
protected MediaTestBase()
{
    // Shared fixture: debug logger + media factory, backed by an in-memory
    // SQLite database so every test run starts from a clean slate.
    logger = new DebugLogger();
    mediaFactory = new MediaFactory(logger);

    var connectionFactory = new SQLiteConnectionFactory();
    connection = connectionFactory.Create("Data Source=:memory:;Version=3;");
    connection.Open();

    repository = new SQLiteMediaRepository(logger, mediaFactory, connection);
    repository.Initialize();
}
public RssDocuments()
{
    // The media factory reports through the same debug logger as the tests.
    var debugLog = new Gnosis.Utilities.DebugLogger();

    this.logger = debugLog;
    this.mediaFactory = new MediaFactory(debugLog);
}
/// <summary>
/// Configures a Media Foundation sink writer for video (and optionally audio)
/// encoding, and prepares the staging texture / sample used to submit frames.
/// </summary>
/// <param name="Args">Capture settings: file name, frame rate, image/audio providers, quality.</param>
/// <param name="Device">Direct3D device the frames live on.</param>
public MfWriter(VideoWriterArgs Args, Device Device)
{
    // Direct2D editors hand us NV12 frames directly; everything else is RGB32.
    if (Args.ImageProvider.EditorType == typeof(Direct2DEditor))
    {
        _inputFormat = VideoFormatGuids.NV12;
    }
    else
    {
        _inputFormat = VideoFormatGuids.Rgb32;
    }

    _device = Device;

    // Frame duration in 100-ns media-time units (10^7 / fps).
    _frameDuration = TenPower7 / Args.FrameRate;

    // FIX: the sink-writer attributes are a COM wrapper that was previously
    // leaked; dispose them once the writer has been created.
    using (var attr = GetSinkWriterAttributes(Device))
    {
        _writer = MediaFactory.CreateSinkWriterFromURL(Args.FileName, null, attr);
    }

    var w = Args.ImageProvider.Width;
    var h = Args.ImageProvider.Height;
    _bufferSize = w * h * 4; // RGB32: 4 bytes per pixel.

    // Output (encoded) video stream type.
    using (var mediaTypeOut = new MediaType())
    {
        mediaTypeOut.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
        mediaTypeOut.Set(MediaTypeAttributeKeys.Subtype, _encodingFormat);
        mediaTypeOut.Set(MediaTypeAttributeKeys.AvgBitrate, BitRate);
        mediaTypeOut.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
        mediaTypeOut.Set(MediaTypeAttributeKeys.FrameSize, PackLong(w, h));
        mediaTypeOut.Set(MediaTypeAttributeKeys.FrameRate, PackLong(Args.FrameRate, 1));
        mediaTypeOut.Set(MediaTypeAttributeKeys.PixelAspectRatio, PackLong(1, 1));

        _writer.AddStream(mediaTypeOut, out _);
    }

    // Input (uncompressed) video stream type plus quality-based rate control.
    using (var mediaTypeIn = new MediaType())
    {
        mediaTypeIn.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
        mediaTypeIn.Set(MediaTypeAttributeKeys.Subtype, _inputFormat);
        mediaTypeIn.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
        mediaTypeIn.Set(MediaTypeAttributeKeys.FrameSize, PackLong(w, h));
        mediaTypeIn.Set(MediaTypeAttributeKeys.FrameRate, PackLong(Args.FrameRate, 1));
        mediaTypeIn.Set(MediaTypeAttributeKeys.PixelAspectRatio, PackLong(1, 1));
        mediaTypeIn.Set(MediaTypeAttributeKeys.AllSamplesIndependent, 1);

        // FIX: MediaAttributes is a COM wrapper and was previously leaked;
        // dispose it after the writer has consumed the encoder settings.
        using (var encoderParams = new MediaAttributes(2))
        {
            encoderParams.Set(RateControlModeKey, RateControlMode.Quality);
            encoderParams.Set(QualityKey, Args.VideoQuality);

            _writer.SetInputMediaType(VideoStreamIndex, mediaTypeIn, encoderParams);
        }
    }

    // Optional audio stream: AAC out, PCM in.
    if (Args.AudioProvider != null)
    {
        var wf = Args.AudioProvider.WaveFormat;
        _audioInBytesPerSecond = wf.SampleRate * wf.Channels * wf.BitsPerSample / 8;

        using (var audioTypeOut = GetMediaType(wf))
        {
            audioTypeOut.Set(MediaTypeAttributeKeys.Subtype, _encodedAudioFormat);
            audioTypeOut.Set(MediaTypeAttributeKeys.AudioAvgBytesPerSecond, GetAacBitrate(Args.AudioQuality));
            _writer.AddStream(audioTypeOut, out _);
        }

        using (var audioTypeIn = GetMediaType(wf))
        {
            audioTypeIn.Set(MediaTypeAttributeKeys.Subtype, AudioFormatGuids.Pcm);
            _writer.SetInputMediaType(AudioStreamIndex, audioTypeIn, null);
        }
    }

    _writer.BeginWriting();

    // CPU-readable staging texture the captured frame is copied into.
    _copyTexture = new Texture2D(Device, new Texture2DDescription
    {
        CpuAccessFlags = CpuAccessFlags.Read,
        BindFlags = BindFlags.None,
        Format = Format.B8G8R8A8_UNorm,
        Width = w,
        Height = h,
        OptionFlags = ResourceOptionFlags.None,
        MipLevels = 1,
        ArraySize = 1,
        SampleDescription = { Count = 1, Quality = 0 },
        Usage = ResourceUsage.Staging
    });

    // Empty video sample; the DXGI buffer below is attached to it manually.
    _sample = MediaFactory.CreateVideoSampleFromSurface(null);

    // Create the media buffer from the texture
    MediaFactory.CreateDXGISurfaceBuffer(typeof(Texture2D).GUID, _copyTexture, 0, false, out _mediaBuffer);

    using (var buffer2D = _mediaBuffer.QueryInterface<Buffer2D>())
        _mediaBuffer.CurrentLength = buffer2D.ContiguousLength;

    // Attach the created buffer to the sample
    _sample.AddBuffer(_mediaBuffer);
}
/// <summary>
/// Releases Media Foundation. Call once during application teardown.
/// </summary>
public static void Shutdown() => MediaFactory.Shutdown();
/// <summary>
/// Feeds one sample into the MFT and attempts to drain one output sample.
/// </summary>
/// <param name="inputSample">Sample to process; null is treated as "nothing to do".</param>
/// <param name="outputSample">
/// Receives the produced sample on success; may be null on
/// MF_E_TRANSFORM_NEED_MORE_INPUT even though the method returns true.
/// </param>
/// <returns>
/// True when processing can continue (output produced, more input needed);
/// false only for a null input. Other HRESULTs throw via CheckError().
/// </returns>
public unsafe bool ProcessSample(Sample inputSample, out Sample outputSample)
{
    bool Result = false;
    outputSample = null;

    if (inputSample == null)
    {
        return false;
    }

    processor.ProcessInput(0, inputSample, 0);

    processor.GetOutputStreamInfo(0, out TOutputStreamInformation streamInfo);

    MftOutputStreamInformationFlags flags = (MftOutputStreamInformationFlags)streamInfo.DwFlags;

    // If the MFT does not allocate its own output samples, we must supply one.
    bool createSample = !flags.HasFlag(MftOutputStreamInformationFlags.MftOutputStreamProvidesSamples);

    // Create output sample
    if (createSample)
    {
        outputSample = MediaFactory.CreateSample();
        outputSample.SampleTime = inputSample.SampleTime;
        outputSample.SampleDuration = inputSample.SampleDuration;
        outputSample.SampleFlags = inputSample.SampleFlags;
        using (var mediaBuffer = MediaFactory.CreateMemoryBuffer(streamInfo.CbSize))
        {
            outputSample.AddBuffer(mediaBuffer);
        }
    }

    TOutputDataBuffer[] outputDataBuffer = new TOutputDataBuffer[1];
    var data = new TOutputDataBuffer
    {
        DwStatus = 0,
        DwStreamID = 0,
        PSample = outputSample,
        PEvents = null,
    };
    outputDataBuffer[0] = data;

    var res = processor.TryProcessOutput(TransformProcessOutputFlags.None, outputDataBuffer, out TransformProcessOutputStatus status);

    if (res == SharpDX.Result.Ok)
    {
        if (outputSample == null)
        {
            // The MFT allocated its own sample; take it from the data buffer.
            outputSample = outputDataBuffer[0].PSample;
        }
        Debug.Assert(outputSample != null, "res.Success && outputSample != null");
        Result = true;
    }
    else if (res == SharpDX.MediaFoundation.ResultCode.TransformNeedMoreInput)
    {
        // Not an error: the caller should feed more input and retry.
        logger.Warn(res.ToString() + " TransformNeedMoreInput");
        Result = true;
    }
    else if (res == SharpDX.MediaFoundation.ResultCode.TransformStreamChange)
    {
        // Output format changed: renegotiate the output type, then the caller
        // retries on the next call (Result stays false here, as before).
        logger.Warn(res.ToString() + " TransformStreamChange");

        MediaType newOutputType = null;
        try
        {
            processor.TryGetOutputAvailableType(outputStreamId, 0, out newOutputType);
            processor.SetOutputType(outputStreamId, newOutputType, 0);

            if (OutputMediaType != null)
            {
                OutputMediaType.Dispose();
                OutputMediaType = null;
            }
            OutputMediaType = newOutputType;
            // FIX: clear the local after transferring ownership so the finally
            // block below does not dispose the media type we just stored —
            // previously it was disposed immediately, leaving OutputMediaType
            // pointing at a disposed COM object.
            newOutputType = null;

            logger.Info("============== NEW OUTPUT TYPE==================");
            logger.Info(MfTool.LogMediaType(OutputMediaType));
        }
        finally
        {
            // Only disposes when an exception occurred before ownership transfer.
            newOutputType?.Dispose();
            newOutputType = null;
        }
    }
    else
    {
        res.CheckError();
    }

    return Result;
}
public RemoteRssTests()
{
    // Tests log through the debug logger; the media factory shares it.
    var log = new Gnosis.Utilities.DebugLogger();
    logger = log;
    mediaFactory = new MediaFactory(log);
}
public RssDocuments()
{
    // Debug logger first, since the media factory requires one.
    var sink = new Gnosis.Utilities.DebugLogger();

    this.logger = sink;
    this.mediaFactory = new MediaFactory(sink);
}
// Creates streaming sources (byte stream, media source, SourceReaderEx and the
// output WaveFormat) for a niconico video, logging in over HTTP first.
// NOTE(review): every HTTP call blocks on .Result — deadlock-prone on a
// sync-context thread; confirm callers always invoke this off the UI thread.
public static MediaFoundationStreamingSources CreateFromニコ動(string user_id, string password, string video_id, WaveFormat soundDeviceFormat)
{
    var sources = new MediaFoundationStreamingSources();

    #region " ニコ動から SourceReaderEx を生成する。"
    //----------------
    // Lazily create the shared HttpClient.
    if (null == _HttpClient)
    {
        _HttpClient = new HttpClient();
    }

    // Log in.
    // NOTE(review): the login response is disposed without being inspected —
    // a failed login is not detected here; confirm failures surface later.
    var content = new FormUrlEncodedContent(new Dictionary <string, string> {
        { "mail", user_id },
        { "password", password },
        { "next_url", string.Empty },
    });
    using (var responseLogin = _HttpClient.PostAsync("https://secure.nicovideo.jp/secure/login?site=niconico", content).Result)
    {
    }

    // Access the watch page (must happen before the getflv call).
    var responseWatch = _HttpClient.GetStringAsync($"http://www.nicovideo.jp/watch/{video_id}").Result;

    // Fetch the video info; the response is a URL-encoded key/value map.
    var responseGetFlv = _HttpClient.GetStringAsync($"http://flapi.nicovideo.jp/api/getflv/{video_id}").Result;
    var flvmap = HttpUtility.ParseQueryString(responseGetFlv);
    var flvurl = flvmap["url"];

    // Get the length (bytes) and content type of the movie from the response
    // headers only, without downloading the body.
    ulong 長さbyte = 0;
    string contentType = "";
    using (var requestMovie = new HttpRequestMessage(HttpMethod.Get, flvurl))
    using (var responseMovie = _HttpClient.SendAsync(requestMovie, HttpCompletionOption.ResponseHeadersRead).Result)
    {
        長さbyte = (ulong)(responseMovie.Content.Headers.ContentLength);
        contentType = responseMovie.Content.Headers.ContentType.MediaType;
    }

    // Create the IMFByteStream over an HTTP random-access stream.
    sources._ByteStream = new ByteStream(IntPtr.Zero);
    sources._HttpRandomAccessStream = new HttpRandomAccessStream(_HttpClient, 長さbyte, flvurl);
    sources._unkHttpRandomAccessStream = new ComObject(Marshal.GetIUnknownForObject(sources._HttpRandomAccessStream));
    MediaFactory.CreateMFByteStreamOnStreamEx(sources._unkHttpRandomAccessStream, sources._ByteStream);

    using (var 属性 = sources._ByteStream.QueryInterfaceOrNull <MediaAttributes>())
    {
        // Set the content-type so the source resolver can pick a handler.
        属性.Set(ByteStreamAttributeKeys.ContentType, contentType);
    }

    // Use a SourceResolver to obtain a MediaSource from the IMFByteStream.
    using (var sourceResolver = new SourceResolver())
    using (var unkMediaSource = sourceResolver.CreateObjectFromStream(sources._ByteStream, null, SourceResolverFlags.MediaSource))
    {
        sources._MediaSource = unkMediaSource.QueryInterface <MediaSource>();

        // Create a SourceReaderEx from the MediaSource.
        using (var 属性 = new MediaAttributes())
        {
            // If the GPU supports DXVA, use it for decoding.
            属性.Set(SourceReaderAttributeKeys.D3DManager, DXResources.Instance.MFDXGIDeviceManager);

            // Enable advanced video processing. (This attribute is bool-typed...)
            属性.Set(SourceReaderAttributeKeys.EnableAdvancedVideoProcessing, true);

            // With advanced video processing enabled, disable this one. (...while this one is int-typed.)
            属性.Set(SinkWriterAttributeKeys.ReadwriteDisableConverters, 0);

            // Create the SourceReaderEx using these attributes.
            using (var sourceReader = new SourceReader(sources._MediaSource, 属性))
            {
                sources._SourceReaderEx = sourceReader.QueryInterfaceOrNull <SourceReaderEx>();
            }
        }
    }
    //----------------
    #endregion

    #region " WaveFormat を生成。"
    //----------------
    // Output format: 32-bit IEEE float at the sound device's rate/channels.
    sources._Audioのフォーマット = new WaveFormat(
        soundDeviceFormat.SampleRate,
        32,
        soundDeviceFormat.Channels,
        AudioEncoding.IeeeFloat);
    //----------------
    #endregion

    sources._SourceReaderEx生成後の初期化();

    return(sources);
}
/// <summary>
/// Enumerates every storage location holding SOP instances that match
/// <paramref name="query"/> with the given media properties.
/// </summary>
public virtual IEnumerable <IStorageLocation> RetrieveSopInstances(IObjectId query, DicomMediaProperties mediaInfo)
{
    var media = MediaFactory.Create(query, mediaInfo);

    return StorageService.EnumerateLocation(media);
}
/// <summary>
/// Resolves the storage location of the single SOP instance that matches
/// <paramref name="query"/> with the given media properties.
/// </summary>
public virtual IStorageLocation RetrieveSopInstance(IObjectId query, DicomMediaProperties mediaInfo)
    => StorageService.GetLocation(MediaFactory.Create(query, mediaInfo));
// Builds the audio-only playback topology for this song.
// NOTE(review): the fileName parameter is unused — the member FilePath is
// resolved instead; confirm this is intentional.
private void PlatformInitialize(string fileName)
{
    // Topology is only built once per instance.
    if (_topology != null)
    {
        return;
    }

    MediaManagerState.CheckStartup();

    MediaFactory.CreateTopology(out _topology);

    SharpDX.MediaFoundation.MediaSource mediaSource;
    {
        // Resolve the path into a MediaSource, then release the resolver and
        // the intermediate COM object.
        SourceResolver resolver;
        MediaFactory.CreateSourceResolver(out resolver);

        ObjectType otype;
        ComObject source;
        resolver.CreateObjectFromURL(FilePath, (int)SourceResolverFlags.MediaSource, null, out otype, out source);
        mediaSource = source.QueryInterface <SharpDX.MediaFoundation.MediaSource>();

        resolver.Dispose();
        source.Dispose();
    }

    PresentationDescriptor presDesc;
    mediaSource.CreatePresentationDescriptor(out presDesc);

    // Build a source-node/output-node pair for each selected stream.
    for (var i = 0; i < presDesc.StreamDescriptorCount; i++)
    {
        Bool selected;
        StreamDescriptor desc;
        presDesc.GetStreamDescriptorByIndex(i, out selected, out desc);

        if (selected)
        {
            // Source node: wraps this stream of the media source.
            TopologyNode sourceNode;
            MediaFactory.CreateTopologyNode(TopologyType.SourceStreamNode, out sourceNode);
            sourceNode.Set(TopologyNodeAttributeKeys.Source, mediaSource);
            sourceNode.Set(TopologyNodeAttributeKeys.PresentationDescriptor, presDesc);
            sourceNode.Set(TopologyNodeAttributeKeys.StreamDescriptor, desc);

            TopologyNode outputNode;
            MediaFactory.CreateTopologyNode(TopologyType.OutputNode, out outputNode);

            // Songs must be pure audio; any video stream is rejected.
            var majorType = desc.MediaTypeHandler.MajorType;
            if (majorType != MediaTypeGuids.Audio)
            {
                throw new NotSupportedException("The song contains video data!");
            }

            // Output node renders to the default audio device.
            Activate activate;
            MediaFactory.CreateAudioRendererActivate(out activate);
            outputNode.Object = activate;

            _topology.AddNode(sourceNode);
            _topology.AddNode(outputNode);
            sourceNode.ConnectOutput(0, outputNode, 0);

            sourceNode.Dispose();
            outputNode.Dispose();
        }

        desc.Dispose();
    }

    presDesc.Dispose();
    mediaSource.Dispose();
}
// Sets up a Media Foundation video-processor MFT to convert RGB32 frames of
// the given size to NV12 on the supplied D3D device, plus the GPU texture and
// sample used to feed it.
public MfColorConverter(int Width, int Height, Device Device)
{
    // Activate the first registered video-processor transform.
    var transforms = MediaFactory.FindTransform(TransformCategoryGuids.VideoProcessor, TransformEnumFlag.All);
    _colorConverter = transforms[0].ActivateObject <Transform>();

    // Hand the converter our D3D device so it can operate on GPU textures.
    _deviceMan = new DXGIDeviceManager();
    _deviceMan.ResetDevice(Device);

    _colorConverter.ProcessMessage(TMessageType.SetD3DManager, _deviceMan.NativePointer);

    // Input type: RGB32, 4 bytes per pixel.
    using (var mediaTypeIn = new MediaType())
    {
        mediaTypeIn.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
        mediaTypeIn.Set(MediaTypeAttributeKeys.Subtype, VideoFormatGuids.Rgb32);
        mediaTypeIn.Set(MediaTypeAttributeKeys.FrameSize, MfWriter.PackLong(Width, Height));
        mediaTypeIn.Set(MediaTypeAttributeKeys.DefaultStride, Width * 4);
        mediaTypeIn.Set(MediaTypeAttributeKeys.FixedSizeSamples, 1);
        mediaTypeIn.Set(MediaTypeAttributeKeys.SampleSize, Width * Height * 4);

        _colorConverter.SetInputType(0, mediaTypeIn, 0);
    }

    // Output type: NV12 is 12 bits per pixel, hence stride = Width * 12 / 8.
    var outputStride = Width * 12 / 8;
    var outputSampleSize = Height * outputStride;

    using (var mediaTypeOut = new MediaType())
    {
        mediaTypeOut.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
        mediaTypeOut.Set(MediaTypeAttributeKeys.Subtype, VideoFormatGuids.NV12);
        mediaTypeOut.Set(MediaTypeAttributeKeys.FrameSize, MfWriter.PackLong(Width, Height));
        mediaTypeOut.Set(MediaTypeAttributeKeys.DefaultStride, outputStride);
        mediaTypeOut.Set(MediaTypeAttributeKeys.FixedSizeSamples, 1);
        mediaTypeOut.Set(MediaTypeAttributeKeys.SampleSize, outputSampleSize);

        _colorConverter.SetOutputType(0, mediaTypeOut, 0);
    }

    _colorConverter.ProcessMessage(TMessageType.NotifyBeginStreaming, IntPtr.Zero);

    // GPU texture the source frame is copied into before conversion.
    _copyTexture = new Texture2D(Device, new Texture2DDescription
    {
        CpuAccessFlags = CpuAccessFlags.None,
        BindFlags = BindFlags.RenderTarget | BindFlags.ShaderResource,
        Format = Format.B8G8R8A8_UNorm,
        Width = Width,
        Height = Height,
        OptionFlags = ResourceOptionFlags.None,
        MipLevels = 1,
        ArraySize = 1,
        SampleDescription = { Count = 1, Quality = 0 },
        Usage = ResourceUsage.Default
    });

    // Empty video sample; the DXGI buffer below is attached to it manually.
    _inputSample = MediaFactory.CreateVideoSampleFromSurface(null);

    // Create the media buffer from the texture
    MediaFactory.CreateDXGISurfaceBuffer(typeof(Texture2D).GUID, _copyTexture, 0, false, out var inputBuffer);

    using (var buffer2D = inputBuffer.QueryInterface <Buffer2D>())
        inputBuffer.CurrentLength = buffer2D.ContiguousLength;

    // Attach the created buffer to the sample
    _inputSample.AddBuffer(inputBuffer);
}
public CatalogSpiderTests()
{
    // Dependency chain: logger -> media factory -> security context.
    var log = new DebugLogger();

    logger = log;
    mediaFactory = new MediaFactory(log);
    securityContext = new SecurityContext(mediaFactory);
}
public RemoteContentTypeItems()
{
    // The factory reports through the same debug logger the tests use.
    var debugLogger = new Gnosis.Utilities.DebugLogger();

    this.logger = debugLogger;
    this.mediaFactory = new MediaFactory(debugLogger);
}
// Builds a playback topology for the file (sample-grabbed video + rendered
// audio), hands it to the media session and starts playback.
public override void PlayFile(string filename)
{
    //Load the file
    MediaSource mediaSource;
    {
        // Resolve the path into a MediaSource, then release the resolver and
        // the intermediate COM object.
        var resolver = new SourceResolver();

        ObjectType otype;
        ComObject source = resolver.CreateObjectFromURL(filename, SourceResolverFlags.MediaSource, null, out otype);
        mediaSource = source.QueryInterface <MediaSource>();

        resolver.Dispose();
        source.Dispose();
    }

    PresentationDescriptor presDesc;
    mediaSource.CreatePresentationDescriptor(out presDesc);

    // Build a source-node/output-node pair for each selected stream.
    for (int i = 0; i < presDesc.StreamDescriptorCount; i++)
    {
        SharpDX.Mathematics.Interop.RawBool selected;
        StreamDescriptor desc;
        presDesc.GetStreamDescriptorByIndex(i, out selected, out desc);

        if (selected)
        {
            TopologyNode sourceNode;
            MediaFactory.CreateTopologyNode(TopologyType.SourceStreamNode, out sourceNode);
            sourceNode.Set(TopologyNodeAttributeKeys.Source, mediaSource);
            sourceNode.Set(TopologyNodeAttributeKeys.PresentationDescriptor, presDesc);
            sourceNode.Set(TopologyNodeAttributeKeys.StreamDescriptor, desc);

            TopologyNode outputNode;
            MediaFactory.CreateTopologyNode(TopologyType.OutputNode, out outputNode);

            var majorType = desc.MediaTypeHandler.MajorType;
            if (majorType == MediaTypeGuids.Video)
            {
                Activate activate;
                videoSampler = new MFSamples();

                //retrieve size of video
                // This GUID is the frame-size attribute: width is packed in
                // the high 32 bits, height in the low 32 bits.
                long sz = desc.MediaTypeHandler.CurrentMediaType.Get <long>(new Guid("{1652c33d-d6b2-4012-b834-72030849a37d}"));
                int height = (int)(sz & uint.MaxValue), width = (int)(sz >> 32);
                _texture = new Texture2D(width, height, false, SurfaceFormat.Color);

                mt = new MediaType();
                mt.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);

                // Specify that we want the data to come in as RGB32.
                mt.Set(MediaTypeAttributeKeys.Subtype, new Guid("00000016-0000-0010-8000-00AA00389B71"));

                // Grab decoded video frames through a sample-grabber sink.
                MediaFactory.CreateSampleGrabberSinkActivate(mt, videoSampler, out activate);
                outputNode.Object = activate;
            }

            if (majorType == MediaTypeGuids.Audio)
            {
                // Audio goes straight to the default audio renderer.
                Activate activate;
                MediaFactory.CreateAudioRendererActivate(out activate);
                outputNode.Object = activate;
            }

            topology.AddNode(sourceNode);
            topology.AddNode(outputNode);
            sourceNode.ConnectOutput(0, outputNode, 0);
            sourceNode.Dispose();
            outputNode.Dispose();
        }

        desc.Dispose();
    }

    presDesc.Dispose();
    mediaSource.Dispose();

    //Play the file
    // Subscribe for session events before setting the topology so none are missed.
    cb = new MFCallback(this, session);
    session.BeginGetEvent(cb, null);
    session.SetTopology(SessionSetTopologyFlags.Immediate, topology);

    // Get the clock
    clock = session.Clock.QueryInterface <PresentationClock>();

    // Start playing.
    Playing = true;
}
/// <summary>
/// Create a topology to be played with a MediaSession from a filepath.
/// </summary>
/// <param name="mediaInputStream">Byte stream containing the music data.</param>
/// <param name="mediaSource">Receives the resolved media source; the caller owns it.</param>
/// <returns>The assembled topology (caller owns it).</returns>
/// <exception cref="InvalidOperationException">
/// Unsupported stream format, a non-audio stream, or more than 2 channels.
/// </exception>
internal static Topology CreateTopology(ByteStream mediaInputStream, out MediaSource mediaSource)
{
    // collector to dispose all the created Media Foundation native objects.
    var collector = new ObjectCollector();

    // Get the MediaSource object.
    var sourceResolver = new SourceResolver();
    collector.Add(sourceResolver);

    ComObject mediaSourceObject;

    // Try to load music
    try
    {
        mediaSourceObject = sourceResolver.CreateObjectFromStream(mediaInputStream, null, SourceResolverFlags.MediaSource | SourceResolverFlags.ContentDoesNotHaveToMatchExtensionOrMimeType);
    }
    catch (SharpDXException)
    {
        collector.Dispose();
        throw new InvalidOperationException("Music stream format not supported");
    }

    Topology retTopo;

    try
    {
        mediaSource = mediaSourceObject.QueryInterface <MediaSource>();
        collector.Add(mediaSourceObject);

        // Get the PresentationDescriptor
        PresentationDescriptor presDesc;
        mediaSource.CreatePresentationDescriptor(out presDesc);
        collector.Add(presDesc);

        // Create the topology
        MediaFactory.CreateTopology(out retTopo);

        for (var i = 0; i < presDesc.StreamDescriptorCount; i++)
        {
            RawBool selected;
            StreamDescriptor desc;
            presDesc.GetStreamDescriptorByIndex(i, out selected, out desc);
            collector.Add(desc);

            if (selected)
            {
                // Test that the audio file data is valid and supported.
                var typeHandler = desc.MediaTypeHandler;
                collector.Add(typeHandler);

                var majorType = typeHandler.MajorType;
                if (majorType != MediaTypeGuids.Audio)
                {
                    throw new InvalidOperationException("The music stream is not a valid audio stream.");
                }

                for (int mType = 0; mType < typeHandler.MediaTypeCount; mType++)
                {
                    MediaType type;
                    typeHandler.GetMediaTypeByIndex(mType, out type);
                    collector.Add(type);

                    var nbChannels = type.Get(MediaTypeAttributeKeys.AudioNumChannels);
                    if (nbChannels > 2)
                    {
                        throw new InvalidOperationException("The provided audio stream has more than 2 channels.");
                    }
                }

                // create the topology (source,...)
                // Source node: wraps this stream of the media source.
                TopologyNode sourceNode;
                MediaFactory.CreateTopologyNode(TopologyType.SourceStreamNode, out sourceNode);
                collector.Add(sourceNode);
                sourceNode.Set(TopologyNodeAttributeKeys.Source, mediaSource);
                sourceNode.Set(TopologyNodeAttributeKeys.PresentationDescriptor, presDesc);
                sourceNode.Set(TopologyNodeAttributeKeys.StreamDescriptor, desc);

                // Output node: renders to the default audio device.
                TopologyNode outputNode;
                MediaFactory.CreateTopologyNode(TopologyType.OutputNode, out outputNode);
                collector.Add(outputNode);

                Activate activate;
                MediaFactory.CreateAudioRendererActivate(out activate);
                collector.Add(activate);
                outputNode.Object = activate;

                retTopo.AddNode(sourceNode);
                retTopo.AddNode(outputNode);
                sourceNode.ConnectOutput(0, outputNode, 0);
            }
        }
    }
    finally
    {
        // Dispose all intermediate MF objects; the returned topology and the
        // out mediaSource hold their own references.
        collector.Dispose();
    }

    return(retTopo);
}
// Starts playback of the given song on whichever media backend this build
// targets, then marks the player state as Playing.
private static void PlaySong(Song song)
{
#if WINDOWS_MEDIA_ENGINE
    _mediaEngineEx.Source = song.FilePath;
    _mediaEngineEx.Load();
    _mediaEngineEx.Play();
#elif WINDOWS_MEDIA_SESSION
    // Cleanup the last song first.
    if (State != MediaState.Stopped)
    {
        _session.Stop();
        _volumeController.Dispose();
        _clock.Dispose();
    }

    // Set the new song.
    _session.SetTopology(0, song.GetTopology());

    // Get the volume interface.
    IntPtr volumeObj;
    try
    {
        MediaFactory.GetService(_session, MRPolicyVolumeService, SimpleAudioVolumeGuid, out volumeObj);
    }
    catch (Exception e)
    {
        // NOTE(review): the retry below repeats the exact same call — this
        // looks like a workaround for the service not being available
        // immediately after SetTopology; confirm, and note that `e` is
        // intentionally unused (the first failure is swallowed).
        MediaFactory.GetService(_session, MRPolicyVolumeService, SimpleAudioVolumeGuid, out volumeObj);
    }

    _volumeController = CppObject.FromPointer <SimpleAudioVolume>(volumeObj);
    _volumeController.Mute = _isMuted;
    _volumeController.MasterVolume = _volume;

    // Get the clock.
    _clock = _session.Clock.QueryInterface <PresentationClock>();

    // create the callback if it hasn't been created yet
    if (_callback == null)
    {
        _callback = new Callback();
        _session.BeginGetEvent(_callback, null);
    }

    // Start playing.
    var varStart = new Variant();
    _session.Start(null, varStart);
#elif WINDOWS_PHONE
    // Playback must happen on the UI dispatcher thread on Windows Phone.
    Deployment.Current.Dispatcher.BeginInvoke(() =>
    {
        _mediaElement.Source = new Uri(song.FilePath, UriKind.Relative);
        _mediaElement.Play();

        // Ensure only one subscribe
        _mediaElement.MediaEnded -= OnSongFinishedPlaying;
        _mediaElement.MediaEnded += OnSongFinishedPlaying;
    });
#else
    song.SetEventHandler(OnSongFinishedPlaying);
    song.Volume = _isMuted ? 0.0f : _volume;
    song.Play();
#endif
    State = MediaState.Playing;
}
public MpegAudio()
{
    // Shared debug logger, wired into the media factory under test.
    var sink = new Gnosis.Utilities.DebugLogger();
    logger = sink;
    mediaFactory = new MediaFactory(sink);
}
// Platform-specific startup: ensure Media Foundation is running, then create
// the media session used for all subsequent playback.
private static void PlatformInitialize()
{
    // Media Foundation must be started before any MF object is created.
    MediaManagerState.CheckStartup();
    MediaFactory.CreateMediaSession(null, out _session);
}