/// <summary>
/// Initializes the NAudio playback pipeline (WASAPI shared mode) and the Groorine core player.
/// </summary>
/// <param name="sampleRate">Output sample rate in Hz; used for both the wave format and the core synthesizer.</param>
/// <param name="latency">Core-player buffer latency in milliseconds, passed to <c>CreateBuffer</c>.</param>
private async Task InitializeAsync(int sampleRate, int latency)
{
    // BUG FIX: the wave format previously hard-coded 44100 Hz while GPlayer was
    // constructed with the caller-supplied sampleRate, so any other rate produced
    // pitch-shifted output. Both now use sampleRate (16-bit stereo).
    _bwp = new BufferedWaveProvider(new WaveFormat(sampleRate, 16, 2));
    await AudioSourceManager.InitializeAsync(new FileSystem()).ConfigureAwait(false);
    CorePlayer = new GPlayer(sampleRate);
    _buffer = CorePlayer.CreateBuffer(latency);
    _cts = new CancellationTokenSource();
    // NOTE(review): WasapiOut keeps its own fixed 100 ms device latency, independent
    // of the core-player buffer latency above — confirm this asymmetry is intended.
    _nativeplayer = new WasapiOut(AudioClientShareMode.Shared, true, 100);
    _nativeplayer.Init(_bwp);
    _nativeplayer.Play();
}
/// <summary>
/// Builds the UWP AudioGraph playback chain (frame input node -> device output node),
/// wires the Groorine player to it, and starts rendering.
/// </summary>
/// <remarks>
/// NOTE(review): this is <c>async void</c>, so any exception thrown here is unobservable
/// by callers. Presumably it is invoked fire-and-forget from a constructor or Loaded
/// handler — confirm, and consider returning <c>Task</c> if a caller could await it.
/// </remarks>
private async void InitializeAsync()
{
    await UpdatePlaylistAsync();
    MasterVolume = 100;
    await AudioSourceManager.InitializeAsync(new FileSystem(), "GroorineCore");

    var settings = new AudioGraphSettings(AudioRenderCategory.Media) { };
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        await new MessageDialog("Can't create AudioGraph! Application will stop...").ShowAsync();
        Application.Current.Exit();
        // BUG FIX: Exit() does not stop the current method; without this return the
        // code below would keep running against a failed creation result.
        return;
    }
    _graph = result.Graph;

    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await _graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        await new MessageDialog("Can't create DeviceOutputNode! Application will stop...").ShowAsync();
        Application.Current.Exit();
        // BUG FIX: same fall-through problem as above — bail out after requesting exit.
        return;
    }
    _deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

    // Feed the graph from a frame input node using the graph's own encoding,
    // forced to stereo; it stays stopped until the whole chain is wired up.
    AudioEncodingProperties nodeEncodingProperties = _graph.EncodingProperties;
    nodeEncodingProperties.ChannelCount = 2;
    _frameInputNode = _graph.CreateFrameInputNode(nodeEncodingProperties);
    _frameInputNode.AddOutgoingConnection(_deviceOutputNode);
    _frameInputNode.Stop();

    _player = new Player((int)nodeEncodingProperties.SampleRate);
    _player.PropertyChanged += (sender, args) =>
    {
        switch (args.PropertyName)
        {
            case nameof(_player.IsPlaying):
                // Marshal back to the UI context: when the core player has fully
                // stopped (not merely paused), clear the view-model playing state.
                _synchronizationContext.Post(o =>
                {
                    if (!_player.IsPlaying && !_player.IsPausing && IsPlaying)
                    {
                        IsPlaying = CanStop = false;
                    }
                }, null);
                break;
        }
    };

    // Pull model: each quantum, synthesize exactly the number of samples the
    // graph asks for and hand the frame to the input node.
    _frameInputNode.QuantumStarted += (sender, args) =>
    {
        var numSamplesNeeded = (uint)args.RequiredSamples;
        if (numSamplesNeeded != 0)
        {
            AudioFrame audioData = GenerateAudioData(numSamplesNeeded);
            _frameInputNode.AddFrame(audioData);
        }
    };

    _graph.Start();
    _frameInputNode.Start();

    IsInitialized = true;
}