// Builds the playback chain: AudioGraph -> frame input node -> default device output.
// Failures are best-effort: logged to the debugger and the method simply returns.
internal async Task Initialize()
{
    // Graph first; bail out (logging only) when the platform refuses to create one.
    var graphSettings = new AudioGraphSettings(AudioRenderCategory.Media);
    var creation = await AudioGraph.CreateAsync(graphSettings);
    if (creation.Status != AudioGraphCreationStatus.Success)
    {
        Debug.WriteLine(string.Format("AudioGraph Creation Error because {0}", creation.Status.ToString()));
        return;
    }
    _audioGraph = creation.Graph;

    // Next the speaker endpoint; again best-effort with a log line on failure.
    var outputCreation = await _audioGraph.CreateDeviceOutputNodeAsync();
    if (outputCreation.Status != AudioDeviceNodeCreationStatus.Success)
    {
        Debug.WriteLine(string.Format("Audio Device Output unavailable because {0}", outputCreation.Status.ToString()));
        return;
    }
    var speakerNode = outputCreation.DeviceOutputNode;
    Debug.WriteLine("Device Output Node successfully created");

    // Finally the frame input node (stored in _frameInputNode) feeding the speaker.
    CreateFrameInputNode();
    _frameInputNode.AddOutgoingConnection(speakerNode);
}
/// <summary>
/// Creates the AudioGraph and microphone input node, wires the visualizer
/// source/converter chain onto it, and starts capture.
/// Returns Task (was 'async void'): an exception thrown from an async void
/// method cannot be observed by any caller and tears down the process.
/// Callers invoking this as a statement still compile unchanged.
/// </summary>
/// <exception cref="InvalidOperationException">When graph or input-node creation fails.</exception>
async Task CreateAudioGraphAsync()
{
    var graphResult = await AudioGraph.CreateAsync(new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Media));
    if (graphResult.Status != AudioGraphCreationStatus.Success)
    {
        throw new InvalidOperationException($"Graph creation failed {graphResult.Status}");
    }
    _graph = graphResult.Graph;

    var inputNodeResult = await _graph.CreateDeviceInputNodeAsync(Windows.Media.Capture.MediaCategory.Media);
    if (inputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        throw new InvalidOperationException($"Input node creation failed {inputNodeResult.Status}");
    }
    _inputNode = inputNodeResult.DeviceInputNode;

    // Visualizer chain: mic node -> playback source -> converter -> spectrum control.
    _source = AudioVisualizer.PlaybackSource.CreateFromAudioNode(_inputNode);
    _converter = new SourceConverter();
    _converter.Source = _source.Source;
    _converter.MinFrequency = 110.0f;       // Note A2
    _converter.MaxFrequency = 3520.0f;      // Note A7
    _converter.FrequencyCount = 12 * 5 * 5; // 5 octaves, 5 bars per note
    _converter.FrequencyScale = ScaleType.Logarithmic;
    _converter.SpectrumRiseTime = TimeSpan.FromMilliseconds(20);
    _converter.SpectrumFallTime = TimeSpan.FromMilliseconds(200);
    _converter.RmsRiseTime = TimeSpan.FromMilliseconds(20); // Use RMS to gate noise, fast rise slow fall
    _converter.RmsFallTime = TimeSpan.FromMilliseconds(500);
    _converter.ChannelCount = 1;
    notesSpectrum.Source = _converter;
    _graph.Start();
}
/// <summary>
/// Creates the graph, the render/capture device nodes and the recording frame
/// output node, then raises the Ready event.
/// Returns Task (was 'async void') so the ApplicationException thrown below
/// propagates to an awaiting caller instead of crashing the process; callers
/// invoking this as a statement still compile unchanged.
/// </summary>
/// <exception cref="ApplicationException">When the AudioGraph cannot be created.</exception>
private async Task Create()
{
    OutputDevices = await DeviceInformation.FindAllAsync(MediaDevice.GetAudioRenderSelector());

    var settings = new AudioGraphSettings(AudioRenderCategory.Media)
    {
        QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency
    };
    var audioGraphResult = await AudioGraph.CreateAsync(settings);
    if (audioGraphResult.Status != AudioGraphCreationStatus.Success)
    {
        throw new ApplicationException($"Audio graph error: {audioGraphResult.Status}");
    }
    Graph = audioGraphResult.Graph;

    // Forward per-quantum callbacks to the instance handlers.
    Graph.QuantumProcessed += (audioGraph, e) => AudioGraphQuantumProcessed();
    Graph.QuantumStarted += (audioGraph, e) => AudioGraphQuantumStarted();

    InputDevice = await CreateInputDevice().ConfigureAwait(true);
    OutputDevice = await CreateOutputDevice().ConfigureAwait(true);
    RecordingOutputNode = CreateFrameOutputNode();

    // Signal listeners that the whole pipeline is wired up.
    Ready?.Invoke(this, EventArgs.Empty);
}
// Loads the countdown and "done" sound files into file input nodes and starts
// the graph. Silently does nothing when the graph or output cannot be created
// (same best-effort behavior as before).
public async Task InitializeSounds()
{
    InputNodes = new ObservableCollection<AudioFileInputNode>();
    FileInputNodesDictionary = new Dictionary<string, AudioFileInputNode>();

    var graphResult = await AudioGraph.CreateAsync(new AudioGraphSettings(AudioRenderCategory.Media));
    if (graphResult.Status != AudioGraphCreationStatus.Success)
    {
        return; // no graph — leave everything unloaded
    }
    graph = graphResult.Graph;

    // Make sure the audio output is available before loading any files.
    var outputResult = await graph.CreateDeviceOutputNodeAsync();
    if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return;
    }
    outputNode = outputResult.DeviceOutputNode;
    graph.ResetAllNodes();

    // Countdown clips come in pairs: <base>0<n>-1<ext> and <base>0<n>-2<ext>.
    for (int n = 1; n <= countdownSoundsNum; n++)
    {
        await CreateInputNodeFromFile(baseUri + countdownBaseFileName + "0" + n.ToString() + "-" + "1" + audioFileExt);
        await CreateInputNodeFromFile(baseUri + countdownBaseFileName + "0" + n.ToString() + "-" + "2" + audioFileExt);
    }
    // "Done" clips are zero-padded below 10, plain two-digit numbers after.
    for (int n = 1; n <= doneSoundsNum; n++)
    {
        await CreateInputNodeFromFile(baseUri + doneBaseFileName + ((n >= 10) ? "" : "0") + n.ToString() + audioFileExt);
    }
    graph.Start();
}
/// <summary>
/// Creates the AudioGraph, its default-device output node, and the submix node
/// feeding that output.
/// </summary>
/// <exception cref="InvalidOperationException">
/// When graph or device-output creation fails. (Was 'throw new Exception("error")' —
/// untyped and without any diagnostic detail; InvalidOperationException derives from
/// Exception, so existing catch blocks still match.)
/// </exception>
private async Task CreateAudioGraph()
{
    // Create an AudioGraph with default settings
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        throw new InvalidOperationException($"AudioGraph creation failed: {result.Status}");
    }
    _graph = result.Graph;

    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputResult = await _graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        throw new InvalidOperationException($"Device output node creation failed: {deviceOutputResult.Status}");
    }
    _deviceOutputNode = deviceOutputResult.DeviceOutputNode;

    // Create the submix only after the output node is known to be good, so a
    // failed init does not leave a half-built topology behind.
    _subMixNode = _graph.CreateSubmixNode();
    _subMixNode.AddOutgoingConnection(_deviceOutputNode);
}
// Creates the media AudioGraph and its default-device output node, reporting
// each step (or failure) through the logging singleton.
private async Task CreateAudioGraph()
{
    var creation = await AudioGraph.CreateAsync(new AudioGraphSettings(AudioRenderCategory.Media));
    if (creation.Status != AudioGraphCreationStatus.Success)
    {
        // Graph could not be created — log and leave the service uninitialized.
        Logging.SingleInstance.LogMessage("AudioGraph Creation Error because " + creation.Status);
        return;
    }
    graph = creation.Graph;

    var outputCreation = await graph.CreateDeviceOutputNodeAsync();
    if (outputCreation.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // No usable output endpoint — log and stop here.
        Logging.SingleInstance.LogMessage(String.Format("Device Output unavailable because {0}", outputCreation.Status.ToString()));
        return;
    }
    deviceOutput = outputCreation.DeviceOutputNode;
    Logging.SingleInstance.LogMessage("Device Output Node successfully created");
}
/// <summary>
/// Page entry: starts MediaPlayerElement playback of the test track and, in
/// parallel, builds a low-latency AudioGraph that decodes the same file into a
/// frame output node polled from the QuantumStarted handler.
/// ('async void' is unavoidable here — the override must match the base
/// signature; exceptions thrown after the first await are unobservable.)
/// </summary>
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
    base.OnNavigatedTo(e);

    // Playback path: the same asset is loaded a second time below for analysis.
    var mediaSource = MediaSource.CreateFromUri(new Uri("ms-appx:///Test/GirlishLover.m4a"));
    await mediaSource.OpenAsync();
    this.mpe.Source = mediaSource;
    this.mpe.MediaPlayer.MediaOpened += this.MediaPlayer_MediaOpened;

    // Analysis path: low-latency graph, file input -> frame output.
    var settings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Other)
    {
        QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency
    };
    var result = await AudioGraph.CreateAsync(settings);
    // NOTE(review): result.Status is never checked; if creation fails,
    // result.Graph is null and the next line throws — confirm and add a guard.
    this.audioGraph = result.Graph;
    this.outNode = this.audioGraph.CreateFrameOutputNode();
    this.fileNode = (await this.audioGraph.CreateFileInputNodeAsync(await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Test/GirlishLover.m4a")))).FileInputNode;
    this.fileNode.LoopCount = 0; // presumably plays once without repeats — verify against AudioFileInputNode.LoopCount semantics
    this.fileNode.AddOutgoingConnection(this.outNode);
    this.fileNode.FileCompleted += this.FileNode_FileCompleted;
    this.audioGraph.QuantumStarted += this.AudioGraph_QuantumStarted;
    this.audioGraph.Start();
}
/// <summary>
/// (Re)creates the AudioGraph used for input/output and remembers the progress
/// reporter used to surface IO progress.
/// </summary>
/// <param name="progress">Receiver for input/output progress updates.</param>
/// <returns>The raw creation result so the caller can inspect its Status.</returns>
public async Task<CreateAudioGraphResult> Init(Progress<double> progress)
{
    // Remember where to report input/output progress.
    _ioProgress = progress;

    // Throw away any graph left over from a previous initialization round.
    _audioGraph?.Dispose();
    _audioGraph = null;

    var creation = await AudioGraph.CreateAsync(new AudioGraphSettings(AudioRenderCategory.Media));
    if (creation.Status == AudioGraphCreationStatus.Success)
    {
        _audioGraph = creation.Graph;
    }
    return creation;
}
/// <summary>
/// Creates the AudioGraph and output node, preloads every mapped sound file,
/// and starts the graph.
/// </summary>
/// <returns>true when fully initialized; false on any creation failure.</returns>
public async Task <bool> InitializeSound()
{
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        return(false);
    }
    graph = result.Graph;

    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // BUG FIX: the original fell through to 'isInitialized = true' and
        // reported success even though no output node existed and the graph
        // was never started.
        return(false);
    }
    deviceOutput = deviceOutputNodeResult.DeviceOutputNode;
    graph.ResetAllNodes();

    // Preload all mapped sound assets so playback never waits on file IO.
    foreach (var soundMapping in soundMappings)
    {
        await AddFileToSoundDictionary("ms-appx:///Features/Game/Assets/" + soundMapping.Value);
    }
    graph.Start();

    isInitialized = true;
    return(isInitialized);
}
// Initializes AudioGraph
/// <summary>
/// Creates the default media AudioGraph and stores it in <c>audioGraph</c>.
/// </summary>
/// <exception cref="InvalidOperationException">When creation fails.</exception>
public async Task InitAudioGraph()
{
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    // Guard added: result.Graph was previously read unconditionally — on
    // failure it is null, producing a NullReferenceException far from the
    // real cause the first time audioGraph is used.
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        throw new InvalidOperationException($"AudioGraph creation failed: {result.Status}");
    }
    audioGraph = result.Graph;
}
/// <summary>
/// Creates the AudioGraph and its device output node.
/// BUG FIX: the original called GetResults() directly on the still-pending
/// IAsyncOperation returned by CreateAsync / CreateDeviceOutputNodeAsync,
/// which throws ("a method was called at an unexpected time") because the
/// operation has not completed. Converted to async/await, matching the other
/// CreateAudioGraph implementations in this file; the void->Task return is
/// backward compatible for statement-style callers.
/// </summary>
private async Task CreateAudioGraph()
{
    // Create an AudioGraph with default settings
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        // Cannot create graph
        return;
    }
    graph = result.Graph;

    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device output
        return;
    }
    deviceOutputNode = deviceOutputResult.DeviceOutputNode;
}
// Sets up a graph with a device output plus a 44.1 kHz stereo 32-bit float
// frame-input node whose QuantumStarted handler supplies the samples, then
// starts the graph. Silently returns on any creation failure (unchanged).
public async Task Init()
{
    var creation = await AudioGraph.CreateAsync(new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Media));
    if (creation == null || creation.Status != AudioGraphCreationStatus.Success)
    {
        return;
    }
    audioGraph = creation.Graph;

    var outputCreation = await audioGraph.CreateDeviceOutputNodeAsync();
    if (outputCreation == null || outputCreation.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return;
    }
    deviceOutputNode = outputCreation.DeviceOutputNode;

    // IEEE-float stereo frames at 44.1 kHz, generated on demand.
    var frameFormat = new AudioEncodingProperties
    {
        BitsPerSample = 32,
        ChannelCount = 2,
        SampleRate = 44100,
        Subtype = MediaEncodingSubtypes.Float,
    };
    audioFrameInputNode = audioGraph.CreateFrameInputNode(frameFormat);
    audioFrameInputNode.QuantumStarted += FrameInputNode_QuantumStarted;
    audioFrameInputNode.AddOutgoingConnection(deviceOutputNode);
    audioGraph.Start();
}
// Creates the graph and output node, records each step's status in LastStatus,
// and builds the effect chain. Returns false (disposing a half-built graph)
// on any failure.
public async Task<bool> InitializeAudioGraphAsync()
{
    var creation = await AudioGraph.CreateAsync(new AudioGraphSettings(AudioRenderCategory.Media));
    LastStatus = creation.Status.ToString();
    if (creation.Status != AudioGraphCreationStatus.Success)
    {
        return false;
    }
    _audioGraph = creation.Graph;
    _audioGraph.UnrecoverableErrorOccurred += (sender, args) => OnErrorOccurred(args);

    var outputCreation = await _audioGraph.CreateDeviceOutputNodeAsync();
    LastStatus = outputCreation.Status.ToString();
    if (outputCreation.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Do not keep a graph around that has no output.
        _audioGraph.Dispose();
        return false;
    }
    _outputNode = outputCreation.DeviceOutputNode;

    // Effects are attached only once the output node exists.
    CreateEchoEffect();
    CreateLimiterEffect();
    CreateReverbEffect();
    CreateEqualizerEffect();
    return true;
}
/// <summary>
/// Initializes the player service: creates the graph and its output node,
/// starts playback, then preloads every file listed in _filenames into _loaded.
/// Returns silently when graph or output creation fails (unchanged behavior).
/// </summary>
private async Task InitializeAsync()
{
    var creation = await AudioGraph.CreateAsync(new AudioGraphSettings(AudioRenderCategory.Media));
    if (creation.Status != AudioGraphCreationStatus.Success)
    {
        return;
    }
    _audioGraph = creation.Graph;

    var outputCreation = await _audioGraph.CreateDeviceOutputNodeAsync();
    if (outputCreation.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return;
    }
    _outputNode = outputCreation.DeviceOutputNode;
    _audioGraph.Start();

    // Resolve every configured asset up front so playback never awaits file IO.
    foreach (var kvp in _filenames)
    {
        var storageFile = await StorageFile.GetFileFromApplicationUriAsync(new Uri($"ms-appx:///Assets/{kvp.Value}"));
        _loaded.Add(kvp.Key, storageFile);
    }
}
// Idempotent initialization: builds graph + output node once; playback only
// starts when the service is not muted. Returns whether the service is usable.
public async Task<bool> InitializeAsync()
{
    // Already done — nothing to rebuild.
    if (this.IsInitialized == true)
    {
        return true;
    }

    var creation = await AudioGraph.CreateAsync(new AudioGraphSettings(AudioRenderCategory.Media));
    if (creation.Status != AudioGraphCreationStatus.Success)
    {
        return false;
    }
    _audioGraph = creation.Graph;

    var outputCreation = await _audioGraph.CreateDeviceOutputNodeAsync();
    if (outputCreation.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return false;
    }
    _outputNode = outputCreation.DeviceOutputNode;

    // Muted services keep the graph stopped until unmuted elsewhere.
    if (this.IsMute == false)
    {
        _audioGraph.Start();
    }
    this.IsInitialized = true;
    return true;
}
// Builds a speech-category graph fed by the microphone selected in the combo
// box and binds its input node to the discrete VU meter visualizer.
private async Task InitAudioMeterAsync()
{
    var creation = await AudioGraph.CreateAsync(new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Speech));
    if (creation.Status != AudioGraphCreationStatus.Success)
    {
        return; // no graph, no meter — same silent behavior as before
    }
    this.audioGraph = creation.Graph;

    // Nothing selected yet — leave the meter unwired.
    var audioDevice = (AudioDeviceComboBox.SelectedItem as ComboBoxItem);
    if (audioDevice == null)
    {
        return;
    }
    var microphone = await DeviceInformation.CreateFromIdAsync(audioDevice.Tag.ToString());
    var inProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);

    var inputCreation = await this.audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Speech, inProfile.Audio, microphone);
    if (inputCreation.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Tell the user rather than failing silently — the device exists but refused.
        var msg = new MessageDialog("Device is not available");
        await msg.ShowAsync();
        return;
    }

    this.audioGraph.Start();
    var source = PlaybackSource.CreateFromAudioNode(inputCreation.DeviceInputNode);
    AudioDiscreteVUBar.Source = source.Source;
}
/// <summary>
/// Creates a low-latency graph on the first enumerated output device plus a
/// default device input node for capture, reporting failures via dialogs.
/// </summary>
public async Task CreateAudioGraph()
{
    // Guard added: outputDevices[0] threw IndexOutOfRangeException/ArgumentException
    // when device enumeration had returned nothing (e.g. no audio endpoint).
    if (outputDevices == null || outputDevices.Count == 0)
    {
        MessageDialog noDevice = new MessageDialog("No audio output devices found", "OOPS!!");
        await noDevice.ShowAsync();
        return;
    }
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
    settings.PrimaryRenderDevice = outputDevices[0];
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        MessageDialog md = new MessageDialog("Error in creating Audio Graph", "OOPS!!");
        await md.ShowAsync();
        return;
    }
    graph = result.Graph;

    // Capture side: default input device, generic category.
    CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        MessageDialog md = new MessageDialog("Error in creating InputDeviceNode", "OOPS!!");
        await md.ShowAsync();
        return;
    }
    deviceInputNode = deviceInputNodeResult.DeviceInputNode;
}
/// <summary>
/// Factory: builds an AudioGraph targeting the default render device and wraps
/// it, together with a device output node, in an <see cref="AudioGraphOutput"/>.<br/>
/// IMPORTANT: Only 32-bit IEEEFloat format is supported!
/// </summary>
/// <param name="ChannelCount">The number of channels. Default: 2(Stereo)</param>
/// <param name="SampleRate">The sample rate. Default: 192000Hz</param>
/// <returns>A ready-to-use AudioGraphOutput.</returns>
/// <exception cref="System.Exception">When graph or device-output creation fails.</exception>
public static async Task<AudioGraphOutput> CreateAudioGraphOutput(uint ChannelCount = 2, uint SampleRate = 192000)
{
    // Fixed 32-bit float format; channel count and rate are caller-tunable.
    var format = new AudioEncodingProperties()
    {
        BitsPerSample = 32,
        ChannelCount = ChannelCount,
        SampleRate = SampleRate,
        Subtype = "Float"
    };
    var settings = new AudioGraphSettings(AudioRenderCategory.Media)
    {
        QuantumSizeSelectionMode = QuantumSizeSelectionMode.ClosestToDesired,
        EncodingProperties = format
    };

    CreateAudioGraphResult graphResult = await AudioGraph.CreateAsync(settings);
    if (graphResult.Status != AudioGraphCreationStatus.Success)
    {
        throw new Exception("AudioGraph creation error: " + graphResult.Status.ToString(), graphResult.ExtendedError);
    }

    var outputResult = await graphResult.Graph.CreateDeviceOutputNodeAsync();
    if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        throw new Exception("AudioGraph creation error: " + outputResult.Status.ToString(), outputResult.ExtendedError);
    }
    return new AudioGraphOutput(graphResult.Graph, outputResult.DeviceOutputNode);
}
/// <summary>
/// Update system sample rate asynchronously.
/// </summary>
/// <returns>The detected rate in Hz, or 0 when it cannot be determined.</returns>
public async Task<int> UpdateSampleRate()
{
    try
    {
        var creation = await AudioGraph.CreateAsync(
            new AudioGraphSettings(AudioRenderCategory.Media));
        if (creation.Status != AudioGraphCreationStatus.Success)
        {
            SetSystemSampleRate(0);
        }
        else
        {
            // A throw-away graph is enough to read the mix-format sample rate.
            var rate = (int)creation.Graph.EncodingProperties.SampleRate;
            SetSystemSampleRate(rate);
            creation.Graph.Dispose();
            return rate;
        }
    }
    catch
    {
        // Best effort by design — swallow and fall through to the 0 result.
    }
    return 0;
}
/// <summary>
/// Creates the media AudioGraph and its default-device output node, surfacing
/// progress/errors through NotifyUser and the speaker indicator color.
/// </summary>
private async Task CreateAudioGraph()
{
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
        return;
    }
    graph = result.Graph;
    // NOTE(review): assigning SampleRate on the already-created graph's
    // EncodingProperties does not reconfigure the graph — per the AudioGraph
    // docs the format must be requested via AudioGraphSettings.EncodingProperties
    // BEFORE AudioGraph.CreateAsync. Confirm intent and fix; as written this
    // line appears to have no effect on the actual render rate.
    graph.EncodingProperties.SampleRate = 44100;

    CreateAudioDeviceOutputNodeResult deviceOutputResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputResult.Status.ToString()), NotifyType.ErrorMessage);
        // Red = output unavailable.
        speakerContainer.Background = new SolidColorBrush(Colors.Red);
        return;
    }
    deviceOutputNode = deviceOutputResult.DeviceOutputNode;
    NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
    // Green = output ready.
    speakerContainer.Background = new SolidColorBrush(Colors.Green);
}
/// <summary>
/// Initializes the voice-chat audio pipeline under the initialization lock:
/// a raw GameChat AudioGraph plus the microphone input and speaker output managers.
/// </summary>
/// <exception cref="InvalidOperationException">
/// When the AudioGraph cannot be created. (Was a bare 'throw new Exception()' —
/// untyped and message-less, hiding the creation status; InvalidOperationException
/// derives from Exception so existing catch blocks still match.)
/// </exception>
public async Task Initialize()
{
    using (var release = await InitializeLock.LockAsync())
    {
        // 48 kHz mono 16-bit PCM.
        var pcmEncoding = AudioEncodingProperties.CreatePcm(48000, 1, 16);
        var result = await AudioGraph.CreateAsync(
            new AudioGraphSettings(AudioRenderCategory.GameChat)
            {
                // Raw: request no platform DSP on the render path.
                DesiredRenderDeviceAudioProcessing = AudioProcessing.Raw,
                // Redundant with the constructor argument, kept for clarity.
                AudioRenderCategory = AudioRenderCategory.GameChat,
                EncodingProperties = pcmEncoding
            }
        );
        if (result.Status != AudioGraphCreationStatus.Success)
        {
            throw new InvalidOperationException($"AudioGraph creation failed: {result.Status}");
        }
        AudioGraph = result.Graph;

        // Initialize microphone input.
        Input = await AudioInputManager.CreateAsync(AudioGraph);
        Input.MinGain = _MicVolume;
        Input.InputDeviceStateChanged += Input_InputDeviceStateChanged;
        this.InputDeviceState = Input.InputDeviceState;

        // Initialize speaker output.
        Output = await AudioOutputManager.CreateAsync(AudioGraph);
        Output.SpeakerGain = _SpeakerVolume;
    }
}
/// <summary>
/// Lazily opens an AudioFrameReader over the Kinect microphone array ("xbox
/// nui sensor"); subsequent calls reuse and re-open the cached reader.
/// Returns null when no Kinect mic is found or a graph/input node cannot be
/// created.
/// </summary>
public IAsyncOperation<AudioFrameReader> OpenAudioFrameReaderAsync()
{
    return(Task.Run(async() =>
    {
        if (AudioReader == null)
        {
            // Locate the Kinect mic among all audio capture devices by name.
            var microphones = await DeviceInformation.FindAllAsync(DeviceInformation.GetAqsFilterFromDeviceClass(DeviceClass.AudioCapture));
            var kinectMicArray = microphones.FirstOrDefault(mic => mic.Name.ToLowerInvariant().Contains("xbox nui sensor"));
            if (kinectMicArray != null)
            {
                //TODO: review parameters
                // Requested format: 16 kHz, 4 channels, 32-bit float, raw
                // processing, lowest-latency quanta.
                var settings = new AudioGraphSettings(AudioRenderCategory.Speech);
                settings.EncodingProperties = AudioEncodingProperties.CreatePcm(16000, 4, 32);
                settings.EncodingProperties.Subtype = MediaEncodingSubtypes.Float;
                settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
                settings.DesiredRenderDeviceAudioProcessing = Windows.Media.AudioProcessing.Raw;

                var audioGraphResult = await AudioGraph.CreateAsync(settings);
                if (audioGraphResult.Status == AudioGraphCreationStatus.Success)
                {
                    var inputNodeResult = await audioGraphResult.Graph.CreateDeviceInputNodeAsync(MediaCategory.Speech, audioGraphResult.Graph.EncodingProperties, kinectMicArray);
                    if (inputNodeResult.Status == AudioDeviceNodeCreationStatus.Success)
                    {
                        // The reader pulls frames from this output node.
                        var output = audioGraphResult.Graph.CreateFrameOutputNode(audioGraphResult.Graph.EncodingProperties);
                        AudioReader = new AudioFrameReader(audioGraphResult.Graph, output);
                    }
                }
            }
        }
        // Null-safe: stays null when any step above failed.
        AudioReader?.Open();
        return AudioReader;
    }).AsAsyncOperation());
}
// Builds the default media graph and its speaker output node, reporting each
// step through rootPage and coloring the speaker indicator red/green.
private async Task CreateAudioGraph()
{
    var creation = await AudioGraph.CreateAsync(new AudioGraphSettings(AudioRenderCategory.Media));
    if (creation.Status != AudioGraphCreationStatus.Success)
    {
        // Graph creation refused — report and stop.
        rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", creation.Status.ToString()), NotifyType.ErrorMessage);
        return;
    }
    graph = creation.Graph;

    var outputCreation = await graph.CreateDeviceOutputNodeAsync();
    if (outputCreation.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // No usable output endpoint — report and flag the indicator red.
        rootPage.NotifyUser(String.Format("Device Output unavailable because {0}", outputCreation.Status.ToString()), NotifyType.ErrorMessage);
        speakerContainer.Background = new SolidColorBrush(Colors.Red);
        return;
    }
    deviceOutput = outputCreation.DeviceOutputNode;
    rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
    speakerContainer.Background = new SolidColorBrush(Colors.Green);
}
/// <summary>
/// Plays the "sent" notification sound on a throw-away low-latency graph.
/// Returns Task (was 'async void') so callers can await and observe failures;
/// statement-style callers still compile unchanged.
/// </summary>
/// <param name="effect">NOTE(review): currently unused — playback is hard-wired
/// to sent.mp3; confirm whether a per-effect asset lookup was intended.</param>
public static async Task Play(SoundEffect effect)
{
    var settings = new AudioGraphSettings(AudioRenderCategory.SoundEffects);
    settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
    var result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        return;
    }
    var graph = result.Graph;

    var file = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Assets/Audio/sent.mp3"));
    var fileInputNodeResult = await graph.CreateFileInputNodeAsync(file);
    if (fileInputNodeResult.Status != AudioFileNodeCreationStatus.Success)
    {
        graph.Dispose(); // BUG FIX: graph was leaked on this early-out
        return;
    }
    var deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        graph.Dispose(); // BUG FIX: graph was leaked on this early-out
        return;
    }
    fileInputNodeResult.FileInputNode
        .AddOutgoingConnection(deviceOutputNodeResult.DeviceOutputNode);
    // TODO(review): the graph is never disposed after playback completes, so
    // every call still leaks one running graph; dispose it from a FileCompleted
    // handler (marshalled off the audio thread) once the clip ends.
    graph.Start();
}
/// <summary>
/// Starts capturing microphone (speech-category) audio into an MP3 file in the
/// app's temporary folder, overwriting any previous capture.
/// Returns Task (was 'async void') so callers can await setup completion and
/// observe failures; statement-style callers still compile unchanged.
/// Capture runs until the graph is stopped elsewhere.
/// </summary>
public async Task CaptureAudio()
{
    AudioGraphSettings audioGraphSettings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Speech);
    var result = await AudioGraph.CreateAsync(audioGraphSettings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        return;
    }
    _audioGraph = result.Graph;

    var deviceInputNodeResult = await _audioGraph.CreateDeviceInputNodeAsync(Windows.Media.Capture.MediaCategory.Speech);
    if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return;
    }
    var deviceInputNode = deviceInputNodeResult.DeviceInputNode;

    // Capture target: a transient file, replaced on every run.
    _audioFile = await Windows.Storage.ApplicationData.Current.TemporaryFolder
        .CreateFileAsync("speech", CreationCollisionOption.ReplaceExisting);
    var mediaEncodingProfile = MediaEncodingProfile.CreateMp3(AudioEncodingQuality.High);
    var fileOutputNodeResult = await _audioGraph.CreateFileOutputNodeAsync(_audioFile, mediaEncodingProfile);
    if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
    {
        return;
    }
    var fileOutputNode = fileOutputNodeResult.FileOutputNode;

    // mic -> mp3 file
    deviceInputNode.AddOutgoingConnection(fileOutputNode);
    _audioGraph.Start();
}
/// <summary>
/// Creates the graph, output node and packaged-file input node, then starts playback.
/// Fixes: (1) no creation result was status-checked, so any failure produced a
/// NullReferenceException on the next line; (2) a FileOpenPicker was configured
/// but never shown (the file comes from the package) — dead code removed;
/// (3) returns Task instead of 'async void' so failures are observable.
/// </summary>
private async Task initGraph()
{
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        return; // cannot create graph
    }
    graph = result.Graph;

    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return; // no usable output device
    }
    deviceOutput = deviceOutputNodeResult.DeviceOutputNode;

    // Load the packaged audio file (no picker needed).
    StorageFile file = await GetPackagedFile(null, "audio.mp3");
    CreateAudioFileInputNodeResult fileInputResult = await graph.CreateFileInputNodeAsync(file);
    if (fileInputResult.Status != AudioFileNodeCreationStatus.Success)
    {
        return; // file could not be opened as an input node
    }
    fileInput = fileInputResult.FileInputNode;
    fileInput.AddOutgoingConnection(deviceOutput);
    graph.Start();
}
// Init the AudioGraph.
// Despite the async platform methods, this deliberately executes synchronously
// so the init phase only completes when everything is available.
private void InitAudioGraph( )
{
    LOG.Log("InitAudioGraph: Begin");
    if (!_canPlay)
    {
        LOG.Log("InitAudioGraph: Canceled with _canPlay = false");
        return; // cannot even try..
    }

    // MUST WAIT UNTIL all items are created, else one may call Play too early...
    // Cleanup existing items (re-init support): output node first, then graph.
    if (_deviceOutputNode != null)
    {
        _deviceOutputNode.Dispose( );
        _deviceOutputNode = null;
    }
    if (_audioGraph != null)
    {
        _audioGraph.Dispose( );
        _audioGraph = null;
    }

    // Create an AudioGraph using the render category validated elsewhere
    // (see FindRenderCategory, which leaves _renderCat usable).
    AudioGraphSettings settings = new AudioGraphSettings(_renderCat)
    {
        PrimaryRenderDevice = null, // If PrimaryRenderDevice is null, the default playback device will be used.
        MaxPlaybackSpeedFactor = 2, // should preserve some memory
    };
    // We await here the execution without providing an async method ...
    // NOTE(review): Task.Wait() on a WinRT async op blocks this thread; safe
    // only if this never runs on the UI thread — confirm the call site.
    var resultAG = WindowsRuntimeSystemExtensions.AsTask(AudioGraph.CreateAsync(settings));
    resultAG.Wait( );
    if (resultAG.Result.Status != AudioGraphCreationStatus.Success)
    {
        LOG.LogError($"InitAudioGraph: Failed to create AudioGraph with RenderCategory: {_renderCat}");
        LOG.LogError($"InitAudioGraph: AudioGraph creation: {resultAG.Result.Status}, TaskStatus: {resultAG.Status}"
          + $"\nExtError: {resultAG.Result.ExtendedError}");
        _canPlay = false; // disable playback for good
        return;
    }
    _audioGraph = resultAG.Result.Graph;
    LOG.Log($"InitAudioGraph: AudioGraph: [{_audioGraph.EncodingProperties}]");

    // Create a device output node.
    // The output node uses the PrimaryRenderDevice of the audio graph.
    // We await here the execution without providing an async method ...
    var resultDO = WindowsRuntimeSystemExtensions.AsTask(_audioGraph.CreateDeviceOutputNodeAsync());
    resultDO.Wait( );
    if (resultDO.Result.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device output node
        LOG.LogError($"InitAudioGraph: DeviceOutputNode creation: {resultDO.Result.Status}, TaskStatus: {resultDO.Status}"
          + $"\nExtError: {resultDO.Result.ExtendedError}");
        _canPlay = false; // disable playback for good
        return;
    }
    _deviceOutputNode = resultDO.Result.DeviceOutputNode;
    LOG.Log($"InitAudioGraph: DeviceOutputNode: [{_deviceOutputNode.Device}]");
    LOG.Log($"InitAudioGraph: InitAudioGraph-END");
}
/// <summary>
/// Builds the graph and output node, then loads every file of every sound bank
/// into file input nodes, grouping them into the four per-bank collections
/// (POne/PTwo/WOne/WTwo) via InputNodesList. Silently does nothing when graph
/// or output creation fails.
/// </summary>
public async Task InitializeSounds()
{
    soundBankInitializer = new SoundBanksInitializer();

    // One node collection per bank, ordered to match SoundBanks' indices.
    POneInputNodes = new ObservableCollection<AudioFileInputNode>();
    PTwoInputNodes = new ObservableCollection<AudioFileInputNode>();
    WOneInputNodes = new ObservableCollection<AudioFileInputNode>();
    WTwoInputNodes = new ObservableCollection<AudioFileInputNode>();
    InputNodesList = new ObservableCollection<ObservableCollection<AudioFileInputNode> >();
    InputNodesList.Add(POneInputNodes);
    InputNodesList.Add(PTwoInputNodes);
    InputNodesList.Add(WOneInputNodes);
    InputNodesList.Add(WTwoInputNodes);

    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status == AudioGraphCreationStatus.Success)
    {
        graph = result.Graph;

        // create the output device
        CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
        // make sure the audio output is available
        if (deviceOutputNodeResult.Status == AudioDeviceNodeCreationStatus.Success)
        {
            deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
            graph.ResetAllNodes();

            foreach (SoundBank soundBank in soundBankInitializer.SoundBanks)
            {
                // FileNames[0]: one-shot samples for this bank.
                foreach (string fileName in soundBank.FileNames[0])
                {
                    await CreateInputNodeFromFile("ms-appx:///Assets/AudioSamples/" + fileName);
                    InputNodesList[soundBankInitializer.SoundBanks.IndexOf(soundBank)].Add(FileInputNodesDictionary[fileName]);
                }
                // FileNames[1]: samples with LoopCount = null — presumably
                // loop-forever per AudioFileInputNode semantics; verify.
                foreach (string fileName in soundBank.FileNames[1])
                {
                    await CreateInputNodeFromFile("ms-appx:///Assets/AudioSamples/" + fileName);
                    FileInputNodesDictionary[fileName].LoopCount = null;
                    InputNodesList[soundBankInitializer.SoundBanks.IndexOf(soundBank)].Add(FileInputNodesDictionary[fileName]);
                }
            }
            InitializeEffects();
            graph.Start();
        }
    }
}
// Find a valid AudioGraph RenderCategory.
// This should leave _renderCat with a valid one or _renderNone.
private void FindRenderCategory( )
{
    // Preference-ordered tryouts for the output rendering; the _renderNone
    // sentinel terminates the sequence.
    Queue<AudioRenderCategory> renderSequence = new Queue<AudioRenderCategory>(new [] {
        AudioRenderCategory.Speech,
        AudioRenderCategory.GameChat,
        AudioRenderCategory.GameEffects,
        AudioRenderCategory.SoundEffects,
        AudioRenderCategory.Media,
        AudioRenderCategory.Other,
        // Finally the Not Available Cat
        _renderNone,
    });
    _renderCat = renderSequence.Dequeue( );

    // Try a cat that works
    do
    {
        // Create a probe AudioGraph just to see whether this category is usable.
        AudioGraphSettings settings = new AudioGraphSettings(_renderCat)
        {
            PrimaryRenderDevice = null, // If PrimaryRenderDevice is null, the default playback device will be used.
        };
        LOG.Log($"FindRenderCategory: About to test AudioGraph with RenderCategory: {_renderCat}");
        // We await here the execution without providing an async method ...
        // (synchronous Wait on the WinRT operation — deliberate, see InitAudioGraph)
        var resultAG = WindowsRuntimeSystemExtensions.AsTask(AudioGraph.CreateAsync(settings));
        resultAG.Wait( );
        if (resultAG.Result.Status != AudioGraphCreationStatus.Success)
        {
            LOG.LogError($"FindRenderCategory: AudioGraph test error: {resultAG.Result.Status}, TaskStatus: {resultAG.Status}"
              + $"\nExtError: {resultAG.Result.ExtendedError}");
            // try next category if there is one left
            if (renderSequence.Count > 0)
            {
                _renderCat = renderSequence.Dequeue( );
            }
            else
            {
                // sanity - should never happen: the queue always ends with _renderNone,
                // so the do/while condition exits before the queue can run dry.
                LOG.LogError($"FindRenderCategory: Program error - Queue overrun");
                _renderCat = _renderNone;
                return;
            }
        }
        else
        {
            resultAG.Result.Graph?.Dispose( ); // not used after tryout
            LOG.Log($"FindRenderCategory: Success with RenderCategory: {_renderCat}");
            return; // _renderCat contains a successful one
        }
    } while (_renderCat != _renderNone);

    // Every real category failed and _renderNone was reached.
    LOG.LogError($"FindRenderCategory: Failed to find a working RenderCategory - cannot speak");
    _canSpeak = false;
    return; // could not resolve - left with _renderNone
}
/// <summary>
/// Creates a low-latency graph on the output device chosen in the list box,
/// then a default output node and a default input node, updating the UI
/// status text and the red/green device indicators at each step. On full
/// success, enables file-output selection and guards against device loss.
/// </summary>
private async Task CreateAudioGraph()
{
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
    // NOTE(review): assumes list-box index 0 is a non-device entry (hence the
    // -1) and that something is selected; SelectedIndex of 0 or -1 would index
    // out of range — confirm the call site guarantees a device selection.
    settings.PrimaryRenderDevice = outputDevices[outputDevicesListBox.SelectedIndex - 1];
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        // Cannot create graph
        rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
        return;
    }
    graph = result.Graph;
    rootPage.NotifyUser("Graph successfully created!", NotifyType.StatusMessage);

    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device output node
        rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
        outputDeviceContainer.Background = new SolidColorBrush(Colors.Red);
        return;
    }
    deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
    rootPage.NotifyUser("Device Output connection successfully created", NotifyType.StatusMessage);
    outputDeviceContainer.Background = new SolidColorBrush(Colors.Green);

    // Create a device input node using the default audio input device
    CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device input node
        rootPage.NotifyUser(String.Format("Audio Device Input unavailable because {0}", deviceInputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
        inputDeviceContainer.Background = new SolidColorBrush(Colors.Red);
        return;
    }
    deviceInputNode = deviceInputNodeResult.DeviceInputNode;
    rootPage.NotifyUser("Device Input connection successfully created", NotifyType.StatusMessage);
    inputDeviceContainer.Background = new SolidColorBrush(Colors.Green);

    // Since graph is successfully created, enable the button to select a file output
    fileButton.IsEnabled = true;
    // Disable the graph button to prevent accidental click
    createGraphButton.IsEnabled = false;
    // Because we are using lowest latency setting, we need to handle device disconnection errors
    graph.UnrecoverableErrorOccurred += Graph_UnrecoverableErrorOccurred;
}