public async Task InitializeSounds()
{
    InputNodes = new ObservableCollection<AudioFileInputNode>();
    FileInputNodesDictionary = new Dictionary<string, AudioFileInputNode>();

    // Build the graph; bail out silently if either the graph or the default
    // render device is unavailable (same outcome as the original nested ifs).
    CreateAudioGraphResult graphResult = await AudioGraph.CreateAsync(new AudioGraphSettings(AudioRenderCategory.Media));
    if (graphResult.Status != AudioGraphCreationStatus.Success)
    {
        return;
    }
    graph = graphResult.Graph;

    // make sure the audio output is available
    CreateAudioDeviceOutputNodeResult outputResult = await graph.CreateDeviceOutputNodeAsync();
    if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return;
    }
    outputNode = outputResult.DeviceOutputNode;
    graph.ResetAllNodes();

    // Countdown sounds come in pairs: "...0<n>-1<ext>" and "...0<n>-2<ext>".
    for (int index = 1; index <= countdownSoundsNum; index++)
    {
        await CreateInputNodeFromFile($"{baseUri}{countdownBaseFileName}0{index}-1{audioFileExt}");
        await CreateInputNodeFromFile($"{baseUri}{countdownBaseFileName}0{index}-2{audioFileExt}");
    }

    // "Done" sounds are zero-padded through 09, then 10, 11, ...
    for (int index = 1; index <= doneSoundsNum; index++)
    {
        string padding = index > 9 ? "" : "0";
        await CreateInputNodeFromFile($"{baseUri}{doneBaseFileName}{padding}{index}{audioFileExt}");
    }

    graph.Start();
}
// Builds the audio graph, wires the packaged "audio.mp3" file into the
// default output device, and starts playback.
// NOTE: async void is kept for signature compatibility (likely used as an
// event handler); exceptions thrown here are unobservable by callers.
private async void initGraph()
{
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    // FIX: the status was previously unchecked; on failure result.Graph is
    // null and the code below threw NullReferenceException.
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        return;
    }
    graph = result.Graph;

    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return;
    }
    deviceOutput = deviceOutputNodeResult.DeviceOutputNode;

    // FIX: removed a FileOpenPicker that was configured but never shown —
    // the file actually comes from the app package below.
    StorageFile file = await GetPackagedFile(null, "audio.mp3");
    CreateAudioFileInputNodeResult fileInputResult = await graph.CreateFileInputNodeAsync(file);
    if (fileInputResult.Status != AudioFileNodeCreationStatus.Success)
    {
        return;
    }
    fileInput = fileInputResult.FileInputNode;
    fileInput.AddOutgoingConnection(deviceOutput);
    graph.Start();
}
// Creates the AudioGraph and its default device output node.
// FIX: the original synchronous version called GetResults() on the still
// running IAsyncOperations, which throws when the operation has not yet
// completed. The operations are now awaited; existing call sites that invoked
// CreateAudioGraph() fire-and-forget still compile against the Task return.
private async Task CreateAudioGraph()
{
    // Create an AudioGraph with default settings
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        // Cannot create graph
        return;
    }
    graph = result.Graph;

    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device output
        return;
    }
    deviceOutputNode = deviceOutputResult.DeviceOutputNode;
}
// Builds the default media AudioGraph and attaches a device output node,
// logging each failure or success through the shared logger.
private async Task CreateAudioGraph()
{
    var graphSettings = new AudioGraphSettings(AudioRenderCategory.Media);
    var creationResult = await AudioGraph.CreateAsync(graphSettings);
    if (creationResult.Status != AudioGraphCreationStatus.Success)
    {
        Logging.SingleInstance.LogMessage("AudioGraph Creation Error because " + creationResult.Status);
        return;
    }
    graph = creationResult.Graph;

    // Output node bound to the default render device.
    var outputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    if (outputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        Logging.SingleInstance.LogMessage($"Device Output unavailable because {outputNodeResult.Status}");
        return;
    }
    deviceOutput = outputNodeResult.DeviceOutputNode;
    Logging.SingleInstance.LogMessage("Device Output Node successfully created");
}
// Builds the default media AudioGraph plus a device output node, reporting
// progress through the sample's NotifyUser banner and the speaker indicator.
private async Task CreateAudioGraph()
{
    var graphSettings = new AudioGraphSettings(AudioRenderCategory.Media);
    var graphResult = await AudioGraph.CreateAsync(graphSettings);
    if (graphResult.Status != AudioGraphCreationStatus.Success)
    {
        rootPage.NotifyUser($"AudioGraph Creation Error because {graphResult.Status}", NotifyType.ErrorMessage);
        return;
    }
    graph = graphResult.Graph;

    // Attach the default render device as the graph's output.
    var outputResult = await graph.CreateDeviceOutputNodeAsync();
    if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        rootPage.NotifyUser($"Device Output unavailable because {outputResult.Status}", NotifyType.ErrorMessage);
        speakerContainer.Background = new SolidColorBrush(Colors.Red);
        return;
    }
    deviceOutput = outputResult.DeviceOutputNode;
    rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
    speakerContainer.Background = new SolidColorBrush(Colors.Green);
}
// Builds the AudioGraph, its device output node, and a submix node routed to
// the output. Throws on any creation failure.
private async Task CreateAudioGraph()
{
    // Create an AudioGraph with default settings
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        // FIX: descriptive message instead of the unhelpful literal "error"
        // (callers catching Exception still catch InvalidOperationException).
        throw new InvalidOperationException($"AudioGraph creation failed: {result.Status}");
    }
    _graph = result.Graph;

    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputResult = await _graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device output
        throw new InvalidOperationException($"Device output node creation failed: {deviceOutputResult.Status}");
    }
    _deviceOutputNode = deviceOutputResult.DeviceOutputNode;

    // FIX: the submix node was previously created before the output-node
    // status check, so it was built (and orphaned) even on the failure path.
    _subMixNode = _graph.CreateSubmixNode();
    _subMixNode.AddOutgoingConnection(_deviceOutputNode);
}
// Builds the media AudioGraph and a device output node, updating the UI
// banner and speaker indicator along the way.
private async Task CreateAudioGraph()
{
    var mediaSettings = new AudioGraphSettings(AudioRenderCategory.Media);
    var graphResult = await AudioGraph.CreateAsync(mediaSettings);
    if (graphResult.Status != AudioGraphCreationStatus.Success)
    {
        NotifyUser($"AudioGraph Creation Error because {graphResult.Status}", NotifyType.ErrorMessage);
        return;
    }
    graph = graphResult.Graph;

    // NOTE(review): mutating EncodingProperties after the graph is created may
    // not actually change the render format — confirm this has an effect.
    graph.EncodingProperties.SampleRate = 44100;

    var outputResult = await graph.CreateDeviceOutputNodeAsync();
    if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        NotifyUser($"Audio Device Output unavailable because {outputResult.Status}", NotifyType.ErrorMessage);
        speakerContainer.Background = new SolidColorBrush(Colors.Red);
        return;
    }
    deviceOutputNode = outputResult.DeviceOutputNode;
    NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
    speakerContainer.Background = new SolidColorBrush(Colors.Green);
}
// Tears the whole graph down after a failure and rebuilds it from scratch.
private void Recover()
{
    App.Log("Recover MediaPlayer");
    graph.Stop();

    // Best-effort disposal: nodes may already be invalid after an
    // unrecoverable error, so every failure is deliberately swallowed.
    void DisposeQuietly(IDisposable resource)
    {
        try
        {
            resource?.Dispose();
        }
        catch (Exception)
        {
        }
    }

    DisposeQuietly(mainInputNode);
    DisposeQuietly(subInputNode);
    DisposeQuietly(outputNode);
    mainInputNode = null;
    subInputNode = null;
    outputNode = null;
    mainSong = null;
    subSong = null;

    DisposeQuietly(graph);
    graph = null;

    Init();
}
// Builds the audio graph and output node, records each creation status in
// LastStatus, wires the error handler, and sets up the effect chain.
// Returns false (after cleaning up) when any creation step fails.
public async Task<bool> InitializeAudioGraphAsync()
{
    var graphResult = await AudioGraph.CreateAsync(new AudioGraphSettings(AudioRenderCategory.Media));
    LastStatus = graphResult.Status.ToString();
    if (graphResult.Status != AudioGraphCreationStatus.Success)
    {
        return false;
    }
    _audioGraph = graphResult.Graph;
    _audioGraph.UnrecoverableErrorOccurred += (sender, args) => OnErrorOccurred(args);

    var deviceOutputResult = await _audioGraph.CreateDeviceOutputNodeAsync();
    LastStatus = deviceOutputResult.Status.ToString();
    if (deviceOutputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // A failed init must not leave a live graph behind.
        _audioGraph.Dispose();
        return false;
    }
    _outputNode = deviceOutputResult.DeviceOutputNode;

    CreateEchoEffect();
    CreateLimiterEffect();
    CreateReverbEffect();
    CreateEqualizerEffect();
    return true;
}
// Idempotently initializes the audio graph and output node; starts the graph
// unless muted. Returns true once initialized, false on any creation failure.
public async Task<bool> InitializeAsync()
{
    if (this.IsInitialized == true)
    {
        return true;
    }

    var result = await AudioGraph.CreateAsync(new AudioGraphSettings(AudioRenderCategory.Media));
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        return false;
    }
    _audioGraph = result.Graph;

    var outputResult = await _audioGraph.CreateDeviceOutputNodeAsync();
    if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // FIX: dispose the half-built graph instead of leaking it, so a later
        // retry of InitializeAsync starts from a clean slate (matches the
        // cleanup done by InitializeAudioGraphAsync elsewhere in this source).
        _audioGraph.Dispose();
        _audioGraph = null;
        return false;
    }
    _outputNode = outputResult.DeviceOutputNode;

    if (this.IsMute == false)
    {
        _audioGraph.Start();
    }
    this.IsInitialized = true;
    return true;
}
// Builds the graph and output node, then attaches a 32-bit-float stereo
// frame input node whose samples are produced in FrameInputNode_QuantumStarted.
public async Task Init()
{
    var graphResult = await AudioGraph.CreateAsync(new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Media));
    if (graphResult == null || graphResult.Status != AudioGraphCreationStatus.Success)
    {
        return;
    }
    audioGraph = graphResult.Graph;

    var outputResult = await audioGraph.CreateDeviceOutputNodeAsync();
    if (outputResult == null || outputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return;
    }
    deviceOutputNode = outputResult.DeviceOutputNode;

    // 32-bit float, stereo, 44.1 kHz format for the generated frames.
    var frameFormat = new AudioEncodingProperties
    {
        BitsPerSample = 32,
        ChannelCount = 2,
        SampleRate = 44100,
        Subtype = MediaEncodingSubtypes.Float,
    };
    audioFrameInputNode = audioGraph.CreateFrameInputNode(frameFormat);
    audioFrameInputNode.QuantumStarted += FrameInputNode_QuantumStarted;
    audioFrameInputNode.AddOutgoingConnection(deviceOutputNode);

    audioGraph.Start();
}
/// <summary>
/// Initializes the player service: builds the audio graph, attaches the
/// default device output, starts the graph, and preloads the sound files
/// listed in <c>_filenames</c> into <c>_loaded</c>.
/// </summary>
private async Task InitializeAsync()
{
    var result = await AudioGraph.CreateAsync(new AudioGraphSettings(AudioRenderCategory.Media));
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        return;
    }
    _audioGraph = result.Graph;

    var outputResult = await _audioGraph.CreateDeviceOutputNodeAsync();
    if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // FIX: dispose the graph on failure instead of leaking it and leaving
        // _audioGraph pointing at a graph with no output.
        _audioGraph.Dispose();
        _audioGraph = null;
        return;
    }
    _outputNode = outputResult.DeviceOutputNode;
    _audioGraph.Start();

    // Preload every configured sound from the app package.
    foreach (var kvp in _filenames)
    {
        var storageFile = await StorageFile.GetFileFromApplicationUriAsync(new Uri($"ms-appx:///Assets/{kvp.Value}"));
        _loaded.Add(kvp.Key, storageFile);
    }
}
// Builds the audio graph and device output node, then subscribes to
// QuantumProcessed. Throws on any creation failure.
public async Task Init()
{
    var graphResult = await Windows.Media.Audio.AudioGraph.CreateAsync(
        new Windows.Media.Audio.AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Media) { });
    if (graphResult.Status != Windows.Media.Audio.AudioGraphCreationStatus.Success)
    {
        // FIX: corrected the misspelled error messages ("Faild").
        throw new Exception("Failed to create AudioGraph");
    }
    graph = graphResult.Graph;

    var outputResult = await graph.CreateDeviceOutputNodeAsync();
    if (outputResult.Status != Windows.Media.Audio.AudioDeviceNodeCreationStatus.Success)
    {
        throw new Exception("Failed to create DeviceOutput");
    }
    outputNode = outputResult.DeviceOutputNode;

    graph.QuantumProcessed += MainGraph_QuantumProcessed;
}
// Builds the audio graph and its device output, loads the sample file nodes,
// and starts playback. Errors are reported to the user via MessageDialog.
private async void InitAudioGraph()
{
    // Request the lowest-latency quantum available to the graph's devices.
    var settings = new AudioGraphSettings(AudioRenderCategory.Media)
    {
        QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency
    };

    var creation = await AudioGraph.CreateAsync(settings);
    _graph = creation.Graph;
    if (_graph == null)
    {
        // failed to create audio graph
        await new MessageDialog("Failed to create audio graph").ShowAsync();
        return;
    }

    // Device output; file output could be created here instead to stream to
    // a temp file or similar.
    var outputCreation = await _graph.CreateDeviceOutputNodeAsync();
    _deviceOutput = outputCreation.DeviceOutputNode;
    if (_deviceOutput == null)
    {
        // failed to create audio output
        await new MessageDialog("Failed to create device output").ShowAsync();
        return;
    }

    // Load all of the samples into graph nodes, then start playback.
    BuildFileNodes();
    _graph.Start();
}
// Stops playback and tears down the graph, nodes, and buffered audio data
// under the audio-data mutex.
public void Stop()
{
    m_audioDataMutex.WaitOne();
    try
    {
        m_isRunning = false;
        m_isFlushing = false;

        if (m_audioGraph != null)
        {
            m_audioGraph.Stop();
        }
        if (m_deviceOutputNode != null)
        {
            m_deviceOutputNode.Dispose();
            m_deviceOutputNode = null;
        }
        if (m_frameInputNode != null)
        {
            m_frameInputNode.Dispose();
            m_frameInputNode = null;
        }
        if (m_audioGraph != null)
        {
            m_audioGraph.Dispose();
            m_audioGraph = null;
        }
        m_audioData = null;
    }
    finally
    {
        // FIX: release the mutex even when Stop/Dispose throws; previously an
        // exception left the mutex held and deadlocked every later caller.
        m_audioDataMutex.ReleaseMutex();
    }
}
// Builds the audio graph, loads every mapped sound file, and starts the
// graph. Returns true only when the graph and output node were both created.
public async Task<bool> InitializeSound()
{
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        return false;
    }
    graph = result.Graph;

    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // FIX: this path previously fell through, set isInitialized = true and
        // returned true even though no output node or sounds existed.
        return false;
    }
    deviceOutput = deviceOutputNodeResult.DeviceOutputNode;
    graph.ResetAllNodes();

    foreach (var soundMapping in soundMappings)
    {
        await AddFileToSoundDictionary("ms-appx:///Features/Game/Assets/" + soundMapping.Value);
    }
    graph.Start();

    isInitialized = true;
    return isInitialized;
}
// Builds the audio graph and loads every sound-bank sample into file input
// nodes, grouped per bank in InputNodesList (order: POne, PTwo, WOne, WTwo).
// NOTE(review): FileInputNodesDictionary is read here but initialized
// elsewhere (presumably populated by CreateInputNodeFromFile) — confirm.
public async Task InitializeSounds()
{
    soundBankInitializer = new SoundBanksInitializer();

    // One node collection per sound bank, added to InputNodesList in the same
    // order as SoundBanks so IndexOf(soundBank) maps bank -> collection.
    POneInputNodes = new ObservableCollection<AudioFileInputNode>();
    PTwoInputNodes = new ObservableCollection<AudioFileInputNode>();
    WOneInputNodes = new ObservableCollection<AudioFileInputNode>();
    WTwoInputNodes = new ObservableCollection<AudioFileInputNode>();
    InputNodesList = new ObservableCollection<ObservableCollection<AudioFileInputNode>>();
    InputNodesList.Add(POneInputNodes);
    InputNodesList.Add(PTwoInputNodes);
    InputNodesList.Add(WOneInputNodes);
    InputNodesList.Add(WTwoInputNodes);

    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status == AudioGraphCreationStatus.Success)
    {
        graph = result.Graph;

        // create the output device
        CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();

        // make sure the audio output is available
        if (deviceOutputNodeResult.Status == AudioDeviceNodeCreationStatus.Success)
        {
            deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
            graph.ResetAllNodes();

            foreach (SoundBank soundBank in soundBankInitializer.SoundBanks)
            {
                // FileNames[0]: samples loaded with default playback settings.
                foreach (string fileName in soundBank.FileNames[0])
                {
                    await CreateInputNodeFromFile("ms-appx:///Assets/AudioSamples/" + fileName);
                    InputNodesList[soundBankInitializer.SoundBanks.IndexOf(soundBank)].Add(FileInputNodesDictionary[fileName]);
                }

                // FileNames[1]: samples that additionally get LoopCount = null
                // (null loop count -> loop indefinitely per AudioFileInputNode).
                foreach (string fileName in soundBank.FileNames[1])
                {
                    await CreateInputNodeFromFile("ms-appx:///Assets/AudioSamples/" + fileName);
                    FileInputNodesDictionary[fileName].LoopCount = null;
                    InputNodesList[soundBankInitializer.SoundBanks.IndexOf(soundBank)].Add(FileInputNodesDictionary[fileName]);
                }
            }

            InitializeEffects();
            graph.Start();
        }
    }
}
// Init the AudioGraph.
// Despite the Async methods this deliberately executes synchronously so the
// init phase only completes when everything is available — callers may Play
// immediately afterwards. NOTE(review): blocking with Task.Wait() assumes
// this is not called on a UI/sync-context thread, otherwise it can deadlock.
private void InitAudioGraph( )
{
    LOG.Log("InitAudioGraph: Begin");
    if (!_canPlay)
    {
        LOG.Log("InitAudioGraph: Canceled with _canPlay = false");
        return; // cannot even try..
    }

    // MUST WAIT UNTIL all items are created, else one may call Play too early...
    // cleanup existing items so a re-init starts from scratch
    if (_deviceOutputNode != null)
    {
        _deviceOutputNode.Dispose( );
        _deviceOutputNode = null;
    }
    if (_audioGraph != null)
    {
        _audioGraph.Dispose( );
        _audioGraph = null;
    }

    // Create an AudioGraph for the configured render category
    AudioGraphSettings settings = new AudioGraphSettings(_renderCat)
    {
        PrimaryRenderDevice = null, // If PrimaryRenderDevice is null, the default playback device will be used.
        MaxPlaybackSpeedFactor = 2, // should preserve some memory
    };

    // We await here the execution without providing an async method ...
    var resultAG = WindowsRuntimeSystemExtensions.AsTask(AudioGraph.CreateAsync(settings));
    resultAG.Wait( );
    if (resultAG.Result.Status != AudioGraphCreationStatus.Success)
    {
        LOG.LogError($"InitAudioGraph: Failed to create AudioGraph with RenderCategory: {_renderCat}");
        LOG.LogError($"InitAudioGraph: AudioGraph creation: {resultAG.Result.Status}, TaskStatus: {resultAG.Status}"
            + $"\nExtError: {resultAG.Result.ExtendedError}");
        _canPlay = false;
        return;
    }
    _audioGraph = resultAG.Result.Graph;
    LOG.Log($"InitAudioGraph: AudioGraph: [{_audioGraph.EncodingProperties}]");

    // Create a device output node.
    // The output node uses the PrimaryRenderDevice of the audio graph.
    // We await here the execution without providing an async method ...
    var resultDO = WindowsRuntimeSystemExtensions.AsTask(_audioGraph.CreateDeviceOutputNodeAsync());
    resultDO.Wait( );
    if (resultDO.Result.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device output node
        LOG.LogError($"InitAudioGraph: DeviceOutputNode creation: {resultDO.Result.Status}, TaskStatus: {resultDO.Status}"
            + $"\nExtError: {resultDO.Result.ExtendedError}");
        _canPlay = false;
        return;
    }
    _deviceOutputNode = resultDO.Result.DeviceOutputNode;
    LOG.Log($"InitAudioGraph: DeviceOutputNode: [{_deviceOutputNode.Device}]");
    LOG.Log($"InitAudioGraph: InitAudioGraph-END");
}
// Builds the full graph for the output device chosen in the list box: device
// output node, default device input node, and the related UI state. Uses the
// lowest-latency quantum, so device-disconnect errors must be handled (see
// the UnrecoverableErrorOccurred subscription at the end).
// NOTE(review): assumes outputDevicesListBox.SelectedIndex >= 1 (index 0 is
// presumably a placeholder entry) — confirm against the UI code.
private async Task CreateAudioGraph()
{
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
    settings.PrimaryRenderDevice = outputDevices[outputDevicesListBox.SelectedIndex - 1];

    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        // Cannot create graph
        rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
        return;
    }
    graph = result.Graph;
    rootPage.NotifyUser("Graph successfully created!", NotifyType.StatusMessage);

    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device output node
        rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
        outputDeviceContainer.Background = new SolidColorBrush(Colors.Red);
        return;
    }
    deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
    rootPage.NotifyUser("Device Output connection successfully created", NotifyType.StatusMessage);
    outputDeviceContainer.Background = new SolidColorBrush(Colors.Green);

    // Create a device input node using the default audio input device
    CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device input node
        rootPage.NotifyUser(String.Format("Audio Device Input unavailable because {0}", deviceInputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
        inputDeviceContainer.Background = new SolidColorBrush(Colors.Red);
        return;
    }
    deviceInputNode = deviceInputNodeResult.DeviceInputNode;
    rootPage.NotifyUser("Device Input connection successfully created", NotifyType.StatusMessage);
    inputDeviceContainer.Background = new SolidColorBrush(Colors.Green);

    // Since graph is successfully created, enable the button to select a file output
    fileButton.IsEnabled = true;
    // Disable the graph button to prevent accidental click
    createGraphButton.IsEnabled = false;

    // Because we are using lowest latency setting, we need to handle device disconnection errors
    graph.UnrecoverableErrorOccurred += Graph_UnrecoverableErrorOccurred;
}
// Builds a 22.05 kHz mono 16-bit PCM graph wiring microphone -> frame output
// (capture) and frame input (generated samples) -> speakers. The graph runs
// continuously; only the frame input node is started/stopped later.
private async Task InitializeAudioAsync()
{
    var graphSettings = new AudioGraphSettings(AudioRenderCategory.Media);
    graphSettings.EncodingProperties = AudioEncodingProperties.CreatePcm(22050, 1, 16);
    var graphResult = await AudioGraph.CreateAsync(graphSettings);
    if (graphResult.Status != AudioGraphCreationStatus.Success)
    {
        return;
    }
    _graph = graphResult.Graph;

    // Speaker output.
    var outputResult = await _graph.CreateDeviceOutputNodeAsync();
    if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return;
    }
    _deviceOutputNode = outputResult.DeviceOutputNode;

    // Microphone input.
    var inputResult = await _graph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    if (inputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return;
    }
    _deviceInputNode = inputResult.DeviceInputNode;

    // Frame nodes use the graph's format, explicitly forced to mono.
    var frameFormat = _graph.EncodingProperties;
    frameFormat.ChannelCount = 1;

    _frameInputNode = _graph.CreateFrameInputNode(frameFormat);
    _frameInputNode.AddOutgoingConnection(_deviceOutputNode);

    _frameOutputNode = _graph.CreateFrameOutputNode(frameFormat);
    _deviceInputNode.AddOutgoingConnection(_frameOutputNode);

    // The frame input node begins stopped; QuantumStarted supplies samples
    // on demand once it is started.
    _frameInputNode.Stop();
    _frameInputNode.QuantumStarted += node_QuantumStarted;
    _graph.QuantumProcessed += GraphOnQuantumProcessed;

    _graph.Start();
}
// Loads the beep asset, builds an audio graph on the default render device,
// and initializes microphone capture (best-effort).
public static async Task AudioDevices()
{
    if (beep == null)
    {
        beep = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Assets/Audio/beep.wav"));
    }

    DeviceInformationCollection devices = await Windows.Devices.Enumeration.DeviceInformation.FindAllAsync(Windows.Media.Devices.MediaDevice.GetAudioRenderSelector());
    // A null PrimaryRenderDevice selects the system default render device.
    // (The old explicit device-picking loop for "Speakers (Raspberry Pi 2
    // audio)" was already commented out; 'devices' is kept in case explicit
    // selection is reinstated.)
    DeviceInformation selectedDevice = null;

    settings = new AudioGraphSettings(AudioRenderCategory.Media);
    settings.PrimaryRenderDevice = selectedDevice;

    CreateAudioGraphResult resultg = await AudioGraph.CreateAsync(settings);
    if (resultg.Status != AudioGraphCreationStatus.Success)
    {
        // FIX: an unchecked failure here previously caused a
        // NullReferenceException when dereferencing resultg.Graph below.
        Debug.WriteLine("Failed to create AudioGraph: " + resultg.Status);
        return;
    }
    audioflow = resultg.Graph;

    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await audioflow.CreateDeviceOutputNodeAsync();
    // NOTE(review): the input-node result is created but never used beyond the
    // side effect of requesting the capture device — confirm it is needed.
    CreateAudioDeviceInputNodeResult deviceInputNodeResult = await audioflow.CreateDeviceInputNodeAsync(MediaCategory.Media);
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        Debug.WriteLine("Failed to create device output node: " + deviceOutputNodeResult.Status);
        return;
    }
    deviceOuput = deviceOutputNodeResult.DeviceOutputNode;

    // Best-effort microphone setup; a missing mic is logged, not fatal.
    try
    {
        _mediaCapture = new MediaCapture();
        await _mediaCapture.InitializeAsync();
        _mediaCapture.Failed += _mediaCapture_Failed;
        _mediaCapture.AudioDeviceController.VolumePercent = 0.75f;
    }
    catch
    {
        Debug.WriteLine("Failed to setup microphone is one connected?");
    }
}
// Builds the default media graph and output node, then starts the graph.
private async Task CreateAudioGraph()
{
    var settings = new AudioGraphSettings(AudioRenderCategory.Media);
    var result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        // FIX: without this check a failed creation produced a
        // NullReferenceException on the next dereference.
        return;
    }
    _graph = result.Graph;

    var deviceOutputNodeResult = await _graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return;
    }
    _deviceOutput = deviceOutputNodeResult.DeviceOutputNode;

    _graph.ResetAllNodes();
    _graph.Start();
}
// Builds the audio graph and its device output node.
// Throws InvalidOperationException when either creation step fails.
public async Task InitializeAudioGraph()
{
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        // FIX: fail with a clear message instead of dereferencing the null
        // Graph below (which produced a NullReferenceException).
        throw new InvalidOperationException("AudioGraph creation failed: " + result.Status);
    }
    this._audioGraph = result.Graph;

    CreateAudioDeviceOutputNodeResult outputDeviceNodeResult = await this._audioGraph.CreateDeviceOutputNodeAsync();
    if (outputDeviceNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        throw new InvalidOperationException("Device output node creation failed: " + outputDeviceNodeResult.Status);
    }
    _deviceOutputNode = outputDeviceNodeResult.DeviceOutputNode;
}
// Builds a graph with a quantum size as close as possible to bufferLength,
// creates the device output, generates the note frame-input nodes, and
// starts the graph (individual notes are started/stopped separately).
private async Task CreateAudioGraph()
{
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.ClosestToDesired;
    settings.DesiredSamplesPerQuantum = bufferLength;

    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        // Cannot create graph
        rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
        return;
    }
    graph = result.Graph;
    Debug.WriteLine($"Set samples per quantum to {graph.SamplesPerQuantum}");

    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device output node
        rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
        speakerContainer.Background = new SolidColorBrush(Colors.Red);
        // FIX: execution previously fell through here, assigned the null
        // DeviceOutputNode and reported success on a failed graph.
        return;
    }
    deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
    rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
    speakerContainer.Background = new SolidColorBrush(Colors.Green);

    // Create the FrameInputNodes at the same format as the graph, except explicitly set mono.
    AudioEncodingProperties nodeEncodingProperties = graph.EncodingProperties;
    nodeEncodingProperties.ChannelCount = 1;
    CreateNotes(noteCount, nodeEncodingProperties, deviceOutputNode);

    // Summarize the setup (note range, quantum size, buffer duration) for the UI.
    double lowNote = inputNotes.Values.OrderBy(p => p.frequency).First().frequency;
    double hiNote = inputNotes.Values.OrderByDescending(p => p.frequency).First().frequency;
    noteCount = inputNotes.Keys.Count;
    var mSLength = 1000.0 * (double)bufferLength / 48000.0;
    setupDescription = $"playing {noteCount} notes in {noteCount / 3} octaves ({lowNote:0.0} -> {hiNote:0.0}), {graph.SamplesPerQuantum} samples, {mSLength:0.0}mS buffers";
    DetailText.Text = setupDescription;
    frameContainer.Background = new SolidColorBrush(Colors.Green);

    // Start the graph since we will only start/stop the frame input nodes
    graph.Start();
}
// Attaches a default-device output node to the graph; throws on failure.
private async Task CreateDeviceOutputNode()
{
    var outputNodeResult = await audioGraph.CreateDeviceOutputNodeAsync();
    if (outputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        throw new Exception($"Could not create device output node. { Logger.GetExceptionLocalization(this) }");
    }

    deviceOutput = outputNodeResult.DeviceOutputNode;
}
/// <summary>
/// Create a node to output audio data to the default audio device (e.g. soundcard).
/// Throws with the creation status when the device is unavailable.
/// </summary>
private async Task CreateDefaultDeviceOutputNode()
{
    var outputNodeResult = await _graph.CreateDeviceOutputNodeAsync();
    if (outputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        throw new Exception(outputNodeResult.Status.ToString());
    }

    _deviceOutputNode = outputNodeResult.DeviceOutputNode;
}
// Releases the graph, nodes, and timer; safe to call more than once since
// every field is null-checked and nulled out afterwards.
public void Dispose()
{
    _audioGraph?.Dispose();
    _outputNode?.Dispose();
    _inputNode?.Dispose();
    _timer?.Dispose();

    _audioGraph = null;
    _outputNode = null;
    _inputNode = null;
    _timer = null;
}
// Creates one frame-input node per note. The first three notes are a C-E-G
// triad; every later note is the note three positions back, one octave up.
// Stops early once a frequency exceeds the Nyquist limit (higher notes would
// alias into noise).
private void CreateNotes(int noteCount, AudioEncodingProperties props, AudioDeviceOutputNode outputNode)
{
    double nyquistLimit = graph.EncodingProperties.SampleRate / 2.0;

    for (int noteIndex = 0; noteIndex < noteCount; noteIndex++)
    {
        double frequency;
        if (noteIndex == 0)
        {
            frequency = startC;
        }
        else if (noteIndex == 1)
        {
            frequency = startE;
        }
        else if (noteIndex == 2)
        {
            frequency = startG;
        }
        else
        {
            frequency = inputNotes.Values.ToList()[noteIndex - 3].frequency * 2.0;
        }

        if (frequency > nyquistLimit)
        {
            break;
        }

        // The node starts stopped; QuantumStarted supplies samples on demand
        // once playback of this note begins.
        var frameNode = graph.CreateFrameInputNode(props);
        frameNode.AddOutgoingConnection(outputNode);
        frameNode.Stop();
        frameNode.QuantumStarted += node_QuantumStarted;

        inputNotes.Add(frameNode, new Note(graph, frameNode, frequency));
    }
}
// Attaches the default-device output node to the graph; throws with the
// creation status on failure.
private async Task InitializeAudioOutput()
{
    var createResult = await _AudioGraph.CreateDeviceOutputNodeAsync();
    if (createResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        throw new Exception(createResult.Status.ToString());
    }

    _OutputNode = createResult.DeviceOutputNode;
}
// Builds a lowest-latency graph that monitors the default microphone: the
// device input feeds both a frame-output capture node and the speakers.
public async Task InitializeAsync()
{
    DebugUtil.CheckAppThread();

    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    // settings.DesiredRenderDeviceAudioProcessing = AudioProcessing.Raw;
    settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;

    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    DebugUtil.Assert(result.Status == AudioGraphCreationStatus.Success, "Failed to create audio graph");
    _audioGraph = result.Graph;

    // FIX: await the output-node creation. The old code called GetResults()
    // on the still-running IAsyncOperation, which throws when the operation
    // has not completed. (Also removed an unused LatencyInSamples local.)
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await _audioGraph.CreateDeviceOutputNodeAsync();
    DebugUtil.Assert(deviceOutputNodeResult.Status == AudioDeviceNodeCreationStatus.Success,
        $"Audio Device Output unavailable because {deviceOutputNodeResult.Status}");
    _deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

    // Frame-output node used to capture the microphone stream.
    _inputCaptureNode = _audioGraph.CreateFrameOutputNode();

    // Create a device input node using the default audio input device.
    CreateAudioDeviceInputNodeResult deviceInputNodeResult = await _audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    DebugUtil.Assert(deviceInputNodeResult.Status == AudioDeviceNodeCreationStatus.Success,
        $"Audio Device Input unavailable because {deviceInputNodeResult.Status}");
    _deviceInputNode = deviceInputNodeResult.DeviceInputNode;

    // Microphone -> capture node, and microphone -> speakers (live monitor).
    _deviceInputNode.AddOutgoingConnection(_inputCaptureNode);
    _deviceInputNode.AddOutgoingConnection(_deviceOutputNode);
}
// Attaches the default-device output node to the graph and logs the outcome.
// (Method name "InitOuputNode" is kept as-is: it is the public interface.)
private async Task InitOuputNode()
{
    var outputResult = await ag.CreateDeviceOutputNodeAsync();
    if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        rootPage.ShowLogs("Output device failed: " + outputResult.Status);
        return;
    }

    audioO = outputResult.DeviceOutputNode;
    rootPage.ShowLogs("Output Node initialized successfully: " + audioO?.Device?.Name + " (channels :" + audioO?.EncodingProperties.ChannelCount + " )");
}
// Builds a game-effects graph and its device output node, then starts it.
private async Task InitializeAsync()
{
    var settings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.GameEffects);
    var graphCreationResult = await AudioGraph.CreateAsync(settings);
    if (graphCreationResult.Status != AudioGraphCreationStatus.Success)
    {
        // FIX: without this check a failed creation made the next
        // dereference throw a NullReferenceException.
        return;
    }
    _graph = graphCreationResult.Graph;

    var deviceOutputCreationResult = await _graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputCreationResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return;
    }
    _deviceOutput = deviceOutputCreationResult.DeviceOutputNode;

    _graph.ResetAllNodes();
    _graph.Start();
}
// Creates the speaker output node and routes the Tox audio input to it.
private async Task CreateSpeakerOutputNode()
{
    var outputNodeResult = await _audioGraph.CreateDeviceOutputNodeAsync();
    if (outputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // TODO: Error handling!
        // Cannot create device output node
        // ShowErrorMessage(result.Status.ToString());
        return;
    }

    _speakerOutputNode = outputNodeResult.DeviceOutputNode;
    _toxInputNode.AddOutgoingConnection(_speakerOutputNode);
}
// Builds the default media AudioGraph and its device output node, reporting
// status through the page banner and the speaker indicator color.
private async Task CreateAudioGraph()
{
    var defaultSettings = new AudioGraphSettings(AudioRenderCategory.Media);
    var creation = await AudioGraph.CreateAsync(defaultSettings);
    if (creation.Status != AudioGraphCreationStatus.Success)
    {
        rootPage.NotifyUser($"AudioGraph Creation Error because {creation.Status}", NotifyType.ErrorMessage);
        return;
    }
    graph = creation.Graph;

    // Device output on the default render device.
    var outputCreation = await graph.CreateDeviceOutputNodeAsync();
    if (outputCreation.Status != AudioDeviceNodeCreationStatus.Success)
    {
        rootPage.NotifyUser($"Audio Device Output unavailable because {outputCreation.Status}", NotifyType.ErrorMessage);
        speakerContainer.Background = new SolidColorBrush(Colors.Red);
        return;
    }
    deviceOutputNode = outputCreation.DeviceOutputNode;
    rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
    speakerContainer.Background = new SolidColorBrush(Colors.Green);
}
/// <summary>
/// Creates the audio graph, its device output node, and a submix node carrying an
/// echo effect (disabled until the user toggles it on); on success enables the
/// file-picker buttons so inputs can be loaded.
/// </summary>
private async Task CreateAudioGraph()
{
    // Create an AudioGraph with default setting
    var settings = new AudioGraphSettings(AudioRenderCategory.Media);
    var creationResult = await AudioGraph.CreateAsync(settings);
    if (creationResult.Status != AudioGraphCreationStatus.Success)
    {
        // Can't create the graph
        rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", creationResult.Status.ToString()), NotifyType.ErrorMessage);
        return;
    }
    graph = creationResult.Graph;

    // Create a device output node
    var outputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    if (outputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device output node
        rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", outputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
        speakerContainer.Background = new SolidColorBrush(Colors.Red);
        return;
    }
    deviceOutputNode = outputNodeResult.DeviceOutputNode;
    rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
    speakerContainer.Background = new SolidColorBrush(Colors.Green);

    // Route audio through a submix node so a single effect chain covers all inputs.
    // NOTE(review): `subMixNode` (capital M) appears to be the UI container while
    // `submixNode` is the audio node — easy to confuse; confirm against the XAML.
    submixNode = graph.CreateSubmixNode();
    subMixNode.Background = new SolidColorBrush(Colors.Green);
    submixNode.AddOutgoingConnection(deviceOutputNode);

    echoEffect = new EchoEffectDefinition(graph)
    {
        WetDryMix = 0.7f,
        Feedback = 0.5f,
        Delay = 500.0f
    };
    submixNode.EffectDefinitions.Add(echoEffect);

    // Disable the effect in the beginning. Enable in response to user action (UI toggle switch)
    submixNode.DisableEffectsByDefinition(echoEffect);

    // All nodes can have an OutgoingGain property
    // Setting the gain on the Submix node attenuates the output of the node
    submixNode.OutgoingGain = 0.5;

    // Graph successfully created. Enable buttons to load files
    fileButton1.IsEnabled = true;
    fileButton2.IsEnabled = true;
}
/// <summary>
/// Creates the audio graph, a device output node, and a mono FrameInputNode whose
/// QuantumStarted handler supplies generated samples; starts the graph with the
/// frame input node initially stopped.
/// </summary>
/// <remarks>
/// BUG FIX: the original omitted <c>return;</c> after the device-output failure
/// branch, so execution fell through, assigned a null <c>DeviceOutputNode</c>,
/// and later crashed in <c>AddOutgoingConnection</c>. The sibling
/// <c>CreateAudioGraph</c> methods in this file all return here.
/// </remarks>
private async Task CreateAudioGraph()
{
    // Create an AudioGraph with default settings
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        // Cannot create graph
        rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
        return;
    }
    graph = result.Graph;

    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device output node
        rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
        speakerContainer.Background = new SolidColorBrush(Colors.Red);
        return; // bail out: DeviceOutputNode is null past this point
    }
    deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
    rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
    speakerContainer.Background = new SolidColorBrush(Colors.Green);

    // Create the FrameInputNode at the same format as the graph, except explicitly set mono.
    AudioEncodingProperties nodeEncodingProperties = graph.EncodingProperties;
    nodeEncodingProperties.ChannelCount = 1;
    frameInputNode = graph.CreateFrameInputNode(nodeEncodingProperties);
    frameInputNode.AddOutgoingConnection(deviceOutputNode);
    frameContainer.Background = new SolidColorBrush(Colors.Green);

    // Initialize the Frame Input Node in the stopped state
    frameInputNode.Stop();

    // Hook up an event handler so we can start generating samples when needed
    // This event is triggered when the node is required to provide data
    frameInputNode.QuantumStarted += node_QuantumStarted;

    // Start the graph since we will only start/stop the frame input node
    graph.Start();
}
/// <summary>
/// Creates the audio graph and its device output node, surfacing each step's
/// outcome through <c>ShowMessage</c>.
/// </summary>
private async Task CreateAudioGraph()
{
    // Create an AudioGraph with default settings
    var settings = new AudioGraphSettings(AudioRenderCategory.Media);
    var creationResult = await AudioGraph.CreateAsync(settings);
    if (creationResult.Status != AudioGraphCreationStatus.Success)
    {
        // Cannot create graph
        await ShowMessage(String.Format("AudioGraph Creation Error because {0}", creationResult.Status.ToString()));
        return;
    }
    graph = creationResult.Graph;

    // Create a device output node
    var outputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    if (outputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device output node
        await ShowMessage(String.Format("Device Output unavailable because {0}", outputNodeResult.Status.ToString()));
        return;
    }

    deviceOutput = outputNodeResult.DeviceOutputNode;
    await ShowMessage("Device Output Node successfully created");
}