/// <summary>
/// Builds the AudioGraph, creates the speaker output node, preloads the countdown
/// and "done" sound files as file-input nodes, then starts the graph.
/// On any creation failure the method silently returns with the graph unstarted.
/// </summary>
public async Task InitializeSounds() {
    InputNodes = new ObservableCollection <AudioFileInputNode>();
    FileInputNodesDictionary = new Dictionary <string, AudioFileInputNode>();
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status == AudioGraphCreationStatus.Success) {
        graph = result.Graph;
        CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
        // make sure the audio output is available
        if (deviceOutputNodeResult.Status == AudioDeviceNodeCreationStatus.Success) {
            outputNode = deviceOutputNodeResult.DeviceOutputNode;
            graph.ResetAllNodes();
            // Countdown clips come in pairs: "<base>0<i>-1<ext>" and "<base>0<i>-2<ext>".
            // NOTE(review): the fixed "0" prefix assumes countdownSoundsNum <= 9 — confirm.
            for (int i = 0; i < countdownSoundsNum; i++) {
                await CreateInputNodeFromFile(baseUri + countdownBaseFileName + "0" + (i + 1).ToString() + "-" + "1" + audioFileExt);
                await CreateInputNodeFromFile(baseUri + countdownBaseFileName + "0" + (i + 1).ToString() + "-" + "2" + audioFileExt);
            }
            // "Done" clips are zero-padded to two digits (01..09, then 10, 11, ...).
            for (int j = 0; j < doneSoundsNum; j++) {
                await CreateInputNodeFromFile(baseUri + doneBaseFileName + ((j >= 9) ? "" : "0") + (j + 1).ToString() + audioFileExt);
            }
            graph.Start();
        }
    }
}
/// <summary>
/// Builds the AudioGraph, creates the default device output node, wires the
/// packaged "audio.mp3" file into it, and starts playback.
/// </summary>
/// <remarks>
/// FIXES: was <c>async void</c> (exceptions unobservable); returning
/// <c>Task</c> is call-site compatible and lets callers await/observe failures.
/// Creation statuses were never checked (null-deref on failure), and an unused
/// <c>FileOpenPicker</c> was constructed and discarded — dead code removed.
/// </remarks>
private async Task initGraph() {
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success) {
        return; // cannot create graph
    }
    graph = result.Graph;
    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success) {
        return; // no usable render device
    }
    deviceOutput = deviceOutputNodeResult.DeviceOutputNode;
    StorageFile file = await GetPackagedFile(null, "audio.mp3");
    CreateAudioFileInputNodeResult fileInputResult = await graph.CreateFileInputNodeAsync(file);
    if (fileInputResult.Status != AudioFileNodeCreationStatus.Success) {
        return; // cannot read the packaged file
    }
    fileInput = fileInputResult.FileInputNode;
    fileInput.AddOutgoingConnection(deviceOutput);
    graph.Start();
}
/// <summary>
/// Builds the AudioGraph and device output, preloads every mapped sound file,
/// and starts the graph.
/// </summary>
/// <returns>true when the graph and output device were fully initialized; false otherwise.</returns>
/// <remarks>
/// BUG FIX: the original fell through after a failed device-output creation and
/// still set <c>isInitialized = true</c> / returned true with the graph unstarted.
/// </remarks>
public async Task <bool> InitializeSound() {
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success) {
        return (false);
    }
    graph = result.Graph;
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success) {
        return (false); // output device unavailable — do not report success
    }
    deviceOutput = deviceOutputNodeResult.DeviceOutputNode;
    graph.ResetAllNodes();
    foreach (var soundMapping in soundMappings) {
        await AddFileToSoundDictionary("ms-appx:///Features/Game/Assets/" + soundMapping.Value);
    }
    graph.Start();
    isInitialized = true;
    return (isInitialized);
}
/// <summary>
/// Initializes the shared AudioGraph and its default device output node,
/// logging progress and bailing out on any creation failure.
/// </summary>
private async Task CreateAudioGraph()
{
    var graphSettings = new AudioGraphSettings(AudioRenderCategory.Media);
    var graphResult = await AudioGraph.CreateAsync(graphSettings);
    if (graphResult.Status != AudioGraphCreationStatus.Success)
    {
        // Graph creation failed — log and stop.
        Logging.SingleInstance.LogMessage("AudioGraph Creation Error because " + graphResult.Status);
        return;
    }
    graph = graphResult.Graph;

    var outputResult = await graph.CreateDeviceOutputNodeAsync();
    if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // No usable render device — log and stop.
        Logging.SingleInstance.LogMessage(String.Format("Device Output unavailable because {0}", outputResult.Status.ToString()));
        return;
    }
    deviceOutput = outputResult.DeviceOutputNode;
    Logging.SingleInstance.LogMessage("Device Output Node successfully created");
}
/// <summary>
/// Creates the audio graph output.<br/>
/// IMPORTANT: Only 32-bit IEEEFloat format is supported!
/// </summary>
/// <param name="ChannelCount">The number of channels. Default: 2(Stereo)</param>
/// <param name="SampleRate">The sample rate. Default: 192000Hz</param>
/// <returns>An <see cref="AudioGraphOutput"/> wrapping the created graph and its device output node.</returns>
/// <exception cref="System.Exception">AudioGraph creation error: " + result.Status.ToString()</exception>
public static async Task <AudioGraphOutput> CreateAudioGraphOutput(uint ChannelCount = 2, uint SampleRate = 192000)
{
    // Request a 32-bit float PCM format at the caller's channel count / rate.
    var graphSettings = new AudioGraphSettings(AudioRenderCategory.Media)
    {
        QuantumSizeSelectionMode = QuantumSizeSelectionMode.ClosestToDesired,
        EncodingProperties = new AudioEncodingProperties()
        {
            BitsPerSample = 32,
            ChannelCount = ChannelCount,
            SampleRate = SampleRate,
            Subtype = "Float"
        }
    };

    var graphResult = await AudioGraph.CreateAsync(graphSettings);
    if (graphResult.Status != AudioGraphCreationStatus.Success)
    {
        throw new Exception("AudioGraph creation error: " + graphResult.Status.ToString(), graphResult.ExtendedError);
    }

    var outputResult = await graphResult.Graph.CreateDeviceOutputNodeAsync();
    if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        throw new Exception("AudioGraph creation error: " + outputResult.Status.ToString(), outputResult.ExtendedError);
    }

    return (new AudioGraphOutput(graphResult.Graph, outputResult.DeviceOutputNode));
}
/// <summary>
/// Builds the AudioGraph, its device output node and a submix node routed into
/// the output; throws on any creation failure.
/// </summary>
private async Task CreateAudioGraph()
{
    var graphSettings = new AudioGraphSettings(AudioRenderCategory.Media);
    var creation = await AudioGraph.CreateAsync(graphSettings);
    if (creation.Status != AudioGraphCreationStatus.Success)
    {
        throw new Exception("error");
    }
    _graph = creation.Graph;

    // The submix node is created before the output-node status check,
    // preserving the original ordering of side effects.
    var outputCreation = await _graph.CreateDeviceOutputNodeAsync();
    _subMixNode = _graph.CreateSubmixNode();
    if (outputCreation.Status != AudioDeviceNodeCreationStatus.Success)
    {
        throw new Exception("error");
    }
    _deviceOutputNode = outputCreation.DeviceOutputNode;
    _subMixNode.AddOutgoingConnection(_deviceOutputNode);
}
/// <summary>
/// Creates the AudioGraph and its default device output node.
/// </summary>
/// <remarks>
/// BUG FIX: the original called <c>.GetResults()</c> directly on the
/// IAsyncOperation returned by CreateAsync/CreateDeviceOutputNodeAsync without
/// awaiting it; GetResults() throws when the operation has not completed yet.
/// The operations are now awaited; returning <c>Task</c> keeps call sites
/// (<c>CreateAudioGraph();</c>) compiling while letting callers observe completion.
/// </remarks>
private async Task CreateAudioGraph() {
    // Create an AudioGraph with default settings
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success) {
        // Cannot create graph
        return;
    }
    graph = result.Graph;
    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputResult.Status != AudioDeviceNodeCreationStatus.Success) {
        // Cannot create device output
        return;
    }
    deviceOutputNode = deviceOutputResult.DeviceOutputNode;
}
/// <summary>
/// Creates the AudioGraph and its speaker output node, reporting status via
/// NotifyUser and coloring speakerContainer green/red on success/failure.
/// </summary>
private async Task CreateAudioGraph() {
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success) {
        NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
        return;
    }
    graph = result.Graph;
    // NOTE(review): assigning SampleRate after the graph has been created looks
    // ineffective — the desired rate is normally requested through
    // AudioGraphSettings.EncodingProperties before CreateAsync. Confirm this
    // actually changes the graph format.
    graph.EncodingProperties.SampleRate = 44100;
    CreateAudioDeviceOutputNodeResult deviceOutputResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputResult.Status != AudioDeviceNodeCreationStatus.Success) {
        NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputResult.Status.ToString()), NotifyType.ErrorMessage);
        speakerContainer.Background = new SolidColorBrush(Colors.Red);
        return;
    }
    deviceOutputNode = deviceOutputResult.DeviceOutputNode;
    NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
    speakerContainer.Background = new SolidColorBrush(Colors.Green);
}
/// <summary>
/// Creates the AudioGraph and a default device output node, reporting progress
/// on the sample page and coloring the speaker indicator.
/// </summary>
private async Task CreateAudioGraph()
{
    var graphSettings = new AudioGraphSettings(AudioRenderCategory.Media);
    var creation = await AudioGraph.CreateAsync(graphSettings);
    if (creation.Status != AudioGraphCreationStatus.Success)
    {
        // Graph creation failed — surface the status and stop.
        rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", creation.Status.ToString()), NotifyType.ErrorMessage);
        return;
    }
    graph = creation.Graph;

    var outputCreation = await graph.CreateDeviceOutputNodeAsync();
    if (outputCreation.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // No usable render device — flag the speaker indicator red.
        rootPage.NotifyUser(String.Format("Device Output unavailable because {0}", outputCreation.Status.ToString()), NotifyType.ErrorMessage);
        speakerContainer.Background = new SolidColorBrush(Colors.Red);
        return;
    }
    deviceOutput = outputCreation.DeviceOutputNode;
    rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
    speakerContainer.Background = new SolidColorBrush(Colors.Green);
}
/// <summary>
/// Builds the AudioGraph and output device, then preloads every sound bank's
/// audio files as file-input nodes, grouping them into the per-bank node lists.
/// On any creation failure the method silently returns with the graph unstarted.
/// </summary>
public async Task InitializeSounds() {
    soundBankInitializer = new SoundBanksInitializer();
    POneInputNodes = new ObservableCollection <AudioFileInputNode>();
    PTwoInputNodes = new ObservableCollection <AudioFileInputNode>();
    WOneInputNodes = new ObservableCollection <AudioFileInputNode>();
    WTwoInputNodes = new ObservableCollection <AudioFileInputNode>();
    InputNodesList = new ObservableCollection <ObservableCollection <AudioFileInputNode> >();
    InputNodesList.Add(POneInputNodes);
    InputNodesList.Add(PTwoInputNodes);
    InputNodesList.Add(WOneInputNodes);
    InputNodesList.Add(WTwoInputNodes);
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status == AudioGraphCreationStatus.Success) {
        graph = result.Graph;
        // create the output device
        CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
        // make sure the audio output is available
        if (deviceOutputNodeResult.Status == AudioDeviceNodeCreationStatus.Success) {
            deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
            graph.ResetAllNodes();
            foreach (SoundBank soundBank in soundBankInitializer.SoundBanks) {
                // FileNames[0]: one-shot samples.
                // NOTE(review): CreateInputNodeFromFile appears to populate
                // FileInputNodesDictionary keyed by file name — confirm.
                foreach (string fileName in soundBank.FileNames[0]) {
                    await CreateInputNodeFromFile("ms-appx:///Assets/AudioSamples/" + fileName);
                    InputNodesList[soundBankInitializer.SoundBanks.IndexOf(soundBank)].Add(FileInputNodesDictionary[fileName]);
                }
                // FileNames[1]: looping samples (LoopCount = null loops indefinitely).
                foreach (string fileName in soundBank.FileNames[1]) {
                    await CreateInputNodeFromFile("ms-appx:///Assets/AudioSamples/" + fileName);
                    FileInputNodesDictionary[fileName].LoopCount = null;
                    InputNodesList[soundBankInitializer.SoundBanks.IndexOf(soundBank)].Add(FileInputNodesDictionary[fileName]);
                }
            }
            InitializeEffects();
            graph.Start();
        }
    }
}
/// <summary>
/// Builds a 22.05 kHz mono 16-bit PCM AudioGraph wired both ways: a frame input
/// node feeds the device output (synthesized playback) and the device input
/// feeds a frame output node (capture). The graph is started; the frame input
/// node stays stopped until samples are needed.
/// </summary>
private async Task InitializeAudioAsync() {
    // Create an AudioGraph with default settings
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    settings.EncodingProperties = AudioEncodingProperties.CreatePcm(22050, 1, 16);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success) {
        return; // abort silently; fields stay unset
    }
    _graph = result.Graph;
    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await _graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success) {
        return;
    }
    _deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
    CreateAudioDeviceInputNodeResult deviceInputNodeResult = await _graph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success) {
        return;
    }
    _deviceInputNode = deviceInputNodeResult.DeviceInputNode;
    // Create the FrameInputNode at the same format as the graph, except explicitly set mono.
    AudioEncodingProperties nodeEncodingProperties = _graph.EncodingProperties;
    nodeEncodingProperties.ChannelCount = 1;
    _frameInputNode = _graph.CreateFrameInputNode(nodeEncodingProperties);
    _frameInputNode.AddOutgoingConnection(_deviceOutputNode);   // synth -> speakers
    _frameOutputNode = _graph.CreateFrameOutputNode(nodeEncodingProperties);
    _deviceInputNode.AddOutgoingConnection(_frameOutputNode);   // mic -> capture
    // Initialize the Frame Input Node in the stopped state
    _frameInputNode.Stop();
    // Hook up an event handler so we can start generating samples when needed
    // This event is triggered when the node is required to provide data
    _frameInputNode.QuantumStarted += node_QuantumStarted;
    _graph.QuantumProcessed += GraphOnQuantumProcessed;
    // Start the graph since we will only start/stop the frame input node
    _graph.Start();
}
/// <summary>
/// Creates a lowest-latency AudioGraph on the output device picked in the UI,
/// then creates the device output and default device input nodes, updating the
/// status text and colored indicators as each step succeeds or fails. On full
/// success the file-output button is enabled and an unrecoverable-error handler
/// is attached (required when using lowest-latency mode).
/// </summary>
private async Task CreateAudioGraph() {
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
    // NOTE(review): "SelectedIndex - 1" assumes the first list entry is a
    // placeholder (e.g. a default-device item) — confirm index 0 cannot reach here.
    settings.PrimaryRenderDevice = outputDevices[outputDevicesListBox.SelectedIndex - 1];
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success) {
        // Cannot create graph
        rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
        return;
    }
    graph = result.Graph;
    rootPage.NotifyUser("Graph successfully created!", NotifyType.StatusMessage);
    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success) {
        // Cannot create device output node
        rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
        outputDeviceContainer.Background = new SolidColorBrush(Colors.Red);
        return;
    }
    deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
    rootPage.NotifyUser("Device Output connection successfully created", NotifyType.StatusMessage);
    outputDeviceContainer.Background = new SolidColorBrush(Colors.Green);
    // Create a device input node using the default audio input device
    CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success) {
        // Cannot create device input node
        rootPage.NotifyUser(String.Format("Audio Device Input unavailable because {0}", deviceInputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
        inputDeviceContainer.Background = new SolidColorBrush(Colors.Red);
        return;
    }
    deviceInputNode = deviceInputNodeResult.DeviceInputNode;
    rootPage.NotifyUser("Device Input connection successfully created", NotifyType.StatusMessage);
    inputDeviceContainer.Background = new SolidColorBrush(Colors.Green);
    // Since graph is successfully created, enable the button to select a file output
    fileButton.IsEnabled = true;
    // Disable the graph button to prevent accidental click
    createGraphButton.IsEnabled = false;
    // Because we are using lowest latency setting, we need to handle device disconnection errors
    graph.UnrecoverableErrorOccurred += Graph_UnrecoverableErrorOccurred;
}
/// <summary>
/// One-time audio setup: loads the beep asset, creates an AudioGraph on the
/// (currently default) render device, creates output/input nodes, and
/// initializes the microphone via MediaCapture at 75% volume.
/// </summary>
public static async Task AudioDevices() {
    if (beep == null) {
        beep = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Assets/Audio/beep.wav"));
    }
    DeviceInformationCollection devices = await Windows.Devices.Enumeration.DeviceInformation.FindAllAsync(Windows.Media.Devices.MediaDevice.GetAudioRenderSelector());
    DeviceInformation selectedDevice = null;
    // Sometimes usb mics are confused as speakers.. so add check to make Speakers (Raspberry Pi 2 audio) the default <-- Confirmed works on rpi3
    //for (int i = 0; i <= devices.Count; i++)
    //{
    //    if (i != devices.Count)
    //    {
    //        if (DeviceTypeInformation.IsRaspberryPi)
    //        {
    //            if (devices[i].Name == "Speakers (Raspberry Pi 2 audio)")
    //            {
    //                selectedDevice = devices[i];
    //                break;
    //            }
    //        }
    //        else
    //        {
    //            selectedDevice = devices[i];
    //            break;
    //        }
    //    }
    //}
    // NOTE(review): with the selection loop above commented out, selectedDevice
    // stays null, so PrimaryRenderDevice = null and the graph uses the default
    // render device. Confirm this is intended.
    settings = new AudioGraphSettings(AudioRenderCategory.Media);
    settings.PrimaryRenderDevice = selectedDevice;
    CreateAudioGraphResult resultg = await AudioGraph.CreateAsync(settings);
    // NOTE(review): resultg.Status and the node-creation statuses below are
    // never checked — a failure surfaces later as a null reference. TODO confirm.
    audioflow = resultg.Graph;
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await audioflow.CreateDeviceOutputNodeAsync();
    // NOTE(review): deviceInputNodeResult is created but its node is never used here.
    CreateAudioDeviceInputNodeResult deviceInputNodeResult = await audioflow.CreateDeviceInputNodeAsync(MediaCategory.Media);
    // ('deviceOuput' typo is in the field declaration elsewhere; kept as-is.)
    deviceOuput = deviceOutputNodeResult.DeviceOutputNode;
    try {
        _mediaCapture = new MediaCapture();
        await _mediaCapture.InitializeAsync();
        _mediaCapture.Failed += _mediaCapture_Failed;
        _mediaCapture.AudioDeviceController.VolumePercent = 0.75f;
    }
    catch {
        // Best-effort: missing/odd microphones are tolerated, only logged.
        Debug.WriteLine("Failed to setup microphone is one connected?");
    }
}
/// <summary>
/// Creates the AudioGraph and its default device output node.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when graph or output-node creation fails.
/// </exception>
/// <remarks>
/// FIX: the original ignored both creation statuses, so a failure surfaced later
/// as a NullReferenceException; fail fast with a descriptive exception instead.
/// </remarks>
public async Task InitializeAudioGraph() {
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success) {
        throw new InvalidOperationException("AudioGraph creation failed: " + result.Status);
    }
    this._audioGraph = result.Graph;
    CreateAudioDeviceOutputNodeResult outputDeviceNodeResult = await this._audioGraph.CreateDeviceOutputNodeAsync();
    if (outputDeviceNodeResult.Status != AudioDeviceNodeCreationStatus.Success) {
        throw new InvalidOperationException("Device output node creation failed: " + outputDeviceNodeResult.Status);
    }
    _deviceOutputNode = outputDeviceNodeResult.DeviceOutputNode;
}
/// <summary>
/// Builds an AudioGraph with a requested quantum size of bufferLength samples,
/// creates the device output node, generates the test notes, and starts the
/// graph (the per-note frame input nodes are started/stopped individually).
/// </summary>
private async Task CreateAudioGraph() {
    // Create an AudioGraph with default settings
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.ClosestToDesired;
    settings.DesiredSamplesPerQuantum = bufferLength;
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success) {
        // Cannot create graph
        rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
        return;
    }
    graph = result.Graph;
    Debug.WriteLine($"Set samples per quantum to {graph.SamplesPerQuantum}");
    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success) {
        // Cannot create device output node
        rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
        speakerContainer.Background = new SolidColorBrush(Colors.Red);
        // BUG FIX: the original fell through here and dereferenced the (null)
        // DeviceOutputNode below; abort instead.
        return;
    }
    deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
    rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
    speakerContainer.Background = new SolidColorBrush(Colors.Green);
    // Create the FrameInputNode at the same format as the graph, except explicitly set mono.
    AudioEncodingProperties nodeEncodingProperties = graph.EncodingProperties;
    nodeEncodingProperties.ChannelCount = 1;
    CreateNotes(noteCount, nodeEncodingProperties, deviceOutputNode);
    double lowNote = inputNotes.Values.OrderBy(p => p.frequency).First().frequency;
    double hiNote = inputNotes.Values.OrderByDescending(p => p.frequency).First().frequency;
    noteCount = inputNotes.Keys.Count;
    // Quantum length in milliseconds, assuming a 48 kHz device rate.
    var mSLength = 1000.0 * (double)bufferLength / 48000.0;
    setupDescription = $"playing {noteCount} notes in {noteCount / 3} octaves ({lowNote:0.0} -> {hiNote:0.0}), {graph.SamplesPerQuantum} samples, {mSLength:0.0}mS buffers";
    DetailText.Text = setupDescription;
    frameContainer.Background = new SolidColorBrush(Colors.Green);
    // Start the graph since we will only start/stop the frame input node
    graph.Start();
}
/// <summary>
/// Create a node to output audio data to the default audio device (e.g. soundcard)
/// </summary>
private async Task CreateDefaultDeviceOutputNode()
{
    var creation = await _graph.CreateDeviceOutputNodeAsync();
    if (creation.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Surface the failure status rather than continuing with a null node.
        throw new Exception(creation.Status.ToString());
    }
    _deviceOutputNode = creation.DeviceOutputNode;
}
/// <summary>
/// Creates the graph's device output node, throwing with localized context when
/// the render device cannot be initialized.
/// </summary>
private async Task CreateDeviceOutputNode()
{
    var creation = await audioGraph.CreateDeviceOutputNodeAsync();
    if (creation.Status != AudioDeviceNodeCreationStatus.Success)
    {
        throw new Exception($"Could not create device output node. { Logger.GetExceptionLocalization(this) }");
    }
    deviceOutput = creation.DeviceOutputNode;
}
/// <summary>
/// Builds a standalone AudioGraph that plays the packaged audio file at
/// <paramref name="filePath"/> through the default output device.
/// Returns null when any stage (graph, output node, file, input node) fails.
/// </summary>
public async Task <AudioGraph> CreateAudioGraph(string filePath)
{
    var graphResult = await AudioGraph.CreateAsync(new AudioGraphSettings(AudioRenderCategory.GameEffects));
    if (graphResult.Status != AudioGraphCreationStatus.Success)
    {
        Debug.WriteLine("Could not create an audio graph");
        return (null);
    }
    AudioGraph audioGraph = graphResult.Graph;

    // Create a device output node
    var outputResult = await audioGraph.CreateDeviceOutputNodeAsync();
    if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        Debug.WriteLine("Could not create an output node");
        return (null);
    }
    AudioDeviceOutputNode deviceOutputNode = outputResult.DeviceOutputNode;

    // e.g. @"Audio\correctAnswerPlayer2.wav"
    var file = await Windows.ApplicationModel.Package.Current.InstalledLocation.GetFileAsync(filePath);
    if (file == null)
    {
        return (null);
    }

    var fileInputResult = await audioGraph.CreateFileInputNodeAsync(file);
    if (AudioFileNodeCreationStatus.Success != fileInputResult.Status)
    {
        // Cannot read input file
        Debug.WriteLine($"Cannot read input file because {fileInputResult.Status.ToString()}");
        return (null);
    }
    AudioFileInputNode fileInput = fileInputResult.FileInputNode;
    fileInput.AddOutgoingConnection(deviceOutputNode);
    return (audioGraph);
}
/// <summary>
/// Initializes a lowest-latency AudioGraph that monitors the default input
/// device: input -> frame-capture node (for reading samples) and input ->
/// device output (loopback). Must be called on the app thread.
/// </summary>
public async Task InitializeAsync() {
    DebugUtil.CheckAppThread();
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    // settings.DesiredRenderDeviceAudioProcessing = AudioProcessing.Raw;
    settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    DebugUtil.Assert(result.Status == AudioGraphCreationStatus.Success, "Failed to create audio graph");
    _audioGraph = result.Graph;
    int latencyInSamples = _audioGraph.LatencyInSamples;
    // Create a device output node.
    // BUG FIX: the original called .GetResults() on the IAsyncOperation without
    // awaiting it, which throws InvalidOperationException when the operation has
    // not completed. Await it like the other async calls in this method.
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await _audioGraph.CreateDeviceOutputNodeAsync();
    DebugUtil.Assert(deviceOutputNodeResult.Status == AudioDeviceNodeCreationStatus.Success, $"Audio Device Output unavailable because {deviceOutputNodeResult.Status}");
    _deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
    _inputCaptureNode = _audioGraph.CreateFrameOutputNode();
    // Create a device input node using the default audio input device
    CreateAudioDeviceInputNodeResult deviceInputNodeResult = await _audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    DebugUtil.Assert(deviceInputNodeResult.Status == AudioDeviceNodeCreationStatus.Success, $"Audio Device Input unavailable because {deviceInputNodeResult.Status}");
    _deviceInputNode = deviceInputNodeResult.DeviceInputNode;
    _deviceInputNode.AddOutgoingConnection(_inputCaptureNode);
    _deviceInputNode.AddOutgoingConnection(_deviceOutputNode);
    /*
     * echoEffect = new EchoEffectDefinition(_graph);
     * echoEffect.WetDryMix = 0.7f;
     * echoEffect.Feedback = 0.5f;
     * echoEffect.Delay = 500.0f;
     * submixNode.EffectDefinitions.Add(echoEffect);
     *
     * // Disable the effect in the beginning. Enable in response to user action (UI toggle switch)
     * submixNode.DisableEffectsByDefinition(echoEffect);
     */
    // All nodes can have an OutgoingGain property
    // Setting the gain on the Submix node attenuates the output of the node
    //_submixNode.OutgoingGain = 0.5;
}
/// <summary>
/// Creates the device output node for the graph; throws when no output device
/// can be initialized.
/// </summary>
private async Task CreateDeviceOutputNodeAsync()
{
    var creation = await audioGraph.CreateDeviceOutputNodeAsync();
    if (creation.Status != AudioDeviceNodeCreationStatus.Success)
    {
        throw new InvalidOperationException("Can't init output device");
    }
    deviceOutputNode = creation.DeviceOutputNode;
}
/// <summary>
/// Creates the AudioGraph, device output node, and a submix node carrying an
/// (initially disabled) echo effect; wires submix -> output, attenuates the
/// submix, and enables the file-load buttons on success.
/// </summary>
private async Task CreateAudioGraph() {
    // Create an AudioGraph with default setting
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success) {
        // Can't create the graph
        rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
        return;
    }
    graph = result.Graph;
    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success) {
        // Cannot create device output node
        rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
        speakerContainer.Background = new SolidColorBrush(Colors.Red);
        return;
    }
    deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
    rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
    speakerContainer.Background = new SolidColorBrush(Colors.Green);
    // The submix node sits between the (later-loaded) file inputs and the device
    // output so the echo effect applies to the mixed signal.
    submixNode = graph.CreateSubmixNode();
    submixNodeContainer.Background = new SolidColorBrush(Colors.Green);
    submixNode.AddOutgoingConnection(deviceOutputNode);
    echoEffect = new EchoEffectDefinition(graph);
    echoEffect.WetDryMix = 0.7f;
    echoEffect.Feedback = 0.5f;
    echoEffect.Delay = 500.0f;
    submixNode.EffectDefinitions.Add(echoEffect);
    // Disable the effect in the beginning. Enable in response to user action (UI toggle switch)
    submixNode.DisableEffectsByDefinition(echoEffect);
    // All nodes can have an OutgoingGain property
    // Setting the gain on the Submix node attenuates the output of the node
    submixNode.OutgoingGain = 0.5;
    // Graph successfully created. Enable buttons to load files
    fileButton1.IsEnabled = true;
    fileButton2.IsEnabled = true;
}
//</SnippetCreateDeviceInputNode>
//<SnippetCreateDeviceOutputNode>
/// <summary>
/// Creates a node that renders the graph's audio to the default output device;
/// shows an error message and leaves the field unset on failure.
/// </summary>
private async Task CreateDeviceOutputNode() {
    // Create a device output node
    CreateAudioDeviceOutputNodeResult result = await audioGraph.CreateDeviceOutputNodeAsync();
    if (result.Status != AudioDeviceNodeCreationStatus.Success) {
        // Cannot create device output node
        ShowErrorMessage(result.Status.ToString());
        return;
    }
    deviceOutputNode = result.DeviceOutputNode;
}
/// <summary>
/// Creates a lowest-latency AudioGraph with a device output node, the default
/// device input node, and a frame output node fed by the input; captured frames
/// are consumed in the QuantumProcessed handler.
/// </summary>
private async Task CreateAudioGraph() {
    // Create an AudioGraph with default settings
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success) {
        // Cannot create graph
        _rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
        return;
    }
    _graph = result.Graph;
    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputResult = await _graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputResult.Status != AudioDeviceNodeCreationStatus.Success) {
        // Cannot create device output
        _rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputResult.Status.ToString()), NotifyType.ErrorMessage);
        speakerContainer.Background = new SolidColorBrush(Colors.Red);
        return;
    }
    _deviceOutputNode = deviceOutputResult.DeviceOutputNode;
    _rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
    speakerContainer.Background = new SolidColorBrush(Colors.Green);
    // Create a device input node using the default audio input device
    CreateAudioDeviceInputNodeResult deviceInputNodeResult = await _graph.CreateDeviceInputNodeAsync(MediaCategory.Media);
    if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success) {
        // Cannot create device input node
        _rootPage.NotifyUser(String.Format("Audio Device Input unavailable because {0}", deviceInputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
        return;
    }
    _deviceInputNode = deviceInputNodeResult.DeviceInputNode;
    // Route captured audio into a frame output node, read per quantum.
    // NOTE(review): _deviceOutputNode is created but the input is only wired to
    // the frame output node — confirm that live pass-through playback is not expected.
    var frameOutputNode = _graph.CreateFrameOutputNode();
    _graph.QuantumProcessed += AudioGraph_QuantumProcessed;
    _deviceInputNode.AddOutgoingConnection(frameOutputNode);
}
/// <summary>
/// Creates a lowest-latency AudioGraph, preferring a render device whose name
/// contains "Halide" (falling back to the default device when none matches),
/// then creates the device output node. Failures return silently.
/// </summary>
private async Task CreateAudioGraph() {
    // Create an AudioGraph with default settings
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media) {
        //CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
        QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency
    };
    // Scan render devices for the hard-coded "Halide" endpoint.
    // NOTE(review): if several device names contain "Halide", the last match
    // wins — confirm this is intended.
    var outputDevices = await DeviceInformation.FindAllAsync(MediaDevice.GetAudioRenderSelector());
    foreach (var device in outputDevices) {
        Debug.WriteLine(device.Name);
        if (device.Name.Contains("Halide")) {
            settings.PrimaryRenderDevice = device;
        }
    }
    //settings.PrimaryRenderDevice = outputDevices[];
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success) {
        // Cannot create graph
        //rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
        return;
    }
    graph = result.Graph;
    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success) {
        // Cannot create device output node
        //rootPage.NotifyUser(String.Format("Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
        //speakerContainer.Background = new SolidColorBrush(Colors.Red);
        return;
    }
    deviceOutput = deviceOutputNodeResult.DeviceOutputNode;
    //rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
    //speakerContainer.Background = new SolidColorBrush(Colors.Green);
}
/// <summary>
/// Creates the device output node with a slight gain boost; on failure the
/// graph is disposed and nulled so callers can detect the dead state.
/// </summary>
async Task createDeviceOutputNode()
{
    var creation = await audioGraph.CreateDeviceOutputNodeAsync();
    if (creation.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // No output device — tear the graph down.
        audioGraph.Dispose();
        audioGraph = null;
        return;
    }
    deviceOutputNode = creation.DeviceOutputNode;
    deviceOutputNode.OutgoingGain = 1.35;
}
/// <summary>
/// Creates the AudioGraph, device output node, and a mono frame input node
/// (initially stopped) whose QuantumStarted handler supplies generated samples;
/// then starts the graph.
/// </summary>
private async Task CreateAudioGraph() {
    // Create an AudioGraph with default settings
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success) {
        // Cannot create graph
        rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
        return;
    }
    graph = result.Graph;
    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success) {
        // Cannot create device output node
        rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
        speakerContainer.Background = new SolidColorBrush(Colors.Red);
        // BUG FIX: the original fell through here and dereferenced the (null)
        // DeviceOutputNode below; abort instead.
        return;
    }
    deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
    rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
    speakerContainer.Background = new SolidColorBrush(Colors.Green);
    // Create the FrameInputNode at the same format as the graph, except explicitly set mono.
    AudioEncodingProperties nodeEncodingProperties = graph.EncodingProperties;
    nodeEncodingProperties.ChannelCount = 1;
    frameInputNode = graph.CreateFrameInputNode(nodeEncodingProperties);
    frameInputNode.AddOutgoingConnection(deviceOutputNode);
    frameContainer.Background = new SolidColorBrush(Colors.Green);
    // Initialize the Frame Input Node in the stopped state
    frameInputNode.Stop();
    // Hook up an event handler so we can start generating samples when needed
    // This event is triggered when the node is required to provide data
    frameInputNode.QuantumStarted += node_QuantumStarted;
    // Start the graph since we will only start/stop the frame input node
    graph.Start();
}
/// <summary>
/// Builds a speech-category AudioGraph at the given sampling rate (mono, 16-bit
/// PCM) with a frame input node wired to the device output, then starts the
/// graph; the frame input node itself stays stopped until needed.
/// </summary>
/// <param name="samplingRate">PCM sample rate in Hz requested for the graph format.</param>
private async Task CreateAudioGraph(uint samplingRate) {
    // Create an AudioGraph with default settings
    var encoding = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Auto);
    encoding.Audio = AudioEncodingProperties.CreatePcm(samplingRate, 1, 16);
    var settings = new AudioGraphSettings(AudioRenderCategory.Speech);
    settings.EncodingProperties = encoding.Audio;
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success) {
        return; // abort silently
    }
    graph = result.Graph;
    // NOTE(review): setting SampleRate after CreateAsync looks ineffective — the
    // rate was already requested via settings.EncodingProperties above. Confirm.
    graph.EncodingProperties.SampleRate = samplingRate;
    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success) {
        return;
    }
    deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
    // Create the FrameInputNode at the same format as the graph, except explicitly set mono.
    AudioEncodingProperties nodeEncodingProperties = graph.EncodingProperties;
    nodeEncodingProperties.ChannelCount = 1;
    frameInputNode = graph.CreateFrameInputNode(nodeEncodingProperties);
    frameInputNode.AddOutgoingConnection(deviceOutputNode);
    // Initialize the Frame Input Node in the stopped state
    frameInputNode.Stop();
    // NOTE(review): samples are supplied via AudioFrameCompleted; the usual
    // QuantumStarted hookup is commented out — confirm this is intentional.
    frameInputNode.AudioFrameCompleted += FrameInputNode_AudioFrameCompleted;
    //frameInputNode.QuantumStarted += node_QuantumStarted;
    // Start the graph since we will only start/stop the frame input node
    graph.Start();
}
// Create the (device) output node
/// <summary>
/// Creates the default device output node on the existing audioGraph and
/// stores it in outputNode. Silently no-ops if the graph is missing or the
/// node cannot be created.
/// </summary>
private async Task CreateOutputNodeAsync()
{
    if (audioGraph == null)
    {
        return;
    }

    CreateAudioDeviceOutputNodeResult creationResult = await audioGraph.CreateDeviceOutputNodeAsync();

    // Guard against both a missing result and an unsuccessful creation.
    if (creationResult == null || creationResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return;
    }

    outputNode = creationResult.DeviceOutputNode;
}
/// <summary>
/// Builds a low-latency AudioGraph that routes the selected audio input
/// device (_audioInputSelected) straight to the default output device and
/// starts it. Failures are logged and swallowed.
/// </summary>
private async Task StartAudioAsync()
{
    var graphSettings = new AudioGraphSettings(AudioRenderCategory.Media)
    {
        QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency
    };

    try
    {
        CreateAudioGraphResult graphResult = await AudioGraph.CreateAsync(graphSettings);
        if (graphResult.Status != AudioGraphCreationStatus.Success)
        {
            return;
        }
        _audioGraph = graphResult.Graph;

        // Create a device input node for the user-selected capture device.
        CreateAudioDeviceInputNodeResult inputResult =
            await _audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Media, _audioGraph.EncodingProperties, _audioInputSelected);
        if (inputResult.Status != AudioDeviceNodeCreationStatus.Success)
        {
            return;
        }

        // Create a device output node on the default render device.
        CreateAudioDeviceOutputNodeResult outputResult = await _audioGraph.CreateDeviceOutputNodeAsync();
        if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
        {
            return;
        }

        _deviceInputNode = inputResult.DeviceInputNode;
        _deviceOutputNode = outputResult.DeviceOutputNode;

        // Pass-through: input feeds output directly.
        _deviceInputNode.AddOutgoingConnection(_deviceOutputNode);
        _audioGraph.Start();
    }
    catch (Exception ex)
    {
        Debug.WriteLine($"AudioGraph initialization failed. {ex?.Message}");
    }
}
/// <summary>
/// Starts a low-latency capture/playback pipeline for the device selected in
/// DevicesBox: selected input -> frame output node (captured in
/// GraphOnQuantumProcessed), and a frame input node -> default output.
/// async void is acceptable here because this is a top-level event handler.
/// </summary>
private async void StartButton_Click(object sender, RoutedEventArgs e)
{
    DeviceInformation SelectedDevice = DevicesBox.SelectedItem as DeviceInformation;

    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media)
    {
        QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency
    };

    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    // BUGFIX: the original never checked graph creation and dereferenced a
    // null Graph on failure, crashing inside an async void handler.
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        System.Diagnostics.Debug.WriteLine(String.Format("AudioGraph creation failed because {0}", result.Status.ToString()));
        return;
    }
    graph = result.Graph;

    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    // BUGFIX: status was previously unchecked; DeviceOutputNode is null on failure.
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        System.Diagnostics.Debug.WriteLine(String.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()));
        return;
    }
    AudioDeviceOutputNode deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

    // Create a device input node using the default audio input device
    CreateAudioDeviceInputNodeResult deviceInputNodeResult =
        await graph.CreateDeviceInputNodeAsync(MediaCategory.Other, graph.EncodingProperties, SelectedDevice);

    if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device input node
        System.Diagnostics.Debug.WriteLine(String.Format("Audio Device Input unavailable because {0}", deviceInputNodeResult.Status.ToString()));
        return;
    }
    AudioDeviceInputNode deviceInputNode = deviceInputNodeResult.DeviceInputNode;

    // Capture path: input device -> frame output node.
    frameOutputNode = graph.CreateFrameOutputNode();
    deviceInputNode.AddOutgoingConnection(frameOutputNode);

    // Playback path: frame input node -> output device.
    AudioFrameInputNode frameInputNode = graph.CreateFrameInputNode();
    frameInputNode.AddOutgoingConnection(deviceOutputNode);

    // Attach to QuantumStarted event in order to receive synchronous updates from audio graph (to capture incoming audio).
    graph.QuantumStarted += GraphOnQuantumProcessed;

    graph.Start();
}
/// <summary>
/// Page initialization: subscribes to the third enumerated MIDI input port,
/// builds a low-latency mono AudioGraph with a frame input node, and
/// precomputes the MIDI-note-to-frequency table (A4 = 440 Hz).
/// async void is acceptable here because this is a top-level event handler.
/// </summary>
private async void Page_Loaded(object sender, RoutedEventArgs e)
{
    // midi
    var s = MidiInPort.GetDeviceSelector();
    var information = await DeviceInformation.FindAllAsync(s);
    var list = information.ToList();
    // BUGFIX: the original indexed ElementAt(2) unconditionally and crashed
    // when fewer than three MIDI input devices were present; FromIdAsync can
    // also return null when the port cannot be opened.
    // NOTE(review): index 2 is a hard-coded device choice carried over from
    // the original — consider making the port selectable.
    if (list.Count > 2)
    {
        port = await MidiInPort.FromIdAsync(list[2].Id);
        if (port != null)
        {
            port.MessageReceived += Port_MessageReceived;
        }
    }

    // audio
    var settings = new AudioGraphSettings(AudioRenderCategory.GameEffects);
    settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
    var creation = await AudioGraph.CreateAsync(settings);
    // BUGFIX: graph creation status was previously unchecked (null Graph on failure).
    if (creation.Status != AudioGraphCreationStatus.Success)
    {
        return;
    }
    graph = creation.Graph;

    output = await graph.CreateDeviceOutputNodeAsync();
    // BUGFIX: output node creation status was previously unchecked.
    if (output.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return;
    }

    // Frame input node at the graph's format, forced to mono.
    var encoding = graph.EncodingProperties;
    encoding.ChannelCount = 1;
    input = graph.CreateFrameInputNode(encoding);
    input.AddOutgoingConnection(output.DeviceOutputNode);

    // Begin stopped; samples are supplied on demand via QuantumStarted.
    input.Stop();
    input.QuantumStarted += Input_QuantumStarted;

    graph.Start();

    // midi notes (pitch to note)
    float a = 440; // a is 440 hz...
    for (int x = 0; x < 127; ++x)
    {
        // Equal temperament: f(n) = 440 * 2^((n - 69) / 12), rewritten with
        // 440/32 = f(MIDI 9) so the exponent is (x - 9) / 12.
        notes[x] = (a / 32f) * (float)Math.Pow(2f, ((x - 9f) / 12f));
    }
}