//<SnippetCreateSubmixNode>
private void CreateSubmixNode()
{
    AudioSubmixNode submixNode = audioGraph.CreateSubmixNode();
    fileInputNode.AddOutgoingConnection(submixNode);
    frameInputNode.AddOutgoingConnection(submixNode);
    submixNode.AddOutgoingConnection(fileOutputNode);
}

private async Task CreateAudioGraph()
{
    // Create an AudioGraph with default settings
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        // Cannot create graph
        throw new Exception("error");
    }
    _graph = result.Graph;

    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputResult = await _graph.CreateDeviceOutputNodeAsync();
    _subMixNode = _graph.CreateSubmixNode();
    if (deviceOutputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device output
        throw new Exception("error");
    }
    _deviceOutputNode = deviceOutputResult.DeviceOutputNode;
    _subMixNode.AddOutgoingConnection(_deviceOutputNode);
}

private void ApplyEffects(Effects e)
{
    if (reverbNode == null)
    {
        reverbNode = audioGraph.CreateSubmixNode();
    }
    if (eqNode == null)
    {
        eqNode = audioGraph.CreateSubmixNode();
    }
    if (limiterNode == null)
    {
        limiterNode = audioGraph.CreateSubmixNode();
    }
    if (eqEffect == null)
    {
        InitEffects();
    }

    reverbNode.EffectDefinitions.Clear();
    eqNode.EffectDefinitions.Clear();
    limiterNode.EffectDefinitions.Clear();

    if (e.HasFlag(Effects.Equalizer))
    {
        eqNode.EffectDefinitions.Add(eqEffect);
        eqNode.EnableEffectsByDefinition(eqEffect);
    }
    if (e.HasFlag(Effects.Limiter))
    {
        limiterNode.EffectDefinitions.Add(limiterEffect);
        limiterNode.EnableEffectsByDefinition(limiterEffect);
    }
    if (e.HasFlag(Effects.Reverb))
    {
        reverbNode.EffectDefinitions.Add(reverbEffect);
        reverbNode.EnableEffectsByDefinition(reverbEffect);
    }
}

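// Note: the Effects type used by ApplyEffects is not declared in this example. A minimal
// sketch of the assumed [Flags] enum, inferred from the HasFlag checks above (the member
// names match the calls; the underlying values are illustrative):
[Flags]
public enum Effects
{
    None      = 0,
    Equalizer = 1 << 0,
    Limiter   = 1 << 1,
    Reverb    = 1 << 2
}
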
private void ConfigureSubmix()
{
    // Create the submix node for the reverb
    _submixNode = _graph.CreateSubmixNode();
    _submixNode.OutgoingGain = 0.125d;

    // Connect the submix node to the device output.
    _submixNode.AddOutgoingConnection(_deviceOutput);

    // Add reverb to the submix
    ReverbEffectDefinition reverb = ReverbEffectDefinitionFactory.GetReverbEffectDefinition(_graph, ReverbEffectDefinitions.SmallRoom);
    _submixNode.EffectDefinitions.Add(reverb);
}

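// ReverbEffectDefinitionFactory and ReverbEffectDefinitions are helpers defined elsewhere in
// that sample, not part of Windows.Media.Audio. A rough sketch of building an equivalent
// reverb directly with ReverbEffectDefinition; the property values here are illustrative,
// not taken from the original helper:
private void ConfigureSubmixWithoutFactory()
{
    _submixNode = _graph.CreateSubmixNode();
    _submixNode.OutgoingGain = 0.125d;
    _submixNode.AddOutgoingConnection(_deviceOutput);

    ReverbEffectDefinition reverb = new ReverbEffectDefinition(_graph);
    reverb.WetDryMix = 50;   // percentage of processed (wet) signal in the output mix
    reverb.ReverbGain = 4;
    reverb.DecayTime = 2;    // seconds
    _submixNode.EffectDefinitions.Add(reverb);
}
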
private async Task CreateAudioGraph()
{
    // Create an AudioGraph with default settings
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);

    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        // Can't create the graph
        rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
        return;
    }
    graph = result.Graph;

    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device output node
        rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
        speakerContainer.Background = new SolidColorBrush(Colors.Red);
        return;
    }
    deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
    rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
    speakerContainer.Background = new SolidColorBrush(Colors.Green);

    // Create the submix node and connect it to the device output
    submixNode = graph.CreateSubmixNode();
    submixNodeContainer.Background = new SolidColorBrush(Colors.Green);
    submixNode.AddOutgoingConnection(deviceOutputNode);

    // Add an echo effect to the submix node
    echoEffect = new EchoEffectDefinition(graph);
    echoEffect.WetDryMix = 0.7f;
    echoEffect.Feedback = 0.5f;
    echoEffect.Delay = 500.0f;
    submixNode.EffectDefinitions.Add(echoEffect);

    // Disable the effect in the beginning. Enable in response to user action (UI toggle switch)
    submixNode.DisableEffectsByDefinition(echoEffect);

    // All nodes can have an OutgoingGain property.
    // Setting the gain on the submix node attenuates the output of the node.
    submixNode.OutgoingGain = 0.5;

    // Graph successfully created. Enable buttons to load files
    fileButton1.IsEnabled = true;
    fileButton2.IsEnabled = true;
}

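// The echo effect above starts disabled. A minimal sketch of the toggle-switch handler the
// comment refers to; the echoToggle control name is an assumption, not from the original:
private void EchoToggle_Toggled(object sender, RoutedEventArgs e)
{
    if (echoToggle.IsOn)
    {
        submixNode.EnableEffectsByDefinition(echoEffect);
    }
    else
    {
        submixNode.DisableEffectsByDefinition(echoEffect);
    }
}
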
public async Task InitializeAsync()
{
    DebugUtil.CheckAppThread();

    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    // settings.DesiredRenderDeviceAudioProcessing = AudioProcessing.Raw;
    settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;

    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    DebugUtil.Assert(result.Status == AudioGraphCreationStatus.Success, "Failed to create audio graph");
    _audioGraph = result.Graph;

    int latencyInSamples = _audioGraph.LatencyInSamples;

    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await _audioGraph.CreateDeviceOutputNodeAsync();
    DebugUtil.Assert(deviceOutputNodeResult.Status == AudioDeviceNodeCreationStatus.Success,
        $"Audio Device Output unavailable because {deviceOutputNodeResult.Status}");
    _deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

    _inputCaptureSubmixNode = _audioGraph.CreateSubmixNode();

    // Create a device input node using the default audio input device
    CreateAudioDeviceInputNodeResult deviceInputNodeResult = await _audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    DebugUtil.Assert(deviceInputNodeResult.Status == AudioDeviceNodeCreationStatus.Success,
        $"Audio Device Input unavailable because {deviceInputNodeResult.Status}");
    _deviceInputNode = deviceInputNodeResult.DeviceInputNode;

    _deviceInputNode.AddOutgoingConnection(_inputCaptureSubmixNode);

    /*
     * echoEffect = new EchoEffectDefinition(_graph);
     * echoEffect.WetDryMix = 0.7f;
     * echoEffect.Feedback = 0.5f;
     * echoEffect.Delay = 500.0f;
     * submixNode.EffectDefinitions.Add(echoEffect);
     *
     * // Disable the effect in the beginning. Enable in response to user action (UI toggle switch)
     * submixNode.DisableEffectsByDefinition(echoEffect);
     */

    // All nodes can have an OutgoingGain property.
    // Setting the gain on the submix node attenuates the output of the node.
    //_submixNode.OutgoingGain = 0.5;
}

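// As written, _inputCaptureSubmixNode has no outgoing connection, so the captured input is not
// routed anywhere yet. A sketch, assuming the intent is to read the mixed input back as audio
// frames; the _frameOutputNode field is an assumption, not from the original:
_frameOutputNode = _audioGraph.CreateFrameOutputNode();
_inputCaptureSubmixNode.AddOutgoingConnection(_frameOutputNode);
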
private async void Play_OnClick(object sender, RoutedEventArgs e)
{
    await CreateGraph();
    await CreateDefaultDeviceOutputNode();
    await CreateFileInputNode();

    // Create submix node
    _submixNode = _graph.CreateSubmixNode();

    AddCustomEcho();
    ConnectNodes();

    _graph.Start();
}

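// CreateGraph, CreateDefaultDeviceOutputNode, CreateFileInputNode, AddCustomEcho, and
// ConnectNodes are helpers that are not shown here. A minimal sketch of the assumed
// ConnectNodes wiring; the _fileInputNode and _deviceOutputNode fields are assumptions:
private void ConnectNodes()
{
    // Route the file input through the submix so the echo applies before the device output.
    _fileInputNode.AddOutgoingConnection(_submixNode);
    _submixNode.AddOutgoingConnection(_deviceOutputNode);
}
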
public async Task InitializeAsync()
{
    audGraphResult = await AudioGraph.CreateAsync(audGraphSettings);
    audGraph = audGraphResult.Graph;

    // Create a device output node
    deviceOutputNodeResult = await audGraph.CreateDeviceOutputNodeAsync();
    deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

    // Create a device input node for the selected input device
    //deviceInputNodeResult = await audGraph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    //deviceInputNodeResult = await audGraph.CreateDeviceInputNodeAsync(MediaCategory.Other, audGraph.EncodingProperties);
    deviceInputNodeResult = await audGraph.CreateDeviceInputNodeAsync(MediaCategory.Other, audGraph.EncodingProperties, inputDevice);
    deviceInputNode = deviceInputNodeResult.DeviceInputNode;

    // Create a submix node and route the input through it to the device output
    audioDeviceOutputSubmixNode = audGraph.CreateSubmixNode();
    deviceInputNode.AddOutgoingConnection(audioDeviceOutputSubmixNode);
    audioDeviceOutputSubmixNode.AddOutgoingConnection(deviceOutputNode);

    // Create the effect definitions that will be attached to the submix node
    CreateEchoEffect();
    CreateReverbEffect();
    CreateLimiterEffect();
    CreateEqEffect();
}

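// CreateEchoEffect, CreateReverbEffect, CreateLimiterEffect, and CreateEqEffect are not shown.
// A minimal sketch of what CreateEchoEffect is assumed to do: build an EchoEffectDefinition and
// attach it to the submix node (the echoEffectDefinition field and values are illustrative):
private void CreateEchoEffect()
{
    echoEffectDefinition = new EchoEffectDefinition(audGraph);
    echoEffectDefinition.WetDryMix = 0.7f;
    echoEffectDefinition.Feedback = 0.5f;
    echoEffectDefinition.Delay = 500.0f;

    audioDeviceOutputSubmixNode.EffectDefinitions.Add(echoEffectDefinition);

    // Leave the effect off until the user turns it on.
    audioDeviceOutputSubmixNode.DisableEffectsByDefinition(echoEffectDefinition);
}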