public async Task CreateAudioGraph()
{
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
    settings.PrimaryRenderDevice = outputDevices[0];

    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        MessageDialog md = new MessageDialog("Error in creating Audio Graph", "OOPS!!");
        await md.ShowAsync();
        return;
    }

    graph = result.Graph;

    CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        MessageDialog md = new MessageDialog("Error in creating InputDeviceNode", "OOPS!!");
        await md.ShowAsync();
        return;
    }

    deviceInputNode = deviceInputNodeResult.DeviceInputNode;
}
private async Task CreateAudioGraph()
{
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
    settings.PrimaryRenderDevice = outputDevices[outputDevicesListBox.SelectedIndex - 1];

    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        // Cannot create graph
        rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
        return;
    }

    graph = result.Graph;
    rootPage.NotifyUser("Graph successfully created!", NotifyType.StatusMessage);

    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device output node
        rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
        outputDeviceContainer.Background = new SolidColorBrush(Colors.Red);
        return;
    }

    deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
    rootPage.NotifyUser("Device Output connection successfully created", NotifyType.StatusMessage);
    outputDeviceContainer.Background = new SolidColorBrush(Colors.Green);

    // Create a device input node using the default audio input device
    CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device input node
        rootPage.NotifyUser(String.Format("Audio Device Input unavailable because {0}", deviceInputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
        inputDeviceContainer.Background = new SolidColorBrush(Colors.Red);
        return;
    }

    deviceInputNode = deviceInputNodeResult.DeviceInputNode;
    rootPage.NotifyUser("Device Input connection successfully created", NotifyType.StatusMessage);
    inputDeviceContainer.Background = new SolidColorBrush(Colors.Green);

    // Since graph is successfully created, enable the button to select a file output
    fileButton.IsEnabled = true;

    // Disable the graph button to prevent accidental click
    createGraphButton.IsEnabled = false;

    // Because we are using lowest latency setting, we need to handle device disconnection errors
    graph.UnrecoverableErrorOccurred += Graph_UnrecoverableErrorOccurred;
}
private async Task InitializeAudioAsync()
{
    // Create an AudioGraph with default settings
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    settings.EncodingProperties = AudioEncodingProperties.CreatePcm(22050, 1, 16);

    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        return;
    }

    _graph = result.Graph;

    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await _graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return;
    }

    _deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

    CreateAudioDeviceInputNodeResult deviceInputNodeResult = await _graph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return;
    }

    _deviceInputNode = deviceInputNodeResult.DeviceInputNode;

    // Create the FrameInputNode at the same format as the graph, except explicitly set mono.
    AudioEncodingProperties nodeEncodingProperties = _graph.EncodingProperties;
    nodeEncodingProperties.ChannelCount = 1;
    _frameInputNode = _graph.CreateFrameInputNode(nodeEncodingProperties);
    _frameInputNode.AddOutgoingConnection(_deviceOutputNode);

    _frameOutputNode = _graph.CreateFrameOutputNode(nodeEncodingProperties);
    _deviceInputNode.AddOutgoingConnection(_frameOutputNode);

    // Initialize the Frame Input Node in the stopped state
    _frameInputNode.Stop();

    // Hook up an event handler so we can start generating samples when needed
    // This event is triggered when the node is required to provide data
    _frameInputNode.QuantumStarted += node_QuantumStarted;

    _graph.QuantumProcessed += GraphOnQuantumProcessed;

    // Start the graph since we will only start/stop the frame input node
    _graph.Start();
}
public static async Task AudioDevices()
{
    if (beep == null)
    {
        beep = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Assets/Audio/beep.wav"));
    }

    DeviceInformationCollection devices = await Windows.Devices.Enumeration.DeviceInformation.FindAllAsync(Windows.Media.Devices.MediaDevice.GetAudioRenderSelector());
    DeviceInformation selectedDevice = null;

    // Sometimes USB mics are detected as speakers, so add a check to make Speakers (Raspberry Pi 2 audio) the default <-- Confirmed works on rpi3
    //for (int i = 0; i <= devices.Count; i++)
    //{
    //    if (i != devices.Count)
    //    {
    //        if (DeviceTypeInformation.IsRaspberryPi)
    //        {
    //            if (devices[i].Name == "Speakers (Raspberry Pi 2 audio)")
    //            {
    //                selectedDevice = devices[i];
    //                break;
    //            }
    //        }
    //        else
    //        {
    //            selectedDevice = devices[i];
    //            break;
    //        }
    //    }
    //}

    settings = new AudioGraphSettings(AudioRenderCategory.Media);
    settings.PrimaryRenderDevice = selectedDevice;

    CreateAudioGraphResult resultg = await AudioGraph.CreateAsync(settings);
    audioflow = resultg.Graph;

    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await audioflow.CreateDeviceOutputNodeAsync();
    CreateAudioDeviceInputNodeResult deviceInputNodeResult = await audioflow.CreateDeviceInputNodeAsync(MediaCategory.Media);
    deviceOuput = deviceOutputNodeResult.DeviceOutputNode;

    try
    {
        _mediaCapture = new MediaCapture();
        await _mediaCapture.InitializeAsync();
        _mediaCapture.Failed += _mediaCapture_Failed;
        _mediaCapture.AudioDeviceController.VolumePercent = 0.75f;
    }
    catch
    {
        Debug.WriteLine("Failed to set up the microphone. Is one connected?");
    }
}
private async Task CreateAudioGraph()
{
    if (graph != null)
    {
        graph.Dispose();
    }

    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.SystemDefault;

    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        // Cannot create graph
        return;
    }

    graph = result.Graph;

    // Create a device input node using the default audio input device
    // (requires the Microphone capability in the app manifest)
    CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device input node
        return;
    }

    deviceInputNode = deviceInputNodeResult.DeviceInputNode;

    // Create the output file
    StorageFolder storageFolder = Windows.Storage.ApplicationData.Current.LocalFolder;
    StorageFile file = await storageFolder.CreateFileAsync("sample.wav", Windows.Storage.CreationCollisionOption.ReplaceExisting);
    path = file.Path.ToString();

    MediaEncodingProfile fileProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Medium);

    // Operate node at the graph format, but save file at the specified format
    CreateAudioFileOutputNodeResult fileOutputNodeResult = await graph.CreateFileOutputNodeAsync(file, fileProfile);
    if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
    {
        // FileOutputNode creation failed
        return;
    }

    fileOutputNode = fileOutputNodeResult.FileOutputNode;

    // Connect the input node to the file output node
    deviceInputNode.AddOutgoingConnection(fileOutputNode);
}
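// A minimal companion sketch (not part of the example above): assuming the graph and
// fileOutputNode fields created in CreateAudioGraph() above, recording would typically be
// started by starting the graph and finished by stopping the graph and finalizing the file.
// TranscodeFailureReason.None indicates the WAV file was written successfully.
private void StartRecording()
{
    graph.Start();
}

private async Task StopRecordingAsync()
{
    graph.Stop();

    TranscodeFailureReason finalizeResult = await fileOutputNode.FinalizeAsync();
    if (finalizeResult != TranscodeFailureReason.None)
    {
        // Finalization of the WAV file failed; surface finalizeResult to the user or retry
    }
}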
public async Task InitializeAsync()
{
    DebugUtil.CheckAppThread();

    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    // settings.DesiredRenderDeviceAudioProcessing = AudioProcessing.Raw;
    settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;

    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    DebugUtil.Assert(result.Status == AudioGraphCreationStatus.Success, "Failed to create audio graph");
    _audioGraph = result.Graph;
    int latencyInSamples = _audioGraph.LatencyInSamples;

    // Create a device output node (await the operation rather than calling GetResults() on a still-pending IAsyncOperation)
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await _audioGraph.CreateDeviceOutputNodeAsync();
    DebugUtil.Assert(deviceOutputNodeResult.Status == AudioDeviceNodeCreationStatus.Success,
        $"Audio Device Output unavailable because {deviceOutputNodeResult.Status}");
    _deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

    _inputCaptureNode = _audioGraph.CreateFrameOutputNode();

    // Create a device input node using the default audio input device
    CreateAudioDeviceInputNodeResult deviceInputNodeResult = await _audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    DebugUtil.Assert(deviceInputNodeResult.Status == AudioDeviceNodeCreationStatus.Success,
        $"Audio Device Input unavailable because {deviceInputNodeResult.Status}");
    _deviceInputNode = deviceInputNodeResult.DeviceInputNode;

    _deviceInputNode.AddOutgoingConnection(_inputCaptureNode);
    _deviceInputNode.AddOutgoingConnection(_deviceOutputNode);

    /*
     * echoEffect = new EchoEffectDefinition(_graph);
     * echoEffect.WetDryMix = 0.7f;
     * echoEffect.Feedback = 0.5f;
     * echoEffect.Delay = 500.0f;
     * submixNode.EffectDefinitions.Add(echoEffect);
     *
     * // Disable the effect in the beginning. Enable in response to user action (UI toggle switch)
     * submixNode.DisableEffectsByDefinition(echoEffect);
     */

    // All nodes can have an OutgoingGain property
    // Setting the gain on the Submix node attenuates the output of the node
    //_submixNode.OutgoingGain = 0.5;
}
public static async Task CreateDeviceInputNode()
{
    Console.WriteLine("Creating AudioGraphs");

    // Create an AudioGraph with default settings
    AudioGraphSettings graphsettings = new AudioGraphSettings(AudioRenderCategory.GameChat);
    graphsettings.EncodingProperties = new AudioEncodingProperties();
    graphsettings.EncodingProperties.Subtype = "Float";
    graphsettings.EncodingProperties.SampleRate = 48000;
    graphsettings.EncodingProperties.ChannelCount = 2;
    graphsettings.EncodingProperties.BitsPerSample = 32;
    graphsettings.EncodingProperties.Bitrate = 3072000;
    //settings.DesiredSamplesPerQuantum = 960;
    //settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.ClosestToDesired;

    CreateAudioGraphResult graphresult = await AudioGraph.CreateAsync(graphsettings);
    if (graphresult.Status != AudioGraphCreationStatus.Success)
    {
        // Cannot create graph
        return;
    }

    ingraph = graphresult.Graph;

    AudioGraphSettings nodesettings = new AudioGraphSettings(AudioRenderCategory.GameChat);
    nodesettings.EncodingProperties = AudioEncodingProperties.CreatePcm(48000, 2, 16);
    nodesettings.DesiredSamplesPerQuantum = 960;
    nodesettings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.ClosestToDesired;

    frameOutputNode = ingraph.CreateFrameOutputNode(outgraph.EncodingProperties);
    quantum = 0;
    ingraph.QuantumStarted += Graph_QuantumStarted;

    Windows.Devices.Enumeration.DeviceInformation selectedDevice =
        await Windows.Devices.Enumeration.DeviceInformation.CreateFromIdAsync(
            Windows.Media.Devices.MediaDevice.GetDefaultAudioCaptureId(Windows.Media.Devices.AudioDeviceRole.Default));

    CreateAudioDeviceInputNodeResult result =
        await ingraph.CreateDeviceInputNodeAsync(MediaCategory.Media, nodesettings.EncodingProperties, selectedDevice);

    if (result.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device input node
        return;
    }

    deviceInputNode = result.DeviceInputNode;
    deviceInputNode.AddOutgoingConnection(frameOutputNode);
    frameOutputNode.Start();
    ingraph.Start();
}
private async Task openMicrophonePopup()
{
    AudioGraphSettings settings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    AudioGraph audioGraph = result.Graph;

    CreateAudioDeviceInputNodeResult resultNode = await audioGraph.CreateDeviceInputNodeAsync(Windows.Media.Capture.MediaCategory.Media);
    AudioDeviceInputNode deviceInputNode = resultNode.DeviceInputNode;

    deviceInputNode.Dispose();
    audioGraph.Dispose();
}
//</SnippetCreateDeviceOutputNode>
private async Task EnumerateAudioCaptureDevices()
{
    //<SnippetEnumerateAudioCaptureDevices>
    Windows.Devices.Enumeration.DeviceInformationCollection devices =
        await Windows.Devices.Enumeration.DeviceInformation.FindAllAsync(Windows.Media.Devices.MediaDevice.GetAudioCaptureSelector());

    // Show UI to allow the user to select a device
    Windows.Devices.Enumeration.DeviceInformation selectedDevice = ShowMyDeviceSelectionUI(devices);

    CreateAudioDeviceInputNodeResult result =
        await audioGraph.CreateDeviceInputNodeAsync(Windows.Media.Capture.MediaCategory.Media, audioGraph.EncodingProperties, selectedDevice);
    //</SnippetEnumerateAudioCaptureDevices>
}
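// ShowMyDeviceSelectionUI is left as a placeholder in the snippet above. A hypothetical,
// minimal stand-in (an assumption, not the original implementation) could simply return the
// first enumerated capture device; a real app would bind the collection to a list control
// and return the user's pick.
private Windows.Devices.Enumeration.DeviceInformation ShowMyDeviceSelectionUI(
    Windows.Devices.Enumeration.DeviceInformationCollection devices)
{
    // Fall back to the first available capture device when no picker UI is shown.
    return devices.Count > 0 ? devices[0] : null;
}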
//</SnippetCreateFileOutputNode>
//<SnippetCreateDeviceInputNode>
private async Task CreateDeviceInputNode()
{
    // Create a device input node
    CreateAudioDeviceInputNodeResult result = await audioGraph.CreateDeviceInputNodeAsync(Windows.Media.Capture.MediaCategory.Media);

    if (result.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device input node
        ShowErrorMessage(result.Status.ToString());
        return;
    }

    deviceInputNode = result.DeviceInputNode;
}
private async Task CreateAudioGraph()
{
    // Create an AudioGraph with default settings
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;

    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        // Cannot create graph
        _rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
        return;
    }

    _graph = result.Graph;

    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputResult = await _graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device output
        _rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputResult.Status.ToString()), NotifyType.ErrorMessage);
        speakerContainer.Background = new SolidColorBrush(Colors.Red);
        return;
    }

    _deviceOutputNode = deviceOutputResult.DeviceOutputNode;
    _rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
    speakerContainer.Background = new SolidColorBrush(Colors.Green);

    // Create a device input node using the default audio input device
    CreateAudioDeviceInputNodeResult deviceInputNodeResult = await _graph.CreateDeviceInputNodeAsync(MediaCategory.Media);
    if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device input node
        _rootPage.NotifyUser(String.Format("Audio Device Input unavailable because {0}", deviceInputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
        return;
    }

    _deviceInputNode = deviceInputNodeResult.DeviceInputNode;

    var frameOutputNode = _graph.CreateFrameOutputNode();
    _graph.QuantumProcessed += AudioGraph_QuantumProcessed;
    _deviceInputNode.AddOutgoingConnection(frameOutputNode);
}
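// A sketch of what a QuantumProcessed handler such as AudioGraph_QuantumProcessed might do
// with the frame output node created above, using the common IMemoryBufferByteAccess pattern.
// Assumptions: the frame output node is kept in a field named frameOutputNode, the graph runs
// at its default 32-bit float format, and the project allows unsafe code.
[ComImport]
[Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
unsafe interface IMemoryBufferByteAccess
{
    void GetBuffer(out byte* buffer, out uint capacity);
}

private unsafe void AudioGraph_QuantumProcessed(AudioGraph sender, object args)
{
    AudioFrame frame = frameOutputNode.GetFrame();
    using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Read))
    using (IMemoryBufferReference reference = buffer.CreateReference())
    {
        byte* dataInBytes;
        uint capacityInBytes;
        ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);

        // Interpret the buffer as 32-bit float samples and inspect them (here: compute the peak level).
        float* dataInFloat = (float*)dataInBytes;
        uint sampleCount = capacityInBytes / sizeof(float);
        float peak = 0f;
        for (uint i = 0; i < sampleCount; i++)
        {
            float v = Math.Abs(dataInFloat[i]);
            if (v > peak) { peak = v; }
        }
    }
}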
private async Task StartAudioAsync()
{
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;

    try
    {
        CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
        if (result.Status != AudioGraphCreationStatus.Success)
        {
            return;
        }

        _audioGraph = result.Graph;

        // Create a device input node
        CreateAudioDeviceInputNodeResult deviceInputNodeResult =
            await _audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Media, _audioGraph.EncodingProperties, _audioInputSelected);
        if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
        {
            return;
        }

        // Create a device output node
        CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await _audioGraph.CreateDeviceOutputNodeAsync();
        if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
        {
            return;
        }

        _deviceInputNode = deviceInputNodeResult.DeviceInputNode;
        _deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

        _deviceInputNode.AddOutgoingConnection(_deviceOutputNode);
        _audioGraph.Start();
    }
    catch (Exception ex)
    {
        Debug.WriteLine($"AudioGraph initialization failed. {ex?.Message}");
    }
}
private async void StartButton_Click(object sender, RoutedEventArgs e)
{
    DeviceInformation SelectedDevice = DevicesBox.SelectedItem as DeviceInformation;

    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media)
    {
        QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency
    };

    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    graph = result.Graph;

    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    AudioDeviceOutputNode deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

    // Create a device input node using the selected audio input device
    CreateAudioDeviceInputNodeResult deviceInputNodeResult =
        await graph.CreateDeviceInputNodeAsync(MediaCategory.Other, graph.EncodingProperties, SelectedDevice);
    if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device input node
        System.Diagnostics.Debug.WriteLine(String.Format("Audio Device Input unavailable because {0}", deviceInputNodeResult.Status.ToString()));
        return;
    }

    AudioDeviceInputNode deviceInputNode = deviceInputNodeResult.DeviceInputNode;

    frameOutputNode = graph.CreateFrameOutputNode();
    deviceInputNode.AddOutgoingConnection(frameOutputNode);

    AudioFrameInputNode frameInputNode = graph.CreateFrameInputNode();
    frameInputNode.AddOutgoingConnection(deviceOutputNode);

    // Attach to the QuantumStarted event in order to receive synchronous updates from the audio graph (to capture incoming audio).
    graph.QuantumStarted += GraphOnQuantumProcessed;

    graph.Start();
}
private async void CreateAudioGraphAsync()
{
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media)
    {
        //settings.DesiredSamplesPerQuantum = fftLength;
        DesiredRenderDeviceAudioProcessing = AudioProcessing.Default,
        QuantumSizeSelectionMode = QuantumSizeSelectionMode.ClosestToDesired
    };

    CreateAudioGraphResult graphResult = await AudioGraph.CreateAsync(settings);
    if (graphResult.Status != AudioGraphCreationStatus.Success)
    {
        throw new InvalidOperationException($"Graph creation failed {graphResult.Status}");
    }

    _graph = graphResult.Graph;

    //CreateAudioDeviceInputNodeResult inputNodeResult = await _graph.CreateDeviceInputNodeAsync(MediaCategory.Media);
    CreateAudioDeviceInputNodeResult inputNodeResult = await _graph.CreateDeviceInputNodeAsync(MediaCategory.Other);

    if (inputNodeResult.Status == AudioDeviceNodeCreationStatus.Success)
    {
        _inputNode = inputNodeResult.DeviceInputNode;
        _frameOutputNode = _graph.CreateFrameOutputNode();
        _inputNode.AddOutgoingConnection(_frameOutputNode);
        _frameOutputNode.Start();

        _graph.QuantumProcessed += AudioGraph_QuantumProcessed;

        // Handle device disconnection and other unrecoverable errors
        _graph.UnrecoverableErrorOccurred += Graph_UnrecoverableErrorOccurred;

        _graph.Start();
    }
    else
    {
        MessageDialog md = new MessageDialog("Cannot access microphone");
        await md.ShowAsync();
    }
}
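// A sketch of what Graph_UnrecoverableErrorOccurred might look like. The recovery strategy
// shown here (log, dispose the broken graph on the UI thread, rebuild it) is an assumption,
// not taken from the project above; Dispatcher is assumed to be the page's CoreDispatcher.
private async void Graph_UnrecoverableErrorOccurred(AudioGraph sender, AudioGraphUnrecoverableErrorOccurredEventArgs args)
{
    if (sender == _graph && args.Error != AudioGraphUnrecoverableError.None)
    {
        Debug.WriteLine($"Unrecoverable AudioGraph error: {args.Error}");

        await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
        {
            // Tear down the broken graph and build a fresh one.
            sender.Dispose();
            _graph = null;
            CreateAudioGraphAsync();
        });
    }
}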
/// <summary>
/// Tries to create the frame output node and set up the outgoing connection to it. Also calculates audioFrameUpdateMinimum.
/// </summary>
/// <returns>Whether or not the attempt was successful</returns>
private static async Task<bool> CreateNodes()
{
    try
    {
        CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);
        deviceInputNode = deviceInputNodeResult.DeviceInputNode;

        frameOutputNode = graph.CreateFrameOutputNode(graph.EncodingProperties);
        graph.QuantumStarted += Graph_QuantumStarted;

        audioFrameUpdateMinimum = Convert.ToInt32(samplesPerQuantumLimit / graph.SamplesPerQuantum);

        deviceInputNode.AddOutgoingConnection(frameOutputNode);
        return true;
    }
    catch (Exception)
    {
        return false;
    }
}
public async Task InitializeAsync()
{
    audGraphResult = await AudioGraph.CreateAsync(audGraphSettings);
    audGraph = audGraphResult.Graph;

    deviceOutputNodeResult = await audGraph.CreateDeviceOutputNodeAsync();
    deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

    //deviceInputNodeResult = await audGraph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    //deviceInputNodeResult = await audGraph.CreateDeviceInputNodeAsync(MediaCategory.Other, audGraph.EncodingProperties);
    deviceInputNodeResult = await audGraph.CreateDeviceInputNodeAsync(MediaCategory.Other, audGraph.EncodingProperties, inputDevice);
    deviceInputNode = deviceInputNodeResult.DeviceInputNode;

    audioDeviceOutputSubmixNode = audGraph.CreateSubmixNode();

    deviceInputNode.AddOutgoingConnection(audioDeviceOutputSubmixNode);
    audioDeviceOutputSubmixNode.AddOutgoingConnection(deviceOutputNode);

    CreateEchoEffect();
    CreateReverbEffect();
    CreateLimiterEffect();
    CreateEqEffect();
}
private async void Init()
{
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;

    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        mainPage.MessageBox("Could not create input device for Mic To MIDI!");
        return;
    }

    audioGraph = result.Graph;

    CreateAudioDeviceInputNodeResult deviceInputNodeResult = await audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        mainPage.MessageBox(String.Format("Audio Device Input unavailable because {0}", deviceInputNodeResult.Status.ToString()));
        return;
    }

    deviceInputNode = deviceInputNodeResult.DeviceInputNode;
    frameOutputNode = audioGraph.CreateFrameOutputNode();
    deviceInputNode.AddOutgoingConnection(frameOutputNode);

    //audioGraph.QuantumStarted += AudioGraph_QuantumStarted;
    audioGraph.Start();
    deviceInputNode.Start();
    frameOutputNode.Start();

    timer = new DispatcherTimer();
    timer.Interval = new TimeSpan(0, 0, 0, 0, 1); // 1 ms
    timer.Tick += Timer_Tick;
    timer.Start();

    periodLengthUK101 = 0;
}
private void OnCreateInputCompleted(IAsyncOperation<CreateAudioDeviceInputNodeResult> asyncInfo, AsyncStatus asyncStatus)
{
    if (asyncStatus == AsyncStatus.Completed)
    {
        CreateAudioDeviceInputNodeResult result = asyncInfo.GetResults();
        if (result.Status == AudioDeviceNodeCreationStatus.Success)
        {
            this.m_AudioDevideInputNode = result.DeviceInputNode;
            this.m_AudioFrameOutputNode = this.m_AudioGraph.CreateFrameOutputNode();
            this.m_AudioDevideInputNode.AddOutgoingConnection(this.m_AudioFrameOutputNode);

            this.m_AudioGraph.QuantumStarted += M_AudioGraph_QuantumStarted;
            //this.m_AudioGraph.QuantumProcessed += M_AudioGraph_QuantumProcessed;

            this.m_AudioGraph.Start();
        }
        else
        {
            ShowMessage($"Failed to create audio device input node: {result.Status}");
        }
    }
    else
    {
        ShowMessage($"Failed to create audio device input node: {asyncStatus}");
    }
}
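// Hypothetical wiring for the callback above (an assumption, not shown in the original code):
// instead of awaiting CreateDeviceInputNodeAsync, the operation's Completed delegate is pointed
// at OnCreateInputCompleted, which then runs when the WinRT async operation finishes.
private void BeginCreateInputNode()
{
    IAsyncOperation<CreateAudioDeviceInputNodeResult> op =
        this.m_AudioGraph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    op.Completed = new AsyncOperationCompletedHandler<CreateAudioDeviceInputNodeResult>(OnCreateInputCompleted);
}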
/// <summary>
/// Create input audio graph
/// </summary>
/// <param name="deviceId">Override for default input device id</param>
public static async Task<bool> CreateInputDeviceNode(string deviceId = null)
{
    // If not in use, redo dispose
    if (ingraph != null && deviceId != InputDeviceID)
    {
        HeavyDisposeInGraph();
    }
    // Increment use counter
    else
    {
        inGraphCount++;
    }

    Console.WriteLine("Creating AudioGraphs");

    // Create an AudioGraph with default settings
    AudioGraphSettings graphsettings = new AudioGraphSettings(AudioRenderCategory.Media);
    graphsettings.EncodingProperties = new AudioEncodingProperties();
    graphsettings.EncodingProperties.Subtype = "Float";
    graphsettings.EncodingProperties.SampleRate = 48000;
    graphsettings.EncodingProperties.ChannelCount = 2;
    graphsettings.EncodingProperties.BitsPerSample = 32;
    graphsettings.EncodingProperties.Bitrate = 3072000;

    CreateAudioGraphResult graphresult = await AudioGraph.CreateAsync(graphsettings);
    if (graphresult.Status != AudioGraphCreationStatus.Success)
    {
        // Cannot create graph
        inGraphCount--;
        LocalState.VoiceState.SelfMute = true;
        VoiceManager.lockMute = true;
        return false;
    }

    // "Save" graph
    ingraph = graphresult.Graph;

    // Create frameOutputNode
    AudioGraphSettings nodesettings = new AudioGraphSettings(AudioRenderCategory.GameChat);
    nodesettings.EncodingProperties = AudioEncodingProperties.CreatePcm(48000, 2, 32);
    nodesettings.DesiredSamplesPerQuantum = 960;
    nodesettings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.ClosestToDesired;

    frameOutputNode = ingraph.CreateFrameOutputNode(ingraph.EncodingProperties);
    quantum = 0;
    ingraph.QuantumStarted += Graph_QuantumStarted;

    // Determine selected device
    DeviceInformation selectedDevice;
    if (deviceId == "Default" || deviceId == null)
    {
        string device = Windows.Media.Devices.MediaDevice.GetDefaultAudioCaptureId(Windows.Media.Devices.AudioDeviceRole.Default);
        if (!string.IsNullOrEmpty(device))
        {
            selectedDevice = await DeviceInformation.CreateFromIdAsync(device);
            Windows.Media.Devices.MediaDevice.DefaultAudioCaptureDeviceChanged += MediaDevice_DefaultAudioCaptureDeviceChanged;
        }
        else
        {
            inGraphCount--;
            LocalState.VoiceState.SelfMute = true;
            VoiceManager.lockMute = true;
            return false;
        }
    }
    else
    {
        try
        {
            selectedDevice = await DeviceInformation.CreateFromIdAsync(deviceId);
        }
        catch
        {
            selectedDevice = await DeviceInformation.CreateFromIdAsync(
                Windows.Media.Devices.MediaDevice.GetDefaultAudioCaptureId(Windows.Media.Devices.AudioDeviceRole.Default));
            deviceId = "Default";
        }
    }

    CreateAudioDeviceInputNodeResult result =
        await ingraph.CreateDeviceInputNodeAsync(MediaCategory.Media, nodesettings.EncodingProperties, selectedDevice);
    if (result.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device input node
        inGraphCount--;
        LocalState.VoiceState.SelfMute = true;
        VoiceManager.lockMute = true;
        return false;
    }

    // Attach input device
    deviceInputNode = result.DeviceInputNode;
    deviceInputNode.AddOutgoingConnection(frameOutputNode);
    InputDeviceID = deviceId;

    // Begin capturing
    frameOutputNode.Start();
    ingraph.Start();
    return true;
}
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
    var audioInputDevices = await DeviceInformation.FindAllAsync(DeviceClass.AudioCapture);
    foreach (var device in audioInputDevices)
    {
        if (device.Name.ToLower().Contains("usb"))
        {
            audioInput = device;
            break;
        }
    }

    if (audioInput == null)
    {
        Debug.WriteLine("Could not find USB audio card");
        return;
    }

    var audioOutputDevices = await DeviceInformation.FindAllAsync(DeviceClass.AudioRender);
    foreach (var device in audioOutputDevices)
    {
        if (device.Name.ToLower().Contains("usb"))
        {
            audioOutput = device;
        }
        else
        {
            raspiAudioOutput = device;
        }
    }

    if (audioOutput == null)
    {
        Debug.WriteLine("Could not find USB audio card");
        return;
    }

    // Set up LED strips
    await leftStrip.Begin();
    await rightStrip.Begin();

    //await AudioTest();

    AudioGraphSettings audioGraphSettings = new AudioGraphSettings(AudioRenderCategory.Media);
    audioGraphSettings.DesiredSamplesPerQuantum = 440;
    audioGraphSettings.DesiredRenderDeviceAudioProcessing = AudioProcessing.Default;
    audioGraphSettings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.ClosestToDesired;
    audioGraphSettings.PrimaryRenderDevice = raspiAudioOutput;

    CreateAudioGraphResult audioGraphResult = await AudioGraph.CreateAsync(audioGraphSettings);
    if (audioGraphResult.Status != AudioGraphCreationStatus.Success)
    {
        Debug.WriteLine("AudioGraph creation failed! " + audioGraphResult.Status);
        return;
    }

    audioGraph = audioGraphResult.Graph;
    //Debug.WriteLine(audioGraph.SamplesPerQuantum);

    CreateAudioDeviceInputNodeResult inputNodeResult =
        await audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Media, audioGraph.EncodingProperties, audioInput);
    if (inputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        Debug.WriteLine("AudioDeviceInputNode creation failed! " + inputNodeResult.Status);
        return;
    }

    AudioDeviceInputNode inputNode = inputNodeResult.DeviceInputNode;

    CreateAudioDeviceOutputNodeResult outputNodeResult = await audioGraph.CreateDeviceOutputNodeAsync();
    if (outputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        Debug.WriteLine("AudioDeviceOutputNode creation failed! " + outputNodeResult.Status);
        return; // bail out on failure, matching the other error paths above
    }

    AudioDeviceOutputNode outputNode = outputNodeResult.DeviceOutputNode;

    frameOutputNode = audioGraph.CreateFrameOutputNode();
    inputNode.AddOutgoingConnection(frameOutputNode);
    inputNode.AddOutgoingConnection(outputNode);

    cube.SetSpeedStripLedColors(LedColorLists.rainbowColors);

    audioGraph.QuantumProcessed += AudioGraph_QuantumProcessed;
    audioGraph.UnrecoverableErrorOccurred += AudioGraph_UnrecoverableErrorOccurred;

    audioGraph.Start();
    outputNode.Start();
    inputNode.Start();
    frameOutputNode.Start();

    cube.Reset();
    cube.Update();

    //await MathFunc();
    //cube.ApplyColorFunction((x, y, z) =>
    //{
    //    Color c = Color.FromArgb(255,
    //        (byte)((x / 14.0) * 255.0),
    //        (byte)((y / 14.0) * 255.0),
    //        (byte)((z / 14.0) * 255.0));
    //    return c;
    //});
    //cube.SetLedColors();
    //cube.Update();

    //cube.bottomFrontEdge.SetColor(Colors.Red);
    //cube.bottomRightEdge.SetColor(Colors.OrangeRed);
    //cube.bottomBackEdge.SetColor(Colors.Yellow);
    //cube.bottomLeftEdge.SetColor(Colors.Green);
    //cube.frontLeftEdge.SetColor(Colors.Blue);
    //cube.frontTopEdge.SetColor(Colors.Purple);

    //cube.rightLeftEdge.Brightness = 10;
    //cube.rightLeftEdge.SetColor(Colors.Red);
    //cube.rightTopEdge.Brightness = 10;
    //cube.rightTopEdge.SetColor(Colors.OrangeRed);
    //cube.backLeftEdge.Brightness = 10;
    //cube.backLeftEdge.SetColor(Colors.Yellow);
    //cube.backTopEdge.Brightness = 10;
    //cube.backTopEdge.SetColor(Colors.Green);
    //cube.leftLeftEdge.Brightness = 10;
    //cube.leftLeftEdge.SetColor(Colors.Blue);
    //cube.leftTopEdge.Brightness = 10;
    //cube.leftTopEdge.SetColor(Colors.Purple);
    //cube.Update();

    //await RainbowTest();
    //cube.Brightness = 30;
    //await FlashTest();
    //SetAll();
    //await FadeTest();
    //cube.Reset();
    //cube.Update();
    //await cube.rightLeftEdge.DoLine();
    //ZackTest();
}
public async Task InitializeUWPAudio()
{
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;

    outputDevices = await DeviceInformation.FindAllAsync(MediaDevice.GetAudioRenderSelector());
    foreach (DeviceInformation dev in outputDevices)
    {
        if (dev.Name.Contains(OMENHeadset))
        {
            settings.PrimaryRenderDevice = dev;
        }
    }

    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        // Cannot create graph
        return;
    }

    uwpAudioGraph = result.Graph;

    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await uwpAudioGraph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device output node
        return;
    }

    deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

    // Create a device input node using the default audio input device
    CreateAudioDeviceInputNodeResult deviceInputNodeResult = await uwpAudioGraph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device input node
        return;
    }

    deviceInputNode = deviceInputNodeResult.DeviceInputNode;

#if false // File recording, not used for now
    FileSavePicker saveFilePicker = new FileSavePicker();
    saveFilePicker.FileTypeChoices.Add("Pulse Code Modulation", new List<string>() { ".wav" });
    saveFilePicker.FileTypeChoices.Add("Windows Media Audio", new List<string>() { ".wma" });
    saveFilePicker.FileTypeChoices.Add("MPEG Audio Layer-3", new List<string>() { ".mp3" });
    saveFilePicker.SuggestedFileName = "New Audio Track";
    StorageFile file = await saveFilePicker.PickSaveFileAsync();
    MediaEncodingProfile fileProfile = CreateMediaEncodingProfile(file);

    // Operate node at the graph format, but save file at the specified format
    CreateAudioFileOutputNodeResult fileOutputNodeResult = await uwpAudioGraph.CreateFileOutputNodeAsync(file, fileProfile);
    if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
    {
        // FileOutputNode creation failed
        //rootPage.NotifyUser(String.Format("Cannot create output file because {0}", fileOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
        //fileButton.Background = new SolidColorBrush(Colors.Red);
        return;
    }

    fileOutputNode = fileOutputNodeResult.FileOutputNode;
    deviceInputNode.AddOutgoingConnection(fileOutputNode);
#endif

    // Connect the input node to the device output node
    deviceInputNode.AddOutgoingConnection(deviceOutputNode);
}