/// <summary>
/// Builds the playback AudioGraph: a mono 16-bit PCM frame input node routed to the
/// default render device. The frame input node is left stopped; only the graph runs.
/// </summary>
/// <param name="samplingRate">PCM sample rate in Hz for the graph encoding.</param>
private async Task CreateAudioGraph(uint samplingRate)
{
    // Describe the stream as mono 16-bit PCM at the requested rate.
    var profile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Auto);
    profile.Audio = AudioEncodingProperties.CreatePcm(samplingRate, 1, 16);

    var graphSettings = new AudioGraphSettings(AudioRenderCategory.Speech)
    {
        EncodingProperties = profile.Audio
    };

    CreateAudioGraphResult creation = await AudioGraph.CreateAsync(graphSettings);
    if (creation.Status != AudioGraphCreationStatus.Success)
    {
        // Graph construction failed; leave every node field unset.
        return;
    }

    graph = creation.Graph;
    graph.EncodingProperties.SampleRate = samplingRate;

    // Route audio to the default output device.
    CreateAudioDeviceOutputNodeResult outputResult = await graph.CreateDeviceOutputNodeAsync();
    if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return;
    }
    deviceOutputNode = outputResult.DeviceOutputNode;

    // Frame input node uses the graph's format, explicitly forced to mono.
    AudioEncodingProperties inputFormat = graph.EncodingProperties;
    inputFormat.ChannelCount = 1;
    frameInputNode = graph.CreateFrameInputNode(inputFormat);
    frameInputNode.AddOutgoingConnection(deviceOutputNode);

    // Initialize the frame input node in the stopped state; callers start/stop it
    // individually while the graph itself runs continuously.
    frameInputNode.Stop();
    frameInputNode.AudioFrameCompleted += FrameInputNode_AudioFrameCompleted;

    graph.Start();
}
/// <summary>
/// Tries to create the audio graph.
/// </summary>
/// <returns>Whether or not the creation was successful.</returns>
private static async Task<bool> CreateAudioGraph()
{
    try
    {
        AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
        settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;

        CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

        // BUG FIX: AudioGraph.CreateAsync reports most failures through
        // result.Status rather than by throwing, so the previous version
        // returned true with a null graph on failure.
        if (result.Status != AudioGraphCreationStatus.Success)
        {
            return false;
        }

        graph = result.Graph;
        return true;
    }
    catch (Exception)
    {
        // Keep the best-effort contract: any unexpected failure maps to false.
        return false;
    }
}
/// <summary>
/// Creates a low-latency AudioGraph that wires the previously selected capture
/// device straight through to the default render device, then starts the graph.
/// Failures are logged (or silently abandoned on status errors) and leave the
/// graph fields unset.
/// </summary>
private async Task StartAudioAsync()
{
    var settings = new AudioGraphSettings(AudioRenderCategory.Media)
    {
        QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency
    };

    try
    {
        CreateAudioGraphResult graphResult = await AudioGraph.CreateAsync(settings);
        if (graphResult.Status != AudioGraphCreationStatus.Success)
        {
            return;
        }
        _audioGraph = graphResult.Graph;

        // Capture node bound to the selected input device at the graph's format.
        CreateAudioDeviceInputNodeResult inputResult =
            await _audioGraph.CreateDeviceInputNodeAsync(
                MediaCategory.Media, _audioGraph.EncodingProperties, _audioInputSelected);
        if (inputResult.Status != AudioDeviceNodeCreationStatus.Success)
        {
            return;
        }

        // Render node on the default output device.
        CreateAudioDeviceOutputNodeResult outputResult =
            await _audioGraph.CreateDeviceOutputNodeAsync();
        if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
        {
            return;
        }

        _deviceInputNode = inputResult.DeviceInputNode;
        _deviceOutputNode = outputResult.DeviceOutputNode;

        // Direct pass-through: microphone -> speakers.
        _deviceInputNode.AddOutgoingConnection(_deviceOutputNode);
        _audioGraph.Start();
    }
    catch (Exception ex)
    {
        Debug.WriteLine($"AudioGraph initialization failed. {ex?.Message}");
    }
}
/// <summary>
/// Builds the capture/playback graph for the device chosen in DevicesBox:
/// selected input -> frame output node (for capture) and a frame input node ->
/// default output. Subscribes QuantumStarted to pull captured audio each quantum.
/// (async void is acceptable here: this is a top-level event handler.)
/// </summary>
private async void StartButton_Click(object sender, RoutedEventArgs e)
{
    DeviceInformation selectedDevice = DevicesBox.SelectedItem as DeviceInformation;

    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media)
    {
        QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency
    };

    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    // BUG FIX: the creation status was never checked; a failed creation left
    // result.Graph null and crashed on the next call.
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        System.Diagnostics.Debug.WriteLine(String.Format("AudioGraph creation error because {0}", result.Status.ToString()));
        return;
    }
    graph = result.Graph;

    // Create a device output node.
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    // BUG FIX: output-node status was never checked before use.
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        System.Diagnostics.Debug.WriteLine(String.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()));
        return;
    }
    AudioDeviceOutputNode deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

    // Create a device input node for the user-selected capture device.
    CreateAudioDeviceInputNodeResult deviceInputNodeResult =
        await graph.CreateDeviceInputNodeAsync(MediaCategory.Other, graph.EncodingProperties, selectedDevice);
    if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device input node.
        System.Diagnostics.Debug.WriteLine(String.Format("Audio Device Input unavailable because {0}", deviceInputNodeResult.Status.ToString()));
        return;
    }
    AudioDeviceInputNode deviceInputNode = deviceInputNodeResult.DeviceInputNode;

    // Capture path: input device -> frame output node.
    frameOutputNode = graph.CreateFrameOutputNode();
    deviceInputNode.AddOutgoingConnection(frameOutputNode);

    // Playback path: frame input node -> output device.
    AudioFrameInputNode frameInputNode = graph.CreateFrameInputNode();
    frameInputNode.AddOutgoingConnection(deviceOutputNode);

    // Attach to QuantumStarted in order to receive synchronous updates from the
    // audio graph (to capture incoming audio).
    graph.QuantumStarted += GraphOnQuantumProcessed;

    graph.Start();
}
/// <summary>
/// Setup an AudioGraph with PCM input node and output for media playback.
/// The FrameInputNode is created at 16 kHz / mono / 16-bit and left stopped;
/// node_QuantumStarted supplies samples on demand.
/// </summary>
private async Task CreateAudioGraph()
{
    AudioGraphSettings graphSettings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Media);
    CreateAudioGraphResult graphResult = await AudioGraph.CreateAsync(graphSettings);
    if (graphResult.Status != AudioGraphCreationStatus.Success)
    {
        UpdateUI(() => { this.Messages.Add(new MessageDisplay($"Error in AudioGraph construction: {graphResult.Status.ToString()}", Sender.Other)); });
        // BUG FIX: previously fell through and dereferenced the null Graph below.
        return;
    }
    audioGraph = graphResult.Graph;

    CreateAudioDeviceOutputNodeResult outputResult = await audioGraph.CreateDeviceOutputNodeAsync();
    if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        UpdateUI(() => { this.Messages.Add(new MessageDisplay($"Error in audio OutputNode construction: {outputResult.Status.ToString()}", Sender.Other)); });
        // BUG FIX: previously fell through and used a null DeviceOutputNode.
        return;
    }
    outputNode = outputResult.DeviceOutputNode;

    // Create the FrameInputNode using PCM format; 16kHz, 1 channel, 16 bits per sample.
    AudioEncodingProperties nodeEncodingProperties = AudioEncodingProperties.CreatePcm(16000, 1, 16);
    frameInputNode = audioGraph.CreateFrameInputNode(nodeEncodingProperties);
    frameInputNode.AddOutgoingConnection(outputNode);

    // Initialize the FrameInputNode in the stopped state.
    frameInputNode.Stop();

    // Hook up an event handler so we can start generating samples when needed.
    // This event is triggered when the node is required to provide data.
    frameInputNode.QuantumStarted += node_QuantumStarted;

    audioGraph.Start();
}
/// <summary>
/// Builds a capture graph: default-category processing, quantum size closest to
/// desired, microphone -> frame output node. Throws if the graph itself cannot
/// be created; shows a dialog if the microphone is unavailable.
/// </summary>
private async void CreateAudioGraphAsync()
{
    var settings = new AudioGraphSettings(AudioRenderCategory.Media)
    {
        DesiredRenderDeviceAudioProcessing = AudioProcessing.Default,
        QuantumSizeSelectionMode = QuantumSizeSelectionMode.ClosestToDesired
    };

    CreateAudioGraphResult graphResult = await AudioGraph.CreateAsync(settings);
    if (graphResult.Status != AudioGraphCreationStatus.Success)
    {
        throw new InvalidOperationException($"Graph creation failed {graphResult.Status}");
    }
    _graph = graphResult.Graph;

    CreateAudioDeviceInputNodeResult inputNodeResult = await _graph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    if (inputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Typically a denied microphone capability or missing capture device.
        MessageDialog md = new MessageDialog("Cannot access microphone");
        await md.ShowAsync();
        return;
    }

    _inputNode = inputNodeResult.DeviceInputNode;

    // Capture path: microphone -> frame output node, drained per quantum.
    _frameOutputNode = _graph.CreateFrameOutputNode();
    _inputNode.AddOutgoingConnection(_frameOutputNode);
    _frameOutputNode.Start();

    _graph.QuantumProcessed += AudioGraph_QuantumProcessed;
    // Because we are using the lowest latency setting, we need to handle
    // device disconnection errors.
    _graph.UnrecoverableErrorOccurred += Graph_UnrecoverableErrorOccurred;

    _graph.Start();
}
/// <summary>
/// Creates the outgoing AudioGraph (48 kHz stereo 32-bit float, GameChat category),
/// attaches the default output device and a frame input node, and starts playback.
/// Sets <c>ready</c> only on full success.
/// </summary>
public static async Task CreateDeviceOutputNode()
{
    Console.WriteLine("Creating AudioGraphs");

    // Create an AudioGraph with an explicit 48 kHz stereo float format.
    AudioGraphSettings graphsettings = new AudioGraphSettings(AudioRenderCategory.GameChat);
    graphsettings.EncodingProperties = new AudioEncodingProperties();
    graphsettings.EncodingProperties.Subtype = "Float";
    graphsettings.EncodingProperties.SampleRate = 48000;
    graphsettings.EncodingProperties.ChannelCount = 2;
    graphsettings.EncodingProperties.BitsPerSample = 32;
    graphsettings.EncodingProperties.Bitrate = 3072000;

    CreateAudioGraphResult graphresult = await AudioGraph.CreateAsync(graphsettings);
    if (graphresult.Status != AudioGraphCreationStatus.Success)
    {
        // Cannot create graph.
        return;
    }
    outgraph = graphresult.Graph;

    // Create a device output node.
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await outgraph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // BUG FIX: previously this branch only held a comment and execution fell
        // through to dereference a null DeviceOutputNode.
        return;
    }
    deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

    // Create the FrameInputNode at the same format as the graph (stereo float).
    frameInputNode = outgraph.CreateFrameInputNode(outgraph.EncodingProperties);
    frameInputNode.AddOutgoingConnection(deviceOutputNode);
    frameInputNode.Start();

    ready = true;
    outgraph.Start();
}
/// <summary>
/// Builds the live-effects pipeline: graph -> selected input device -> submix
/// node -> default output device, then configures the effect chain.
/// NOTE(review): none of the Create* results are checked for Success here; a
/// failed creation surfaces as a null dereference on the next line — confirm
/// callers guarantee valid devices, or add status checks.
/// </summary>
public async Task InitializeAsync()
{
    // Graph from settings prepared elsewhere in this class.
    audGraphResult = await AudioGraph.CreateAsync(audGraphSettings);
    audGraph = audGraphResult.Graph;

    // Render node on the default output device.
    deviceOutputNodeResult = await audGraph.CreateDeviceOutputNodeAsync();
    deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

    // Capture node bound to the explicitly chosen input device at the graph's format.
    //deviceInputNodeResult = await audGraph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    // deviceInputNodeResult = await audGraph.CreateDeviceInputNodeAsync(MediaCategory.Other, audGraph.EncodingProperties);
    deviceInputNodeResult = await audGraph.CreateDeviceInputNodeAsync(MediaCategory.Other, audGraph.EncodingProperties, inputDevice);
    deviceInputNode = deviceInputNodeResult.DeviceInputNode;

    // Submix node sits between input and output so effects are applied once.
    audioDeviceOutputSubmixNode = audGraph.CreateSubmixNode();
    deviceInputNode.AddOutgoingConnection(audioDeviceOutputSubmixNode);
    audioDeviceOutputSubmixNode.AddOutgoingConnection(deviceOutputNode);

    // Configure the effect chain (helpers defined elsewhere in this class;
    // presumably they attach effect definitions to the submix node — verify).
    CreateEchoEffect();
    CreateReverbEffect();
    CreateLimiterEffect();
    CreateEqEffect();
}
/// <summary>
/// Initializes the Mic-to-MIDI capture graph (low-latency, default microphone ->
/// frame output node) and starts a 1 ms DispatcherTimer that polls for audio.
/// Reports failures to the user via mainPage.MessageBox.
/// </summary>
private async void Init()
{
    var settings = new AudioGraphSettings(AudioRenderCategory.Media)
    {
        QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency
    };

    CreateAudioGraphResult creation = await AudioGraph.CreateAsync(settings);
    if (creation.Status != AudioGraphCreationStatus.Success)
    {
        mainPage.MessageBox("Could not create input device for Mic To MIDI!");
        return;
    }
    audioGraph = creation.Graph;

    CreateAudioDeviceInputNodeResult inputResult = await audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    if (inputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        mainPage.MessageBox(String.Format("Audio Device Input unavailable because {0}", inputResult.Status.ToString()));
        return;
    }
    deviceInputNode = inputResult.DeviceInputNode;

    // Capture path: microphone -> frame output node.
    frameOutputNode = audioGraph.CreateFrameOutputNode();
    deviceInputNode.AddOutgoingConnection(frameOutputNode);

    audioGraph.Start();
    deviceInputNode.Start();
    frameOutputNode.Start();

    // Poll captured frames on a 1 ms UI timer instead of a quantum event.
    timer = new DispatcherTimer();
    timer.Interval = new TimeSpan(0, 0, 0, 0, 1);
    timer.Tick += Timer_Tick;
    timer.Start();

    periodLengthUK101 = 0;
}
/// <summary>
/// 5. Build and start the AudioGraph for spatial sound playback: raw-processed
/// 48 kHz stereo float graph, default output device, submix + first emitter.
/// Silently does nothing on creation failure (matches prior behavior).
/// </summary>
/// <returns></returns>
private async Task BuildAndStartAudioGraph()
{
    _spatialSoundsIdx = 0;

    // Raw processing keeps the pipeline bit-exact for spatial audio.
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.GameEffects);
    settings.EncodingProperties = AudioEncodingProperties.CreatePcm(48000, 2, 32);
    settings.EncodingProperties.Subtype = MediaEncodingSubtypes.Float;
    settings.DesiredRenderDeviceAudioProcessing = Windows.Media.AudioProcessing.Raw;

    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        return;
    }
    _graph = result.Graph;

    CreateAudioDeviceOutputNodeResult deviceResult = await _graph.CreateDeviceOutputNodeAsync();
    if (deviceResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return;
    }
    _deviceOutput = deviceResult.DeviceOutputNode;

    ConfigureSubmix();
    await CreateEmitter(0);

    // Show the position of the active sound on the canvas.
    Canvas.SetLeft(this.ActiveSound, _spatialSounds[0].ImageResourceLocation.X * scaleFactor);
    Canvas.SetTop(this.ActiveSound, _spatialSounds[0].ImageResourceLocation.Y * scaleFactor);
    ActiveSound.Visibility = Visibility.Visible;

    // Start the graph and mark playback state.
    _graph.Start();
    GraphState = GraphStateEnum.Playing;
}
// See: https://docs.microsoft.com/en-us/windows/uwp/gaming/working-with-audio-in-your-directx-game
/// <summary>
/// Creates the shared playback graph and its default output node.
/// Sets <c>initialized</c> only when both creations succeed; failures are silent.
/// </summary>
public static async Task InitSound()
{
    var settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        return;
    }
    graph = result.Graph;

    // Output device node for audio playback.
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return;
    }
    output = deviceOutputNodeResult.DeviceOutputNode;

    // Start from a clean node state, then run the graph.
    graph.ResetAllNodes();
    graph.Start();
    initialized = true;
}
/// <summary>
/// Builds a simple file-playback graph: the given file feeds a file input node
/// connected to the default output device. Any creation failure aborts silently,
/// leaving the corresponding fields unset. (Note: method name typo is part of
/// the public interface and is preserved.)
/// </summary>
/// <param name="file">Audio file to open as the graph's input.</param>
public async Task InitilizeAudioGraph(StorageFile file)
{
    var settings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Media);

    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        return;
    }

    audioGraph = result.Graph;
    if (audioGraph == null)
    {
        return;
    }

    // File reader node.
    CreateAudioFileInputNodeResult audioInputResult = await audioGraph.CreateFileInputNodeAsync(file);
    if (audioInputResult.Status != AudioFileNodeCreationStatus.Success)
    {
        return;
    }
    fileInputNode = audioInputResult.FileInputNode;

    // Default render device node.
    CreateAudioDeviceOutputNodeResult audioOutputResult = await audioGraph.CreateDeviceOutputNodeAsync();
    if (audioOutputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return;
    }
    deviceOutputNode = audioOutputResult.DeviceOutputNode;

    // Wire file -> speakers; the graph is not started here.
    fileInputNode.AddOutgoingConnection(deviceOutputNode);
}
/// <summary>
/// Creates a speech-category AudioGraph encoded as 16 kHz mono 16-bit PCM and
/// wires an unrecoverable-error handler that surfaces the failure in the UI.
/// </summary>
/// <exception cref="Exception">Thrown when graph creation does not succeed.</exception>
private async Task CreateAudioGraph()
{
    // 16 kHz / mono / 16-bit PCM — the format expected by the speech pipeline.
    encoding = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Low);
    encoding.Audio = AudioEncodingProperties.CreatePcm(16000, 1, 16);

    var settings = new AudioGraphSettings(AudioRenderCategory.Speech)
    {
        EncodingProperties = encoding.Audio
    };

    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        throw new Exception($"AudioGraph creation failed because {result.Status.ToString()}");
    }
    graph = result.Graph;

    // Surface fatal graph errors on the UI thread.
    graph.UnrecoverableErrorOccurred += async (AudioGraph sender, AudioGraphUnrecoverableErrorOccurredEventArgs args) =>
    {
        await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
        {
            error.Text = "Audio graph: UnrecoverableErrorOccurred";
        });
    };
}
/// <summary>
/// Builds audio graph and assigns output device: raw-processed 48 kHz stereo
/// float graph plus the default render device node.
/// </summary>
/// <exception cref="AudioGraphCreationException">
/// Thrown when the graph or the device output node cannot be created.
/// </exception>
/// <returns></returns>
private async Task BuildAudioGraph()
{
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.GameEffects);
    settings.EncodingProperties = AudioEncodingProperties.CreatePcm(48000, 2, 32);
    settings.EncodingProperties.Subtype = MediaEncodingSubtypes.Float;
    settings.DesiredRenderDeviceAudioProcessing = Windows.Media.AudioProcessing.Raw;

    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        throw (new AudioGraphCreationException(String.Format("AudioGraph creation error: {0}", result.Status.ToString())));
    }
    graph = result.Graph;

    CreateAudioDeviceOutputNodeResult deviceResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // BUG FIX: the error message previously reported result.Status (the graph
        // creation status, which is Success here) instead of the failing
        // deviceResult.Status.
        throw (new AudioGraphCreationException(String.Format("Audio device error: {0}", deviceResult.Status.ToString())));
    }
    deviceOutput = deviceResult.DeviceOutputNode;

    graph.UnrecoverableErrorOccurred += Graph_UnrecoverableErrorOccurred;
}
/// <summary>
/// Create output audio graph (48 kHz stereo 32-bit float) on the requested
/// render device, or the system default when <paramref name="deviceId"/> is
/// null/"Default". Reuses the graph-count bookkeeping when the device is unchanged.
/// </summary>
/// <param name="deviceId">Override for the default output device id.</param>
public static async Task CreateOutputDeviceNode(string deviceId = null)
{
    // If the requested device differs from the one in use, dispose the old graph;
    // otherwise just count another user of the existing graph.
    // NOTE(review): PrimaryRenderDevice can be null when the graph was built on the
    // system default device — confirm this path is never hit in that state.
    if (outgraph != null && OutputDeviceID != outgraph.PrimaryRenderDevice.Id)
    {
        HeavyDisposeOutGraph();
    }
    else
    {
        outGraphCount++;
    }

    Console.WriteLine("Creating AudioGraphs");

    // Create an AudioGraph with an explicit 48 kHz stereo float format.
    AudioGraphSettings graphsettings = new AudioGraphSettings(AudioRenderCategory.Media);
    graphsettings.EncodingProperties = new AudioEncodingProperties();
    graphsettings.EncodingProperties.Subtype = "Float";
    graphsettings.EncodingProperties.SampleRate = 48000;
    graphsettings.EncodingProperties.ChannelCount = 2;
    graphsettings.EncodingProperties.BitsPerSample = 32;
    graphsettings.EncodingProperties.Bitrate = 3072000;

    // Determine the selected render device, falling back to the system default.
    DeviceInformation selectedDevice;
    if (deviceId == "Default" || deviceId == null)
    {
        selectedDevice = await DeviceInformation.CreateFromIdAsync(Windows.Media.Devices.MediaDevice.GetDefaultAudioRenderId(Windows.Media.Devices.AudioDeviceRole.Default));
        Windows.Media.Devices.MediaDevice.DefaultAudioRenderDeviceChanged += MediaDevice_DefaultAudioRenderDeviceChanged;
    }
    else
    {
        try
        {
            selectedDevice = await DeviceInformation.CreateFromIdAsync(deviceId);
        }
        catch
        {
            // Unknown/stale id: fall back to the default render device.
            selectedDevice = await DeviceInformation.CreateFromIdAsync(Windows.Media.Devices.MediaDevice.GetDefaultAudioRenderId(Windows.Media.Devices.AudioDeviceRole.Default));
            deviceId = "Default";
        }
    }

    // Set selected device and create the graph.
    graphsettings.PrimaryRenderDevice = selectedDevice;
    CreateAudioGraphResult graphresult = await AudioGraph.CreateAsync(graphsettings);
    if (graphresult.Status != AudioGraphCreationStatus.Success)
    {
        // Cannot create graph.
        return;
    }

    // "Save" graph.
    outgraph = graphresult.Graph;

    // Create a device output node.
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await outgraph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // BUG FIX: previously this branch held only a comment and execution fell
        // through to use a null DeviceOutputNode.
        return;
    }
    deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

    // Create the FrameInputNode at the same format as the graph (stereo float).
    frameInputNode = outgraph.CreateFrameInputNode(outgraph.EncodingProperties);
    frameInputNode.AddOutgoingConnection(deviceOutputNode);
    OutputDeviceID = deviceId;

    // Begin playing.
    frameInputNode.Start();
    ready = true;
    outgraph.Start();
}
/// <summary>
/// Receive loop for the socket reader: reads length-prefixed messages and reacts
/// to the "Star"/"tSto" commands by enabling/disabling element sounds.
/// NOTE(review): the loop currently reads only the 4-byte length prefix and then
/// treats those same 4 bytes as the message text (the payload read is commented
/// out below) — so messages are effectively fixed at 4 characters; confirm the
/// protocol before re-enabling the payload read.
/// </summary>
private async void StartListening()
{
    Debug.WriteLine("Starting to listen for input.");
    Listening = true;
    while (Listening)
    {
        try
        {
            // Protocol: [UInt (4)] + [Message (1*n)] — the UInt describes the
            // length of the message. Load the 4-byte prefix first.
            uint readLength = await reader.LoadAsync(sizeof(uint));
            // A concurrent Disconnect may have cleared the flag while awaiting.
            if (!Listening)
            {
                break;
            }
            // Short read means the remote already terminated the connection.
            if (readLength < sizeof(uint))
            {
                Listening = false;
                Disconnect();
                Debug.WriteLine("The connection has been terminated.");
                break;
            }
            // uint messageLength = reader.ReadUInt32();
            // Debug.WriteLine("messageLength: " + messageLength.ToString());
            // Load the rest of the message since you already know the length of the data expected.
            //readLength = await reader.LoadAsync(messageLength);
            // Check if the size of the data is expected (otherwise the remote has already terminated the connection).
            // if (!Listening) break;
            // if (readLength < messageLength) {
            // Listening = false;
            // Disconnect();
            // Debug.WriteLine("The connection has been terminated.");
            // break; }
            string message = reader.ReadString(readLength);
            Debug.WriteLine("Received messageString: " + message);
            // Optional acknowledgement back to the sender.
            if (DO_RESPONSE)
            {
                SendMessage("1");
            }
            if (message == "Star")
            {
                // NOTE(review): this trace fires on every "Star" command even though
                // no error has occurred — the message text looks copy-pasted.
                Debug.WriteLine("AudioGraph creation error: ");
                AudioGraph audioGraph;
                AudioGraphSettings settings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Alerts);
                CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
                if (result.Status != AudioGraphCreationStatus.Success)
                {
                    Debug.WriteLine("AudioGraph creation error: " + result.Status.ToString());
                    //ShowErrorMessage("AudioGraph creation error: " + result.Status.ToString());
                }
                // NOTE(review): the created graph is assigned to a local and never
                // used or started — presumably leftover experimentation; verify.
                audioGraph = result.Graph;
                ElementSoundPlayer.Volume = 1.0;
            }
            if (message == "tSto")
            {
                // NOTE(review): same copy-pasted trace text as above.
                Debug.WriteLine("AudioGraph creation error: ");
                ElementSoundPlayer.State = ElementSoundPlayerState.Off;
            }
        }
        catch (Exception e)
        {
            // If this is an unknown status it means that the error is fatal and retry will likely fail.
            if (SocketError.GetStatus(e.HResult) == SocketErrorStatus.Unknown)
            {
                Listening = false;
                Disconnect();
                Debug.WriteLine("Fatal unknown error occurred.");
                break;
            }
        }
    }
    Debug.WriteLine("Stopped to listen for input.");
}
/// <summary>
/// Builds the note-playback graph: quantum sized from the desired-latency text
/// box, default output device, and per-note frame input nodes (via CreateNotes)
/// at a user-selected channel count. Updates the UI with the resulting setup.
/// </summary>
private async Task CreateAudioGraph()
{
    allFrequencyList.Clear();
    inputNotes.Clear();

    // Create an AudioGraph whose quantum approximates the requested latency
    // (text box value in ms * 48 samples/ms at 48 kHz).
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.ClosestToDesired;
    if (Int32.TryParse(DesiredLatencyText.Text, out var bufLength))
    {
        bufferLength = bufLength * 48;
    }
    settings.DesiredSamplesPerQuantum = bufferLength;

    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        // Cannot create graph.
        rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
        return;
    }
    graph = result.Graph;
    Debug.WriteLine($"Set samples per quantum to {graph.SamplesPerQuantum}");

    // Create a device output node.
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device output node.
        rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
        speakerContainer.Background = new SolidColorBrush(Colors.Red);
        // BUG FIX: previously fell through and dereferenced the null DeviceOutputNode.
        return;
    }
    deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
    rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
    speakerContainer.Background = new SolidColorBrush(Colors.Green);

    // Create the FrameInputNodes at the same format as the graph, except explicitly
    // set the number of input channels to allow a multiple channel stream.
    AudioEncodingProperties nodeEncodingProperties = graph.EncodingProperties;
    uint chanCount = 1;
    if (UInt32.TryParse(ChannelsPerStreamText.Text, out var chanCnt))
    {
        chanCount = chanCnt;
    }
    nodeEncodingProperties.ChannelCount = chanCount;
    if (Int32.TryParse(NoteCountText.Text, out var noteCnt))
    {
        noteCount = noteCnt;
    }
    CreateNotes(noteCount, nodeEncodingProperties, deviceOutputNode);

    // Summarize the configured note set and buffer timing for the UI.
    double lowNote = allFrequencyList[0];
    double hiNote = allFrequencyList[allFrequencyList.Count - 1];
    noteCount = inputNotes.Keys.Count;
    var mSLength = 1000.0 * (double)graph.SamplesPerQuantum / (double)graph.EncodingProperties.SampleRate;
    setupDescription = $"playing {allFrequencyList.Count} notes in {allFrequencyList.Count / 3} octaves ({lowNote:0.0} -> {hiNote:0.0}), {graph.SamplesPerQuantum} samples, in {inputNotes.Count}, {mSLength:0.0}mS buffers";
    DetailText.Text = setupDescription;
    frameContainer.Background = new SolidColorBrush(Colors.Green);

    // Start the graph since we will only start/stop the frame input nodes.
    graph.Start();
}
/// <summary>
/// Initializes the player: loads the playlist, builds a stereo frame-input
/// AudioGraph on the default output device, creates the Player at the graph's
/// sample rate, and streams generated audio per quantum.
/// NOTE(review): Application.Current.Exit() does not stop execution of this
/// method synchronously — on a creation failure the code below still runs
/// briefly against a null graph/node; confirm that is acceptable.
/// </summary>
private async void InitializeAsync()
{
    await UpdatePlaylistAsync();
    MasterVolume = 100;
    await AudioSourceManager.InitializeAsync(new FileSystem(), "GroorineCore");

    var settings = new AudioGraphSettings(AudioRenderCategory.Media) { };
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        await new MessageDialog("Can't create AudioGraph! Application will stop...").ShowAsync();
        Application.Current.Exit();
    }
    _graph = result.Graph;

    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await _graph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        await new MessageDialog("Can't create DeviceOutputNode! Application will stop...").ShowAsync();
        Application.Current.Exit();
    }
    _deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

    // Frame input node at the graph's format, explicitly stereo, initially stopped.
    AudioEncodingProperties nodeEncodingProperties = _graph.EncodingProperties;
    nodeEncodingProperties.ChannelCount = 2;
    _frameInputNode = _graph.CreateFrameInputNode(nodeEncodingProperties);
    _frameInputNode.AddOutgoingConnection(_deviceOutputNode);
    _frameInputNode.Stop();

    // Player runs at the graph's sample rate so generated buffers match the node.
    _player = new Player((int)nodeEncodingProperties.SampleRate);
    // Mirror the player's play/pause state into the view-model flags on the UI context.
    _player.PropertyChanged += (sender, args) =>
    {
        switch (args.PropertyName)
        {
            case nameof(_player.IsPlaying):
                _synchronizationContext.Post(o =>
                {
                    if (!_player.IsPlaying && !_player.IsPausing && IsPlaying)
                    {
                        IsPlaying = CanStop = false;
                    }
                }, null);
                break;
        }
    };

    // Supply freshly generated audio whenever the node requests samples.
    _frameInputNode.QuantumStarted += (sender, args) =>
    {
        var numSamplesNeeded = (uint)args.RequiredSamples;
        if (numSamplesNeeded != 0)
        {
            //_synchronizationContext.Post(o =>
            //{
            // foreach (var a in Channels)
            // a.Update();
            AudioFrame audioData = GenerateAudioData(numSamplesNeeded);
            _frameInputNode.AddFrame(audioData);
            //}, null);
        }
    };

    _graph.Start();
    _frameInputNode.Start();

    /* Legacy NAudio-based playback path, kept for reference:
     * _player = new Player();
     * _buffer = _player.CreateBuffer(50);
     * _bwp = new BufferedWaveProvider(new WaveFormat(44100, 16, 2));
     * _nativePlayer = new WasapiOutRT(AudioClientShareMode.Shared, 50);
     * _nativePlayer.Init(() => _bwp);
     * _nativePlayer.Play();
     */
    IsInitialized = true;
    /* Legacy pump loop, kept for reference:
     * while (true)
     * {
     *     _player.GetBuffer(_buffer);
     *     var b = ToByte(_buffer);
     *     _bwp.AddSamples(b, 0, b.Length);
     *     while (_bwp.BufferedBytes > _buffer.Length * 2)
     *         await Task.Delay(1);
     * }
     */
}
/// <summary>
/// Creates a low-latency graph on the speaker chosen in the Speakers combo box,
/// plus default-microphone input and device output nodes. Registers the
/// unrecoverable-error handler required by the lowest-latency setting.
/// NOTE(review): Speakers.SelectedIndex - 1 assumes index 0 is a placeholder
/// entry; with nothing selected (-1) this indexes outputDevices[-2] — confirm
/// the UI guarantees a valid selection before this runs.
/// </summary>
private async Task CreateAudioGraph()
{
    var settings = new AudioGraphSettings(AudioRenderCategory.Media)
    {
        QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency,
        // Hard-coded mapping from combo-box index to enumerated output device.
        PrimaryRenderDevice = outputDevices[Speakers.SelectedIndex - 1]
    };

    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        return;
    }
    graph = result.Graph;

    // Output node on the selected render device.
    CreateAudioDeviceOutputNodeResult outputResult = await graph.CreateDeviceOutputNodeAsync();
    if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return;
    }
    deviceOutputNode = outputResult.DeviceOutputNode;

    // Input node on the default capture device.
    CreateAudioDeviceInputNodeResult inputResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    if (inputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return;
    }
    deviceInputNode = inputResult.DeviceInputNode;

    // Because we are using the lowest latency setting, we need to handle
    // device disconnection errors.
    graph.UnrecoverableErrorOccurred += Graph_UnrecoverableErrorOccurred;
}
/// <summary>
/// Create input audio graph: 48 kHz stereo 32-bit float capture from the given
/// (or default) microphone into a frame output node drained per quantum.
/// Every failure path decrements the use counter, self-mutes, and locks mute.
/// </summary>
/// <param name="deviceId">Override for default input device id.</param>
/// <returns>True when the capture graph is running; false on any failure.</returns>
public static async Task <bool> CreateInputDeviceNode(string deviceId = null)
{
    // Switching devices: tear down the old graph. Same device: just count a user.
    if (ingraph != null && deviceId != InputDeviceID)
    {
        HeavyDisposeInGraph();
    }
    else
    {
        inGraphCount++;
    }

    Console.WriteLine("Creating AudioGraphs");

    // Graph format: 48 kHz stereo 32-bit float.
    AudioGraphSettings graphsettings = new AudioGraphSettings(AudioRenderCategory.Media);
    graphsettings.EncodingProperties = new AudioEncodingProperties();
    graphsettings.EncodingProperties.Subtype = "Float";
    graphsettings.EncodingProperties.SampleRate = 48000;
    graphsettings.EncodingProperties.ChannelCount = 2;
    graphsettings.EncodingProperties.BitsPerSample = 32;
    graphsettings.EncodingProperties.Bitrate = 3072000;

    CreateAudioGraphResult graphresult = await AudioGraph.CreateAsync(graphsettings);
    if (graphresult.Status != AudioGraphCreationStatus.Success)
    {
        // Cannot create graph: roll back the counter and force-mute.
        inGraphCount--;
        LocalState.VoiceState.SelfMute = true;
        VoiceManager.lockMute = true;
        return(false);
    }

    // "Save" graph.
    ingraph = graphresult.Graph;

    // Node settings describe the PCM format used for the device input node below.
    // NOTE(review): DesiredSamplesPerQuantum/QuantumSizeSelectionMode set here
    // apply to graph creation, not node creation — only EncodingProperties is
    // actually consumed from nodesettings; verify intent.
    AudioGraphSettings nodesettings = new AudioGraphSettings(AudioRenderCategory.GameChat);
    nodesettings.EncodingProperties = AudioEncodingProperties.CreatePcm(48000, 2, 32);
    nodesettings.DesiredSamplesPerQuantum = 960;
    nodesettings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.ClosestToDesired;
    frameOutputNode = ingraph.CreateFrameOutputNode(ingraph.EncodingProperties);
    quantum = 0;
    ingraph.QuantumStarted += Graph_QuantumStarted;

    // Determine the capture device, falling back to the system default.
    DeviceInformation selectedDevice;
    if (deviceId == "Default" || deviceId == null)
    {
        string device = Windows.Media.Devices.MediaDevice.GetDefaultAudioCaptureId(Windows.Media.Devices.AudioDeviceRole.Default);
        if (!string.IsNullOrEmpty(device))
        {
            selectedDevice = await DeviceInformation.CreateFromIdAsync(device);
            Windows.Media.Devices.MediaDevice.DefaultAudioCaptureDeviceChanged += MediaDevice_DefaultAudioCaptureDeviceChanged;
        }
        else
        {
            // No default capture device available.
            inGraphCount--;
            LocalState.VoiceState.SelfMute = true;
            VoiceManager.lockMute = true;
            return(false);
        }
    }
    else
    {
        try
        {
            selectedDevice = await DeviceInformation.CreateFromIdAsync(deviceId);
        }
        catch
        {
            // Unknown/stale id: fall back to the default capture device.
            selectedDevice = await DeviceInformation.CreateFromIdAsync(Windows.Media.Devices.MediaDevice.GetDefaultAudioCaptureId(Windows.Media.Devices.AudioDeviceRole.Default));
            deviceId = "Default";
        }
    }

    CreateAudioDeviceInputNodeResult result = await ingraph.CreateDeviceInputNodeAsync(MediaCategory.Media, nodesettings.EncodingProperties, selectedDevice);
    if (result.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device input node: roll back and force-mute.
        inGraphCount--;
        LocalState.VoiceState.SelfMute = true;
        VoiceManager.lockMute = true;
        return(false);
    }

    // Attach input device: microphone -> frame output node.
    deviceInputNode = result.DeviceInputNode;
    deviceInputNode.AddOutgoingConnection(frameOutputNode);
    InputDeviceID = deviceId;

    // Begin capturing.
    frameOutputNode.Start();
    ingraph.Start();
    return(true);
}
/// <summary>
/// Page entry point: locates the USB audio capture/render devices, starts the LED
/// strips, then builds an AudioGraph routing the USB input to both the device output
/// and a frame output node used for analysis, and starts all nodes.
/// </summary>
/// <param name="e">Navigation event arguments (unused).</param>
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
    // Pick the first capture device whose name mentions "usb".
    var audioInputDevices = await DeviceInformation.FindAllAsync(DeviceClass.AudioCapture);
    foreach (var device in audioInputDevices)
    {
        if (device.Name.ToLower().Contains("usb"))
        {
            audioInput = device;
            break;
        }
    }
    if (audioInput == null)
    {
        Debug.WriteLine("Could not find USB audio card");
        return;
    }

    // Render devices: the USB card is our audio output; any non-USB device is assumed
    // to be the Raspberry Pi's onboard output (last non-USB match wins).
    var audioOutputDevices = await DeviceInformation.FindAllAsync(DeviceClass.AudioRender);
    foreach (var device in audioOutputDevices)
    {
        if (device.Name.ToLower().Contains("usb"))
        {
            audioOutput = device;
        }
        else
        {
            raspiAudioOutput = device;
        }
    }
    if (audioOutput == null)
    {
        Debug.WriteLine("Could not find USB audio card");
        return;
    }

    // Set up LED strips.
    await leftStrip.Begin();
    await rightStrip.Begin();

    // Build the graph: small quantum (440 samples) rendered on the Pi's onboard output.
    AudioGraphSettings audioGraphSettings = new AudioGraphSettings(AudioRenderCategory.Media);
    audioGraphSettings.DesiredSamplesPerQuantum = 440;
    audioGraphSettings.DesiredRenderDeviceAudioProcessing = AudioProcessing.Default;
    audioGraphSettings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.ClosestToDesired;
    audioGraphSettings.PrimaryRenderDevice = raspiAudioOutput;

    CreateAudioGraphResult audioGraphResult = await AudioGraph.CreateAsync(audioGraphSettings);
    if (audioGraphResult.Status != AudioGraphCreationStatus.Success)
    {
        Debug.WriteLine("AudioGraph creation failed! " + audioGraphResult.Status);
        return;
    }
    audioGraph = audioGraphResult.Graph;

    CreateAudioDeviceInputNodeResult inputNodeResult = await audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Media, audioGraph.EncodingProperties, audioInput);
    if (inputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        Debug.WriteLine("AudioDeviceInputNode creation failed! " + inputNodeResult.Status);
        return;
    }
    AudioDeviceInputNode inputNode = inputNodeResult.DeviceInputNode;

    CreateAudioDeviceOutputNodeResult outputNodeResult = await audioGraph.CreateDeviceOutputNodeAsync();
    if (outputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        Debug.WriteLine("AudioDeviceOutputNode creation failed! " + outputNodeResult.Status);
        // BUG FIX: previously execution fell through here and dereferenced a null
        // DeviceOutputNode on the next line, crashing the page.
        return;
    }
    AudioDeviceOutputNode outputNode = outputNodeResult.DeviceOutputNode;

    // Fan the input out to both the speaker path and the analysis path.
    frameOutputNode = audioGraph.CreateFrameOutputNode();
    inputNode.AddOutgoingConnection(frameOutputNode);
    inputNode.AddOutgoingConnection(outputNode);

    cube.SetSpeedStripLedColors(LedColorLists.rainbowColors);

    audioGraph.QuantumProcessed += AudioGraph_QuantumProcessed;
    audioGraph.UnrecoverableErrorOccurred += AudioGraph_UnrecoverableErrorOccurred;

    audioGraph.Start();
    outputNode.Start();
    inputNode.Start();
    frameOutputNode.Start();

    cube.Reset();
    cube.Update();
}
/// <summary>
/// Builds the UWP audio graph in low-latency mode: prefers the OMEN headset as the
/// render endpoint when present, creates a device output node and a default-microphone
/// input node, and routes the input straight to the output. Returns silently (leaving
/// the graph partially initialized) if any creation step fails.
/// </summary>
public async Task InitializeUWPAudio()
{
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media)
    {
        QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency
    };

    // Scan render endpoints; if the OMEN headset is present make it the primary
    // render device (when several match, the last one wins, as before).
    outputDevices = await DeviceInformation.FindAllAsync(MediaDevice.GetAudioRenderSelector());
    foreach (DeviceInformation candidate in outputDevices)
    {
        if (candidate.Name.Contains(OMENHeadset))
        {
            settings.PrimaryRenderDevice = candidate;
        }
    }

    CreateAudioGraphResult graphResult = await AudioGraph.CreateAsync(settings);
    if (graphResult.Status != AudioGraphCreationStatus.Success)
    {
        return; // cannot create graph
    }
    uwpAudioGraph = graphResult.Graph;

    // Speaker/headset output node.
    CreateAudioDeviceOutputNodeResult outputResult = await uwpAudioGraph.CreateDeviceOutputNodeAsync();
    if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return; // cannot create device output node
    }
    deviceOutputNode = outputResult.DeviceOutputNode;

    // Default-microphone input node.
    CreateAudioDeviceInputNodeResult inputResult = await uwpAudioGraph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    if (inputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return; // cannot create device input node
    }
    deviceInputNode = inputResult.DeviceInputNode;

#if false //For File recording not use now.
    FileSavePicker saveFilePicker = new FileSavePicker();
    saveFilePicker.FileTypeChoices.Add("Pulse Code Modulation", new List <string>() { ".wav" });
    saveFilePicker.FileTypeChoices.Add("Windows Media Audio", new List <string>() { ".wma" });
    saveFilePicker.FileTypeChoices.Add("MPEG Audio Layer-3", new List <string>() { ".mp3" });
    saveFilePicker.SuggestedFileName = "New Audio Track";
    StorageFile file = await saveFilePicker.PickSaveFileAsync();
    MediaEncodingProfile fileProfile = CreateMediaEncodingProfile(file);
    // Operate node at the graph format, but save file at the specified format
    CreateAudioFileOutputNodeResult fileOutputNodeResult = await uwpAudioGraph.CreateFileOutputNodeAsync(file, fileProfile);
    if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
    {
        // FileOutputNode creation failed
        //rootPage.NotifyUser(String.Format("Cannot create output file because {0}", fileOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
        //fileButton.Background = new SolidColorBrush(Colors.Red);
        return;
    }
    fileOutputNode = fileOutputNodeResult.FileOutputNode;
    deviceInputNode.AddOutgoingConnection(fileOutputNode);
#endif

    // Route the microphone directly to the render device.
    deviceInputNode.AddOutgoingConnection(deviceOutputNode);
}
/// <summary>
/// Completion handler for AudioGraph.CreateAsync: stores the graph, sizes the FFTW
/// working buffers to the smallest power of two >= SamplesPerQuantum, builds the
/// forward FFT plan, then kicks off device-input-node creation. Any failure is
/// reported via ShowMessage.
/// </summary>
/// <param name="asyncInfo">The completed graph-creation operation.</param>
/// <param name="asyncStatus">Completion status of the async operation.</param>
unsafe private void OnCreateGraphCompleted(IAsyncOperation <CreateAudioGraphResult> asyncInfo, AsyncStatus asyncStatus)
{
    if (asyncStatus != AsyncStatus.Completed)
    {
        ShowMessage($"Failed to create graph: {asyncStatus}");
        return;
    }

    CreateAudioGraphResult result = asyncInfo.GetResults();
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        ShowMessage($"Failed to create graph: {result.Status}");
        return;
    }

    this.m_AudioGraph = result.Graph;
    // One quantum's worth of interleaved samples across all channels.
    this.m_QuantumSamples = new float[this.m_AudioGraph.SamplesPerQuantum * this.m_AudioGraph.EncodingProperties.ChannelCount];

    // FFT size = smallest power of two >= SamplesPerQuantum (minimum 2).
    m_FFTSampleSize = 2;
    while (m_FFTSampleSize < this.m_AudioGraph.SamplesPerQuantum)
    {
        m_FFTSampleSize *= 2;
    }

    // (Re)allocate the FFTW buffers and plan exactly once for the final size.
    // BUG FIX: previously the free/malloc/zero/plan sequence ran inside the doubling
    // loop (re-allocating on every iteration), and never ran at all when
    // SamplesPerQuantum <= 2, leaving stale or missing buffers.
    if (pin != IntPtr.Zero)
    {
        FFTWSharp.fftwf.free(pin);
        pin = IntPtr.Zero;
    }
    if (pout != IntPtr.Zero)
    {
        FFTWSharp.fftwf.free(pout);
        pout = IntPtr.Zero;
    }
    // m_FFTSampleSize interleaved complex samples = m_FFTSampleSize * 2 floats * 4 bytes.
    pin = FFTWSharp.fftwf.malloc(m_FFTSampleSize * 8);
    pout = FFTWSharp.fftwf.malloc(m_FFTSampleSize * 8);
    float *fpin = (float *)pin;
    float *fpout = (float *)pout;
    for (int i = 0; i < m_FFTSampleSize * 2; i++)
    {
        fpin[i] = 0.0f;
        fpout[i] = 0.0f;
    }
    if (fplan != IntPtr.Zero)
    {
        FFTWSharp.fftwf.destroy_plan(fplan);
        fplan = IntPtr.Zero;
    }
    fplan = FFTWSharp.fftwf.dft_1d(m_FFTSampleSize, pin, pout, FFTWSharp.fftw_direction.Forward, FFTWSharp.fftw_flags.Estimate);

    // Creating an input node will fail with access denied unless the microphone
    // capability is declared in Package.appxmanifest.
    IAsyncOperation <CreateAudioDeviceInputNodeResult> result2 = null;
    if (this.m_SelectedDevice != null)
    {
        result2 = this.m_AudioGraph.CreateDeviceInputNodeAsync(Windows.Media.Capture.MediaCategory.Other, this.m_AudioGraph.EncodingProperties, this.m_SelectedDevice);
    }
    else
    {
        result2 = this.m_AudioGraph.CreateDeviceInputNodeAsync(Windows.Media.Capture.MediaCategory.Other);
    }
    if (result2 != null)
    {
        result2.Completed = new AsyncOperationCompletedHandler <CreateAudioDeviceInputNodeResult>(OnCreateInputCompleted);
    }
    else
    {
        // BUG FIX: this path previously reported "Failed to create graph" even though
        // the graph itself was created successfully.
        ShowMessage("Failed to create device input node");
    }
}