/// <summary>
/// Creates a fresh AudioGraph instance and stores the supplied progress reporter.
/// </summary>
/// <param name="progress">Progress sink used to report input/output progress.</param>
/// <returns>The AudioGraph creation result; its Status indicates success or failure.</returns>
public async Task<CreateAudioGraphResult> Init(Progress<double> progress)
{
    // Remember the reporter so later I/O operations can publish their progress.
    _ioProgress = progress;

    // Tear down any graph left over from a previous initialization.
    _audioGraph?.Dispose();
    _audioGraph = null;

    var settings = new AudioGraphSettings(AudioRenderCategory.Media);
    var creation = await AudioGraph.CreateAsync(settings);
    if (creation.Status == AudioGraphCreationStatus.Success)
    {
        _audioGraph = creation.Graph;
    }
    return creation;
}
/// <summary>
/// Tears down the entire audio pipeline after a failure and re-initializes it.
/// Disposal failures are deliberately swallowed (best-effort cleanup of nodes
/// that may already be in a broken state).
/// </summary>
private void Recover()
{
    App.Log("Recover MediaPlayer");
    graph.Stop();

    // Best-effort disposal helper: any exception (including an NRE from a
    // null node) is ignored, matching the original try/catch-per-node shape.
    void TryDispose(IDisposable resource)
    {
        try { resource.Dispose(); } catch (Exception) { }
    }

    TryDispose(mainInputNode);
    TryDispose(subInputNode);
    TryDispose(outputNode);

    mainInputNode = null;
    subInputNode = null;
    outputNode = null;
    mainSong = null;
    subSong = null;

    TryDispose(graph);
    graph = null;

    Init();
}
/// <summary>
/// Releases the AudioGraph when the user navigates away from this page.
/// </summary>
/// <param name="e">Navigation event data (unused).</param>
protected override void OnNavigatedFrom(NavigationEventArgs e)
{
    if (graph != null)
    {
        graph.Dispose();
        // Clear the reference so later navigation/cleanup code cannot touch
        // a disposed graph or dispose it a second time.
        graph = null;
    }
}
/// <summary>
/// Stops the graph and releases all nodes and buffered audio data,
/// serialized by the audio-data mutex.
/// </summary>
public void Stop()
{
    m_audioDataMutex.WaitOne();
    try
    {
        m_isRunning = false;
        m_isFlushing = false;

        if (m_audioGraph != null)
        {
            m_audioGraph.Stop();
        }
        if (m_deviceOutputNode != null)
        {
            m_deviceOutputNode.Dispose();
            m_deviceOutputNode = null;
        }
        if (m_frameInputNode != null)
        {
            m_frameInputNode.Dispose();
            m_frameInputNode = null;
        }
        if (m_audioGraph != null)
        {
            m_audioGraph.Dispose();
            m_audioGraph = null;
        }
        m_audioData = null;
    }
    finally
    {
        // Always release the mutex, even if a Dispose call throws;
        // otherwise the mutex would be abandoned and block other threads.
        m_audioDataMutex.ReleaseMutex();
    }
}
/// <summary>
/// Releases the AudioGraph if it was initialized. Safe to call multiple times.
/// </summary>
public void Dispose()
{
    if (isInitialized)
    {
        audioGraph.Dispose();
        // Clear the flag so a repeated Dispose call does not dispose the
        // graph a second time (Dispose must be idempotent).
        isInitialized = false;
    }
}
/// <summary>
/// Creates the AudioGraph, its device output node and the effect chain.
/// </summary>
/// <returns>true when the graph and output node were created successfully.</returns>
public async Task<bool> InitializeAudioGraphAsync()
{
    var audioGraphSettings = new AudioGraphSettings(AudioRenderCategory.Media);
    var result = await AudioGraph.CreateAsync(audioGraphSettings);
    LastStatus = result.Status.ToString();
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        return false;
    }

    _audioGraph = result.Graph;
    _audioGraph.UnrecoverableErrorOccurred += (sender, args) => OnErrorOccurred(args);

    var outputResult = await _audioGraph.CreateDeviceOutputNodeAsync();
    LastStatus = outputResult.Status.ToString();
    if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        _audioGraph.Dispose();
        // Clear the field so no caller can accidentally use a disposed graph.
        _audioGraph = null;
        return false;
    }
    _outputNode = outputResult.DeviceOutputNode;

    CreateEchoEffect();
    CreateLimiterEffect();
    CreateReverbEffect();
    CreateEqualizerEffect();
    return true;
}
// Removed a large body of commented-out graph/node creation helpers
// (CreateAudioGraphAsync, CreateAudioGraphDeviceOutputNodeAsync,
// CreateAudioGraphDeviceInputNodeAsync) — dead code kept only in history.

/// <summary>
/// Releases the AudioGraph owned by this instance, if one was created.
/// </summary>
public void Dispose()
{
    audGraph?.Dispose();
}
/// <summary>
/// Releases the waveform renderer reference and all audio resources
/// (graph, file input node, device output) owned by this instance.
/// </summary>
public void Dispose()
{
    WaveformRenderer = null;

    // Explicit null checks instead of null-conditional calls; each member
    // may legitimately be null if initialization never completed.
    if (graph != null)
    {
        graph.Dispose();
    }
    if (fileInputNode != null)
    {
        fileInputNode.Dispose();
    }
    if (deviceOutput != null)
    {
        deviceOutput.Dispose();
    }
}
/// <summary>
/// Stops the audio stream, detaches graph event handlers and releases
/// the output node and graph.
/// </summary>
/// <returns>A completed task (the method runs synchronously).</returns>
public Task Stop()
{
    if (Active)
    {
        Active = false;
        outputNode?.Stop();
        audioGraph?.Stop();
        OnActiveChanged?.Invoke(this, false);
    }

    outputNode?.Dispose();
    outputNode = null;

    var graphToRelease = audioGraph;
    if (graphToRelease != null)
    {
        // Unhook handlers before disposing so no callback fires on a dead graph.
        graphToRelease.QuantumStarted -= Graph_QuantumStarted;
        graphToRelease.UnrecoverableErrorOccurred -= Graph_UnrecoverableErrorOccurred;
        graphToRelease.Dispose();
        audioGraph = null;
    }

    return Task.CompletedTask;
}
// Init the AudioGraph
// despite the Aync methods - this will exec synchronously to get the InitPhase only get done when all is available
//
// NOTE(review): this method intentionally blocks (Task.Wait) on async WinRT
// factory calls so that _audioGraph and _deviceOutputNode are fully created
// before any caller can invoke playback. Blocking on async work can deadlock
// on a UI SynchronizationContext — presumably this runs off the UI thread;
// confirm before reusing elsewhere.
private void InitAudioGraph( )
{
    LOG.Log("InitAudioGraph: Begin");

    // Bail out early if a previous failure already marked playback impossible.
    if (!_canPlay)
    {
        LOG.Log("InitAudioGraph: Canceled with _canPlay = false");
        return; // cannot even try..
    }

    // MUST WAIT UNTIL all items are created, else one may call Play too early...
    // cleanup existing items
    if (_deviceOutputNode != null)
    {
        _deviceOutputNode.Dispose( );
        _deviceOutputNode = null;
    }
    if (_audioGraph != null)
    {
        _audioGraph.Dispose( );
        _audioGraph = null;
    }

    // Create an AudioGraph
    AudioGraphSettings settings = new AudioGraphSettings(_renderCat)
    {
        PrimaryRenderDevice = null, // If PrimaryRenderDevice is null, the default playback device will be used.
        MaxPlaybackSpeedFactor = 2, // should preserve some memory
    };

    // We await here the execution without providing an async method ...
    var resultAG = WindowsRuntimeSystemExtensions.AsTask(AudioGraph.CreateAsync(settings));
    resultAG.Wait( );
    if (resultAG.Result.Status != AudioGraphCreationStatus.Success)
    {
        // Creation failed: log details and permanently disable playback.
        LOG.LogError($"InitAudioGraph: Failed to create AudioGraph with RenderCategory: {_renderCat}");
        LOG.LogError($"InitAudioGraph: AudioGraph creation: {resultAG.Result.Status}, TaskStatus: {resultAG.Status}"
            + $"\nExtError: {resultAG.Result.ExtendedError}");
        _canPlay = false;
        return;
    }
    _audioGraph = resultAG.Result.Graph;
    LOG.Log($"InitAudioGraph: AudioGraph: [{_audioGraph.EncodingProperties}]");

    // Create a device output node
    // The output node uses the PrimaryRenderDevice of the audio graph.
    // We await here the execution without providing an async method ...
    var resultDO = WindowsRuntimeSystemExtensions.AsTask(_audioGraph.CreateDeviceOutputNodeAsync());
    resultDO.Wait( );
    if (resultDO.Result.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device output node
        LOG.LogError($"InitAudioGraph: DeviceOutputNode creation: {resultDO.Result.Status}, TaskStatus: {resultDO.Status}"
            + $"\nExtError: {resultDO.Result.ExtendedError}");
        _canPlay = false;
        return;
    }
    _deviceOutputNode = resultDO.Result.DeviceOutputNode;
    LOG.Log($"InitAudioGraph: DeviceOutputNode: [{_deviceOutputNode.Device}]");

    LOG.Log($"InitAudioGraph: InitAudioGraph-END");
}
/// <summary>
/// Destroys the AudioGraph when the page is navigated away from.
/// </summary>
/// <param name="e">Navigation event data (unused).</param>
protected override void OnNavigatedFrom(NavigationEventArgs e)
{
    // Destroy the graph if the page is navigated away from
    if (graph != null)
    {
        graph.Dispose();
        // Clear the reference so re-navigation cannot reuse or
        // double-dispose an already-disposed graph.
        graph = null;
    }
}
/// <summary>
/// Stops and releases the audio graph, if one exists.
/// </summary>
public void Stop()
{
    // Guard clause instead of wrapping the body in an if-block.
    if (m_audioGraph == null)
    {
        return;
    }

    m_audioGraph.Stop();
    m_audioGraph.Dispose();
    m_audioGraph = null;
}
/// <summary>
/// Disposes the AudioGraph and resets the properties.
/// </summary>
public static void ResetMicrophoneInput()
{
    // Null-conditional: the graph may never have been created, or this
    // reset may be invoked more than once; an unconditional Dispose would
    // throw a NullReferenceException in those cases.
    graph?.Dispose();
    isDisposed = true;
    isListing = false;
    isCreating = false;
    stopOnCreation = false;
}
// <SnippetUnrecoverableError>
/// <summary>
/// Handles an unrecoverable graph error by stopping and disposing the dead
/// graph, then rebuilding it from scratch.
/// </summary>
/// <param name="sender">The graph that raised the error.</param>
/// <param name="args">Error details.</param>
private void AudioGraph_UnrecoverableErrorOccurred(AudioGraph sender, AudioGraphUnrecoverableErrorOccurredEventArgs args)
{
    if (sender == audioGraph && args.Error != AudioGraphUnrecoverableError.None)
    {
        // Fixed typo in the diagnostic message ("and" -> "an").
        Debug.WriteLine("The audio graph encountered an unrecoverable error.");
        audioGraph.Stop();
        audioGraph.Dispose();
        InitAudioGraph();
    }
}
/// <summary>
/// Handles AudioGraph errors (e.g. device change) and attempts to restart playback from same position
/// </summary>
/// <param name="sender">The failed audio graph; disposed here.</param>
/// <param name="args">Error details.</param>
private async void Graph_UnrecoverableErrorOccurred(AudioGraph sender, AudioGraphUnrecoverableErrorOccurredEventArgs args)
{
    // Fixed typo ("restaring") and replaced String.Format with interpolation.
    Console.WriteLine($"Unrecoverable Error Occurred, restarting: {args.Error}");

    // Capture where playback was so the reloaded file resumes at that point.
    TimeSpan CurrentPlayPosition = currentAudioFileInputNode != null ? currentAudioFileInputNode.Position : TimeSpan.Zero;

    // The failed graph can no longer be used; release it before rebuilding.
    sender.Dispose();

    StartButtonEnabled = false;
    StopButtonEnabled = false;
    await LoadAudioFile(CurrentPlayPosition);
}
/// <summary>
/// Releases every sound node, the device output node and the audio graph,
/// then suppresses finalization.
/// </summary>
public void Dispose()
{
    // Null-conditional calls replace the explicit if-not-null checks;
    // any node may be null if initialization never completed.
    fixNode?.Dispose();
    errorNode?.Dispose();

    foreach (var node in typingNodes)
    {
        node?.Dispose();
    }
    foreach (var node in spaceNodes)
    {
        node?.Dispose();
    }

    backspaceNode?.Dispose();
    finishedNode?.Dispose();
    deviceOutputNode?.Dispose();
    audioGraph?.Dispose();

    GC.SuppressFinalize(this);
}
/// <summary>
/// Stops and releases the graph; a no-op when no graph exists.
/// </summary>
public void Stop()
{
    // Work through a local so the field is cleared exactly once at the end.
    var current = graph;
    if (current == null)
    {
        return;
    }

    current.Stop();
    current.Dispose();
    graph = null;
}
/// <summary>
/// Stops and releases the current graph, then rebuilds it.
/// </summary>
private async Task ResetAudioGraph()
{
    if (graph != null)
    {
        graph.Stop();
        graph.Dispose();
        // Clear the stale reference: CreateAudioGraph also disposes a
        // non-null graph, which would otherwise double-dispose this one.
        graph = null;
    }
    await CreateAudioGraph();
}
/// <summary>
/// Builds a recording graph: default microphone input routed into a
/// "sample.wav" file output node in local storage. On any node-creation
/// failure the half-built graph is released.
/// </summary>
private async Task CreateAudioGraph()
{
    // Tear down any previous graph before building a new one.
    if (graph != null)
    {
        graph.Dispose();
        graph = null;
    }

    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.SystemDefault;

    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        // Cannot create graph
        return;
    }
    graph = result.Graph;

    // Create a device input node using the default audio input device
    // (requires the microphone capability in the app manifest!!!!)
    CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        // Cannot create device input node; release the half-built graph
        // instead of leaking it (it would otherwise stay alive, disposed
        // only by a later rebuild).
        graph.Dispose();
        graph = null;
        return;
    }
    deviceInputNode = deviceInputNodeResult.DeviceInputNode;

    // Create the target file in local storage.
    StorageFolder storageFolder = Windows.Storage.ApplicationData.Current.LocalFolder;
    StorageFile file = await storageFolder.CreateFileAsync("sample.wav", Windows.Storage.CreationCollisionOption.ReplaceExisting);
    path = file.Path.ToString();

    MediaEncodingProfile fileProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Medium);

    // Operate node at the graph format, but save file at the specified format
    CreateAudioFileOutputNodeResult fileOutputNodeResult = await graph.CreateFileOutputNodeAsync(file, fileProfile);
    if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
    {
        // FileOutputNode creation failed; release the half-built graph.
        graph.Dispose();
        graph = null;
        return;
    }
    fileOutputNode = fileOutputNodeResult.FileOutputNode;

    // Route the microphone input into the file output node.
    // (Fixed comment: only the file output is connected here, not "both".)
    deviceInputNode.AddOutgoingConnection(fileOutputNode);
}
/// <summary>
/// Stops recording, finalizes the output file and tears down the graph.
/// </summary>
/// <returns>The path of the recorded file.</returns>
public async Task<string> EndRecordAsync()
{
    _graph.Stop();

    // Flush and close the output file before the graph itself is released.
    await _outputNode.FinalizeAsync();
    _outputNode = null;

    _graph.Dispose();
    _graph = null;

    return _filePath;
}
/// <summary>
/// Stops and releases the audio graph, serialized by the wave-buffer mutex.
/// </summary>
public void Stop()
{
    m_waveBufferMutex.WaitOne();
    try
    {
        if (m_audioGraph != null)
        {
            m_audioGraph.Stop();
            m_audioGraph.Dispose();
            m_audioGraph = null;
        }
    }
    finally
    {
        // Always release the mutex even if Stop/Dispose throws; otherwise
        // the mutex would be abandoned and block other threads.
        m_waveBufferMutex.ReleaseMutex();
    }
}
/// <summary>
/// Releases the audio graph, both nodes and the timer, clearing each
/// reference immediately after it is disposed.
/// </summary>
public void Dispose()
{
    _audioGraph?.Dispose();
    _audioGraph = null;

    _outputNode?.Dispose();
    _outputNode = null;

    _inputNode?.Dispose();
    _inputNode = null;

    _timer?.Dispose();
    _timer = null;
}
/// <summary>
/// Stops the active recording, finalizes the output file and releases the
/// graph; a no-op if no graph is active.
/// </summary>
async Task StopRecordingAsync()
{
    // Guard clause instead of wrapping the body in an if-block.
    if (audioGraph == null)
    {
        return;
    }

    audioGraph.Stop();
    await audioFileOutputNode.FinalizeAsync();
    audioGraph.Dispose();
    audioGraph = null;
    Debug.WriteLine("Stopped recording.");
}
/// <summary>
/// Builds a low-latency graph on the first available render device, with a
/// device output node and the default microphone as input. Shows a dialog
/// and releases the half-built graph on any failure.
/// </summary>
private async Task CreateAudioGraph()
{
    var outputDevices = await DeviceInformation.FindAllAsync(MediaDevice.GetAudioRenderSelector());
    if (outputDevices.Count == 0)
    {
        // Guard: indexing an empty device list below would throw.
        await new MessageDialog("No audio render devices found.").ShowAsync();
        return;
    }

    var settings = new AudioGraphSettings(AudioRenderCategory.Media)
    {
        QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency,
        PrimaryRenderDevice = outputDevices[0]
    };

    var result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        await new MessageDialog("AudioGraph Creation Error: " + result.Status).ShowAsync();
        return;
    }
    _audioGraph = result.Graph;

    // Create a device output node
    var deviceOutputNodeResult = await _audioGraph.CreateDeviceOutputNodeAsync();
    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        await new MessageDialog("Audio Device Output unavailable: " + deviceOutputNodeResult.Status).ShowAsync();
        // Release the half-built graph; the input-node failure path below
        // already did this — the output path is now consistent with it.
        _audioGraph.Dispose();
        _audioGraph = null;
        return;
    }
    _deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

    // Create a device input node using the default audio input device
    var deviceInputNodeResult = await _audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        await new MessageDialog("Audio Device Input unavailable: " + deviceInputNodeResult.Status).ShowAsync();
        _audioGraph.Dispose();
        _audioGraph = null;
        return;
    }
    _deviceInputNode = deviceInputNodeResult.DeviceInputNode;

    // Because we are using lowest latency setting,
    // in general, we need to handle device disconnection errors
    // graph.UnrecoverableErrorOccurred += Graph_UnrecoverableErrorOccurred;
}
/// <summary>
/// Triggers the system microphone consent prompt by creating and immediately
/// disposing a device input node; no resources are kept.
/// </summary>
private async Task openMicrophonePopup()
{
    AudioGraphSettings settings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        // Graph creation failed (result.Graph would be null) — nothing to do.
        return;
    }

    AudioGraph audioGraph = result.Graph;
    CreateAudioDeviceInputNodeResult resultNode = await audioGraph.CreateDeviceInputNodeAsync(Windows.Media.Capture.MediaCategory.Media);
    if (resultNode.Status == AudioDeviceNodeCreationStatus.Success)
    {
        // Only dispose the node when it was actually created; on failure
        // (e.g. access denied) DeviceInputNode is null.
        resultNode.DeviceInputNode.Dispose();
    }
    audioGraph.Dispose();
}
// Toggles audio recording from a ToggleButton: when unchecked, stops the
// recording graph and plays back the captured file; when checked, builds a
// new recording graph (default microphone -> mp3 file) and starts it.
// NOTE(review): async void is tolerable only because this is a UI event
// handler; exceptions thrown after an await are unobserved.
async private void ToggleRecord2(object sender, RoutedEventArgs e)
{
    var btn_record_audio = sender as ToggleButton;
    if (btn_record_audio.IsChecked == false)
    {
        // Recording just ended: stop and release the graph, then play back.
        _graph_record.Stop();
        _graph_record.Dispose();
        await PlayAudio(_target_file);
        //using the media element to play the sound
        //var raf_stream = await _target_file.OpenReadAsync();
        //media.SetSource(raf_stream, "");
        //media.Play();
    }
    else
    {
        //initialize the audio graph for recording and then start recording
        AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
        settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;

        CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
        if (result.Status == AudioGraphCreationStatus.Success)
        {
            _graph_record = result.Graph;

            //setup the input
            var input_node = (await _graph_record.CreateDeviceInputNodeAsync(Windows.Media.Capture.MediaCategory.Other)).DeviceInputNode;

            //setup the output (place where audio will be recorded to)
            // A uniquely-named file under LocalFolder\AudioFeedback.
            var feedback_folder = await Windows.Storage.ApplicationData.Current.LocalFolder.CreateFolderAsync("AudioFeedback", CreationCollisionOption.OpenIfExists);
            _target_file = await feedback_folder.CreateFileAsync("audio message.mp3", CreationCollisionOption.GenerateUniqueName);
            var profile = MediaEncodingProfile.CreateMp3(AudioEncodingQuality.High);
            var file_output_node = (await _graph_record.CreateFileOutputNodeAsync(_target_file, profile)).FileOutputNode;

            //direct the input to the output
            input_node.AddOutgoingConnection(file_output_node);

            media.Stop(); //stop playback since we are recording
            _graph_record.Start();
        }
        else
        {
            await new MessageDialog("Could not initialize recorder").ShowAsync();
        }
    }
}
/// <summary>
/// Releases unmanaged and - optionally - managed resources.
/// </summary>
/// <param name="disposing"><c>true</c> to release both managed and unmanaged resources; <c>false</c> to release only unmanaged resources.</param>
private void Dispose(bool disposing)
{
    if (!disposedValue)
    {
        if (disposing)
        {
            // Release managed objects. Null-conditional calls keep the
            // Dispose path from throwing a NullReferenceException when
            // initialization never completed — Dispose must not throw.
            Stop();
            AudioGraph?.Dispose();
            frameInputNode?.Dispose();
        }
        disposedValue = true;
    }
}
/// <summary>
/// Finalizes the current output file, copies the main file to a new
/// uniquely-named .wav in storage, then tears down and rebuilds the graph.
/// Runs as fire-and-forget background work.
/// </summary>
private void SaveAndCleanGraph()
{
    // Task.Run (unlike TaskFactory.StartNew) unwraps the async delegate, so
    // the returned task represents the whole operation instead of just its
    // synchronous first step (StartNew returns Task<Task>, hiding failures).
    _ = Task.Run(async () =>
    {
        var file = await _storageFolder.CreateFileAsync(Guid.NewGuid() + ".wav", Windows.Storage.CreationCollisionOption.ReplaceExisting);
        CopyFile(_mainFile, file);
        await _fileOutputNode.FinalizeAsync();
        _audioGraph.Stop();
        _audioGraph.Dispose();
        await InitGraph();
    });
}
/// <summary>
/// Stops the native microphone plugin on a background thread, destroys its
/// state and releases the graph so the plugin can be re-initialized later.
/// </summary>
public static void StopMicDevice()
{
    StringBuilder sb = new StringBuilder(260); // 260 is Windows MAX_PATH as defined in c++. paths cant be longer than this and the plugin knows it, too

    // Task.Run always schedules on the default thread-pool scheduler;
    // the previous Task.Factory.StartNew used TaskScheduler.Current, which
    // varies with the caller's context (a well-known pitfall, CA2008).
    Task.Run(() =>
    {
        MicStopRecording(sb);
        Debug.WriteLine(sb.ToString());
        CheckForErrorOnCall(MicDestroy());
        graph.Dispose(); // unfortunately, the app needs to do this to be able to re-init plugin later
        graph = null;    // this, too
    });
}
/// <summary>
/// Overridable Dispose
/// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
/// </summary>
/// <param name="disposing">Disposing flag</param>
protected virtual void Dispose(bool disposing)
{
    // Guard clause: nothing to do once already disposed.
    if (disposedValue)
    {
        return;
    }

    if (disposing)
    {
        // dispose managed state (managed objects)
        // cleanup existing items: input node, then output node, then graph
        if (_fileInputNode != null)
        {
            _fileInputNode.Dispose();
        }
        if (_deviceOutputNode != null)
        {
            _deviceOutputNode.Dispose();
        }
        if (_audioGraph != null)
        {
            _audioGraph.Dispose();
        }
    }

    disposedValue = true;
}
// Handles an unrecoverable AudioGraph failure (e.g. the render device went
// away): disposes the dead graph on the UI thread, re-queries available
// devices and resets the recording UI to its idle state.
// NOTE(review): async void is tolerable only because this is an event handler.
private async void Graph_UnrecoverableErrorOccurred(AudioGraph sender, AudioGraphUnrecoverableErrorOccurredEventArgs args)
{
    // Recreate the graph and all nodes when this happens
    await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async () =>
    {
        sender.Dispose();

        // Re-query for devices
        await PopulateDeviceList();

        // Reset UI
        fileButton.IsEnabled = false;
        recordStopButton.IsEnabled = false;
        recordStopButton.Content = "Record";
        outputDeviceContainer.Background = new SolidColorBrush(Color.FromArgb(255, 74, 74, 74));
        audioPipe1.Fill = new SolidColorBrush(Color.FromArgb(255, 49, 49, 49));
        audioPipe2.Fill = new SolidColorBrush(Color.FromArgb(255, 49, 49, 49));
    });
}
/// <summary>
/// Disposes the failed graph when an unrecoverable AudioGraph error occurs.
/// Disposal is marshalled to the dispatcher (UI) thread.
/// </summary>
/// <param name="sender">The graph that raised the error.</param>
/// <param name="args">Error details (unused).</param>
private async void Graph_UnrecoverableErrorOccurred(AudioGraph sender, AudioGraphUnrecoverableErrorOccurredEventArgs args)
{
    // Expression lambda instead of a statement body; same single Dispose call.
    await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () => sender.Dispose());
}