// Event handler for the file completion event
private void FileInput_FileCompleted(AudioFileInputNode sender, object args)
{
    // File playback is done. Stop the graph
    _graph.Stop();

    // Reset the file input node so starting the graph will resume playback from the beginning of the file
    sender.Reset();
}
public void Stop()
{
    m_audioDataMutex.WaitOne();

    m_isRunning = false;
    m_isFlushing = false;

    // Stop the graph first, then dispose the nodes and the graph itself while holding the data lock
    if (m_audioGraph != null)
    {
        m_audioGraph.Stop();
    }

    if (m_deviceOutputNode != null)
    {
        m_deviceOutputNode.Dispose();
        m_deviceOutputNode = null;
    }

    if (m_frameInputNode != null)
    {
        m_frameInputNode.Dispose();
        m_frameInputNode = null;
    }

    if (m_audioGraph != null)
    {
        m_audioGraph.Dispose();
        m_audioGraph = null;
    }

    m_audioData = null;

    m_audioDataMutex.ReleaseMutex();
}
/// <summary>
/// Called on every quantum while audio frames are read from the input file
/// </summary>
private void FileInput_QuantumStarted(AudioGraph sender, object args)
{
    // Throttle progress reporting to every 100th quantum
    if (sender.CompletedQuantumCount % 100 == 0)
    {
        // Duration.Ticks are 100 ns units, so ticks * 1e-7 gives the duration in seconds
        var numOfSamples = 0.0000001 * _fileInputNode.Duration.Ticks * _fileInputNode.EncodingProperties.SampleRate;
        var dProgress = 100 * (int)sender.CompletedQuantumCount * sender.SamplesPerQuantum / numOfSamples;
        _ioProgress?.Report(dProgress);
    }

    if (_audioDataCurrentPosition == 0)
    {
        _frameOutputNode.Start();
    }

    var frame = _frameOutputNode.GetFrame();
    ProcessInputFrame(frame);

    if (_finished)
    {
        _frameOutputNode?.Stop();
        _audioGraph?.Stop();
    }
}
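The ProcessInputFrame helper called above is not part of the snippet; the following is a minimal sketch of what such a helper could do, using the IMemoryBufferByteAccess pattern from the Windows AudioGraph samples. The method name comes from the snippet; the float-PCM assumption and the body are illustrative only.

[ComImport]
[Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
unsafe interface IMemoryBufferByteAccess
{
    void GetBuffer(out byte* buffer, out uint capacity);
}

// Hypothetical ProcessInputFrame: reads the raw samples of one AudioFrame.
// Requires the project to allow unsafe code.
private unsafe void ProcessInputFrame(AudioFrame frame)
{
    using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Read))
    using (IMemoryBufferReference reference = buffer.CreateReference())
    {
        byte* dataInBytes;
        uint capacityInBytes;
        ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);

        // Assuming the graph delivers 32-bit float PCM, reinterpret the byte buffer as floats
        float* dataInFloat = (float*)dataInBytes;
        uint sampleCount = capacityInBytes / sizeof(float);

        // ... accumulate or analyze the samples here ...
    }
}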
private void Recover()
{
    App.Log("Recover MediaPlayer");
    graph.Stop();

    // Dispose each node individually; ignore failures since the graph may already be in an error state
    try { mainInputNode.Dispose(); } catch (Exception) { }
    try { subInputNode.Dispose(); } catch (Exception) { }
    try { outputNode.Dispose(); } catch (Exception) { }

    mainInputNode = null;
    subInputNode = null;
    outputNode = null;
    mainSong = null;
    subSong = null;

    try { graph.Dispose(); } catch (Exception) { }
    graph = null;

    Init();
}
private async Task ResetAudioGraph()
{
    if (graph != null)
    {
        graph.Stop();
        graph.Dispose();
    }

    await CreateAudioGraph();
}
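The CreateAudioGraph method awaited above is not shown; the sketch below is one way such a helper could be written, assuming a graph field, a hypothetical deviceOutputNode field, and the default output device. It is an illustration, not the original implementation.

private async Task CreateAudioGraph()
{
    // Create a graph tuned for media playback
    var settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        return; // real code should surface result.Status to the caller
    }
    graph = result.Graph;

    // Route the graph to the default output device
    CreateAudioDeviceOutputNodeResult outputResult = await graph.CreateDeviceOutputNodeAsync();
    if (outputResult.Status == AudioDeviceNodeCreationStatus.Success)
    {
        deviceOutputNode = outputResult.DeviceOutputNode; // hypothetical field
    }
}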
private void TogglePlay()
{
    if (graphButton.Content.Equals("Start Graph"))
    {
        graph.Start();
        graphButton.Content = "Stop Graph";
        audioPipe.Fill = new SolidColorBrush(Colors.Blue);
    }
    else
    {
        graph.Stop();
        graphButton.Content = "Start Graph";
        audioPipe.Fill = new SolidColorBrush(Color.FromArgb(255, 49, 49, 49));
    }
}
private void TogglePlay()
{
    // Toggle playback
    if (playing == false)
    {
        System.Diagnostics.Debug.WriteLine("Playing");
        graph.Start();
        playing = true;
    }
    else
    {
        playing = false;
        graph.Stop();
    }
}
private async Task ToggleRecordStop()
{
    if (recordStopButton.Content.Equals("Record"))
    {
        graph.Start();
        recordStopButton.Content = "Stop";
        audioPipe1.Fill = new SolidColorBrush(Colors.Blue);
        audioPipe2.Fill = new SolidColorBrush(Colors.Blue);
    }
    else if (recordStopButton.Content.Equals("Stop"))
    {
        // Good idea to stop the graph to avoid data loss
        graph.Stop();

        audioPipe1.Fill = new SolidColorBrush(Color.FromArgb(255, 49, 49, 49));
        audioPipe2.Fill = new SolidColorBrush(Color.FromArgb(255, 49, 49, 49));

        TranscodeFailureReason finalizeResult = await fileOutputNode.FinalizeAsync();
        if (finalizeResult != TranscodeFailureReason.None)
        {
            // Finalization of file failed. Check result code to see why
            rootPage.NotifyUser(String.Format("Finalization of file failed because {0}", finalizeResult.ToString()), NotifyType.ErrorMessage);
            fileButton.Background = new SolidColorBrush(Colors.Red);
            return;
        }

        recordStopButton.Content = "Record";
        rootPage.NotifyUser("Recording to file completed successfully!", NotifyType.StatusMessage);
        fileButton.Background = new SolidColorBrush(Colors.Green);
        recordStopButton.IsEnabled = false;
        createGraphButton.IsEnabled = false;
    }
}
/// <summary>
/// 9. Update the emitter when the spatial sound's originating position changes.
/// </summary>
private async void UpdateEmitter()
{
    await this.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
    {
        if (GraphState == GraphStateEnum.Playing)
        {
            _spatialSoundsIdx++;
        }

        if (_spatialSoundsIdx < _spatialSounds.Length)
        {
            ActiveSound.Visibility = Visibility.Collapsed;

            await CreateEmitter(_spatialSoundsIdx);

            // Update the visual indicator
            Canvas.SetLeft(this.ActiveSound, _spatialSounds[_spatialSoundsIdx].ImageResourceLocation.X * scaleFactor);
            Canvas.SetTop(this.ActiveSound, _spatialSounds[_spatialSoundsIdx].ImageResourceLocation.Y * scaleFactor);
            ActiveSound.Visibility = Visibility.Visible;
        }
        else
        {
            this.GraphState = GraphStateEnum.Stopped;
            _graph.Stop();
            _graph = null;
        }
    });
}
// Will be called when speaking has finished
private void MediaSourceInputNode_MediaSourceCompleted(MediaSourceAudioInputNode sender, object args)
{
    _audioGraph?.Stop();
    _deviceOutputNode?.Start(); // restart output - needed ??
    EndOfSpeak();
}
/// <summary>
/// Stops the audio stream.
/// </summary>
public Task Stop()
{
    if (Active)
    {
        Active = false;
        outputNode?.Stop();
        audioGraph?.Stop();
        OnActiveChanged?.Invoke(this, false);
    }

    outputNode?.Dispose();
    outputNode = null;

    if (audioGraph != null)
    {
        audioGraph.QuantumStarted -= Graph_QuantumStarted;
        audioGraph.UnrecoverableErrorOccurred -= Graph_UnrecoverableErrorOccurred;
        audioGraph.Dispose();
        audioGraph = null;
    }

    return Task.CompletedTask;
}
private void TogglePlay()
{
    if (graphButton.Content.Equals("Start Graph"))
    {
        graph.Start();
        graphButton.Content = "Stop Graph";
        audioPipe3.Fill = new SolidColorBrush(Colors.Blue);

        if (fileInputNode1 != null)
        {
            audioPipe1.Fill = new SolidColorBrush(Colors.Blue);
        }

        if (fileInputNode2 != null)
        {
            audioPipe2.Fill = new SolidColorBrush(Colors.Blue);
        }
    }
    else if (graphButton.Content.Equals("Stop Graph"))
    {
        graph.Stop();
        graphButton.Content = "Start Graph";
        audioPipe1.Fill = new SolidColorBrush(Color.FromArgb(255, 49, 49, 49));
        audioPipe2.Fill = new SolidColorBrush(Color.FromArgb(255, 49, 49, 49));
        audioPipe3.Fill = new SolidColorBrush(Color.FromArgb(255, 49, 49, 49));
    }
}
private void Stop_Click(object sender, RoutedEventArgs e)
{
    if (fileInput != null)
    {
        fileInput.LoopCount = 0;
    }

    graph.Stop();
}
/// <summary>
/// Called when reading of samples from the input audio file has finished
/// </summary>
private void FileInput_FileCompleted(AudioFileInputNode sender, object args)
{
    _audioGraph.Stop();
    _frameOutputNode?.Stop();
    _audioGraph.Dispose();
    _audioGraph = null;
    _finished = true;
    _ioProgress?.Report(0);
}
public void Stop()
{
    if (m_audioGraph != null)
    {
        m_audioGraph.Stop();
        m_audioGraph.Dispose();
        m_audioGraph = null;
    }
}
public void StopAudio()
{
    if (hasInit == false)
    {
        return;
    }

    isStarted = false;

    // Stop the AudioGraph which stops all nodes
    audioGraph.Stop();
}
private async void FileInputNodeOnFileCompleted(AudioFileInputNode sender, object args)
{
    await CoreApplication.MainView.CoreWindow.Dispatcher.RunAsync(
        CoreDispatcherPriority.Normal,
        () =>
        {
            _audioGraph.Stop();
            Position = TimeSpan.Zero;
        });
}
public void Stop()
{
    lock (this)
    {
        isPlaying.CheckIfFulfills("Speaker", "playing", true);
        isPlaying = false;
    }

    audioGraph.Stop();
}
public void Stop()
{
    if (graph == null)
    {
        return;
    }

    graph.Stop();
    graph.Dispose();
    graph = null;
}
private async void RecordStart(object sender, TappedRoutedEventArgs e)
{
    if (!recordButtonPushed)
    {
        recordButtonPushed = true;
        __start_record_button.Source = new BitmapImage(new Uri(this.BaseUri, "Assets/mic-512_pushed.png"));
        await CreateAudioGraph();
        graph.Start();
    }
    else
    {
        recordButtonPushed = false;
        __start_record_button.Source = new BitmapImage(new Uri(this.BaseUri, "Assets/mic-512.png"));
        graph.Stop();

        TranscodeFailureReason finalizeResult = await fileOutputNode.FinalizeAsync();
        if (finalizeResult != TranscodeFailureReason.None)
        {
            // Finalization of file failed. Check result code to see why
            return;
        }

        Guid requestId = Guid.NewGuid();
        var uri = @"https://speech.platform.bing.com/recognize?version=3.0&requestid=" + requestId.ToString() +
                  @"&appID=D4D52672-91D7-4C74-8AD8-42B1D981415A&format=json&locale=en-US&device.os=Windows%20OS&scenarios=ulm&instanceid=f1efbd27-25fd-4212-9332-77cd63176112";
        var resp = SendRequestAsync(uri, accessToken, "audio/wav; samplerate=16000", path);
        string json = resp;

        ParsedJson jsonResp = JsonConvert.DeserializeObject<ParsedJson>(json);
        json = jsonResp.header.lexical.Replace("<profanity>", "");
        json = json.Replace("</profanity>", "");

        if (allDone)
        {
            precise = 0;
            count = 0;
            Result.Text = "";
            allDone = false;
        }

        var temp = StringDifference(parts[count], json, jsonResp.results[0].confidence);
        precise += temp;
        Result.Text += json + " - " + temp.ToString("F1") + " %\n";

        if (count + 1 < parts.Length)
        {
            count++;
        }
        else
        {
            Result.Text += "Overall accuracy: " + (precise / parts.Length).ToString("F1") + "%\n";
            allDone = true;
        }
    }
}
public void Stop()
{
    m_waveBufferMutex.WaitOne();

    if (m_audioGraph != null)
    {
        m_audioGraph.Stop();
        m_audioGraph.Dispose();
        m_audioGraph = null;
    }

    m_waveBufferMutex.ReleaseMutex();
}
public async Task<string> EndRecordAsync()
{
    _graph.Stop();
    await _outputNode.FinalizeAsync();
    _outputNode = null;

    _graph.Dispose();
    _graph = null;

    return _filePath;
}
async Task StopRecordingAsync()
{
    if (audioGraph != null)
    {
        audioGraph.Stop();
        await audioFileOutputNode.FinalizeAsync();
        audioGraph.Dispose();
        audioGraph = null;

        Debug.WriteLine("Stopped recording.");
    }
}
public void Pause()
{
    if (_inputNode == null)
    {
        return;
    }

    _timer?.Dispose();
    _timer = null;

    _audioGraph.Stop();
    State = MediaPlaybackState.Paused;
}
public async Task ToggleRecordStop()
{
    if (!Recording) // Not recording yet
    {
        graph.Start();
    }
    else if (Recording) // Already recording
    {
        graph.Stop(); // Stop recording and save the PCM file
        await pcmFileNode.FinalizeAsync();
    }

    Recording = !Recording;
}
private void buttonPinFinish_ValueChanged(GpioPin sender, GpioPinValueChangedEventArgs e)
{
    // Need to invoke UI updates on the UI thread because this event
    // handler gets invoked on a separate thread.
    var task = Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
    {
        if (e.Edge == GpioPinEdge.FallingEdge)
        {
            GpioStatus.Text = "Button Finish Pressed";

            if (pIndex == numberOfDigits)
            {
                started = 0;

                if (locked == 1)
                {
                    if (VerifyPassword())
                    {
                        locked = 0;

                        if (isAlarmOn)
                        {
                            graph.Stop();
                            StopLights = true;
                            isAlarmOn = false;

                            var taskDeleteText = this.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                            {
                                Text_Status.Text = "";
                            });
                        }
                    }
                }
                else
                {
                    locked = 1;
                }
            }

            pIndex = 0;
        }
        else
        {
            //GpioStatus.Text = "Button ReleasedFinish";
        }

        if (locked == 0)
        {
            Status.Text = "Unlocked";
        }
        else
        {
            Status.Text = "Locked";
        }
    });
}
private void TogglePlay()
{
    // Toggle playback
    if (graphButton.Content.Equals("Start Graph"))
    {
        graph.Start();
        graphButton.Content = "Stop Graph";
    }
    else
    {
        graph.Stop();
        graphButton.Content = "Start Graph";
    }
}
public bool HitButton()
{
    isOn = !isOn;

    if (isOn)
    {
        ag.Start();
    }
    else
    {
        ag.Stop();
    }

    return isOn;
}
public async void RecordOrStop(int value)
{
    if (value == 1)
    {
        secondstimer.Start();
        graph.Start();
        secondscount = 0;
        await Recordings.ShowAsync();
    }
    else
    {
        secondstimer.Stop();
        graph.Stop();

        TranscodeFailureReason finalizeResult = await fileOutputNode.FinalizeAsync();
        if (finalizeResult != TranscodeFailureReason.None)
        {
            MessageDialog md = new MessageDialog("Error in saving the audio", "OOPS!!");
            await md.ShowAsync();
            return;
        }
        else
        {
            MessageDialog md = new MessageDialog("Successfully saved", "Hurray!!");
            await md.ShowAsync();

            UpdateInCommentSection(storageFile.Name);
            com1.Add(new comments
            {
                empname = pd.emp.name,
                message = storageFile.Name,
                dt = DateTime.Now,
                empid = pd.emp.id,
                IsFile = true,
                storagefile = storageFile
            });

            commentsSection.ItemsSource = null;
            commentsSection.ItemsSource = com1;
            Recordings.Hide();
        }
    }
}
async private void ToggleRecord2(object sender, RoutedEventArgs e)
{
    var btn_record_audio = sender as ToggleButton;

    if (btn_record_audio.IsChecked == false)
    {
        _graph_record.Stop();

        // Finalize the file output node so the encoder flushes and the file is complete
        // before the graph is disposed and the recording is played back
        await _file_output_node.FinalizeAsync();
        _file_output_node = null;

        _graph_record.Dispose();

        await PlayAudio(_target_file);

        //using the media element to play the sound
        //var raf_stream = await _target_file.OpenReadAsync();
        //media.SetSource(raf_stream, "");
        //media.Play();
    }
    else
    {
        //initialize the audio graph for recording and then start recording
        AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
        settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;

        CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
        if (result.Status == AudioGraphCreationStatus.Success)
        {
            _graph_record = result.Graph;

            //setup the input
            var input_node = (await _graph_record.CreateDeviceInputNodeAsync(Windows.Media.Capture.MediaCategory.Other)).DeviceInputNode;

            //setup the output (place where audio will be recorded to)
            var feedback_folder = await Windows.Storage.ApplicationData.Current.LocalFolder.CreateFolderAsync("AudioFeedback", CreationCollisionOption.OpenIfExists);
            _target_file = await feedback_folder.CreateFileAsync("audio message.mp3", CreationCollisionOption.GenerateUniqueName);
            var profile = MediaEncodingProfile.CreateMp3(AudioEncodingQuality.High);
            _file_output_node = (await _graph_record.CreateFileOutputNodeAsync(_target_file, profile)).FileOutputNode;

            //direct the input to the output
            input_node.AddOutgoingConnection(_file_output_node);

            media.Stop(); //stop playback since we are recording
            _graph_record.Start();
        }
        else
        {
            await new MessageDialog("Could not initialize recorder").ShowAsync();
        }
    }
}
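The PlayAudio helper awaited above is not shown; based on the commented-out MediaElement lines in the same snippet, a minimal sketch could look like the following. The method name and the media element come from the snippet; the body is an assumption.

// Hypothetical PlayAudio helper, assuming "media" is the MediaElement referenced in the snippet
private async Task PlayAudio(StorageFile file)
{
    // Open the recorded file and hand the stream to the MediaElement for playback
    var stream = await file.OpenReadAsync();
    media.SetSource(stream, file.ContentType);
    media.Play();
}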