/// <summary>
/// Toggles between recording and stopped states based on the button caption:
/// starts the graph when it reads "Record"; stops it, finalizes the output
/// file, and reports the outcome when it reads "Stop".
/// </summary>
private async Task ToggleRecordStop()
{
    if (recordStopButton.Content.Equals("Record"))
    {
        // Begin capture and give visual feedback that audio is flowing.
        graph.Start();
        recordStopButton.Content = "Stop";
        audioPipe1.Fill = new SolidColorBrush(Colors.Blue);
        audioPipe2.Fill = new SolidColorBrush(Colors.Blue);
        return;
    }

    if (!recordStopButton.Content.Equals("Stop"))
    {
        return;
    }

    // Good idea to stop the graph to avoid data loss.
    graph.Stop();
    var idleColor = Color.FromArgb(255, 49, 49, 49);
    audioPipe1.Fill = new SolidColorBrush(idleColor);
    audioPipe2.Fill = new SolidColorBrush(idleColor);

    TranscodeFailureReason result = await fileOutputNode.FinalizeAsync();
    if (result != TranscodeFailureReason.None)
    {
        // Finalization of file failed. Check result code to see why.
        rootPage.NotifyUser(String.Format("Finalization of file failed because {0}", result.ToString()), NotifyType.ErrorMessage);
        fileButton.Background = new SolidColorBrush(Colors.Red);
        return;
    }

    recordStopButton.Content = "Record";
    rootPage.NotifyUser("Recording to file completed successfully!", NotifyType.StatusMessage);
    fileButton.Background = new SolidColorBrush(Colors.Green);

    // One-shot sample: disable further recording until a new graph is created.
    recordStopButton.IsEnabled = false;
    createGraphButton.IsEnabled = false;
}
/// <summary>
/// Stops the running graph, finalizes the output file, tears down the graph
/// resources, and returns the path of the recorded file.
/// </summary>
/// <returns>The file path of the finished recording.</returns>
public async Task <string> EndRecordAsync()
{
    _graph.Stop();

    // Flush and close the output file before releasing the graph.
    await _outputNode.FinalizeAsync();
    _outputNode = null;

    _graph.Dispose();
    _graph = null;

    return _filePath;
}
/// <summary>
/// Toggles recording on tap. First tap starts capturing audio to a file;
/// second tap stops, finalizes the file, sends it to the Bing Speech API,
/// and scores the transcription against the expected text parts.
/// </summary>
private async void RecordStart(object sender, TappedRoutedEventArgs e)
{
    if (!recordButtonPushed)
    {
        recordButtonPushed = true;
        // Swap the mic image to its "pushed" variant as visual feedback.
        __start_record_button.Source = new BitmapImage(new Uri(this.BaseUri, "Assets/mic-512_pushed.png"));
        await CreateAudioGraph();
        graph.Start();
    }
    else
    {
        recordButtonPushed = false;
        __start_record_button.Source = new BitmapImage(new Uri(this.BaseUri, "Assets/mic-512.png"));
        graph.Stop();
        TranscodeFailureReason finalizeResult = await fileOutputNode.FinalizeAsync();
        if (finalizeResult != TranscodeFailureReason.None)
        {
            // Finalization of file failed. Check result code to see why
            return;
        }
        // Build the Bing Speech recognition request URL with a fresh request id.
        Guid requestId = Guid.NewGuid();
        var Uri = @"https://speech.platform.bing.com/recognize?version=3.0&requestid=" + requestId.ToString() + @"&appID=D4D52672-91D7-4C74-8AD8-42B1D981415A&format=json&locale=en-US&device.os=Windows%20OS&scenarios=ulm&instanceid=f1efbd27-25fd-4212-9332-77cd63176112";
        // NOTE(review): SendRequestAsync is not awaited and its result is assigned
        // straight to a string, so despite the Async suffix it presumably returns
        // string synchronously (blocking the UI thread) — confirm at its definition.
        var resp = SendRequestAsync(Uri, accessToken, "audio/wav; samplerate=16000", path);
        string json = resp;
        ParsedJson jsonResp = JsonConvert.DeserializeObject <ParsedJson>(json);
        // Strip the profanity markup tags from the lexical transcription.
        json = jsonResp.header.lexical.Replace("<profanity>", "");
        json = json.Replace("</profanity>", "");
        if (allDone)
        {
            // A full pass over all parts finished earlier; reset the scoring state.
            precise = 0;
            count = 0;
            Result.Text = "";
            allDone = false;
        }
        // Score the transcription of the current part and accumulate accuracy.
        var temp = StringDifference(parts[count], json, jsonResp.results[0].confidence);
        precise += temp;
        Result.Text += json + " - " + temp.ToString("F1") + " %\n";
        if (count + 1 < parts.Length)
        {
            count++;
        }
        else
        {
            // All parts done: append the overall accuracy
            // ("Общая точность" is Russian for "overall accuracy").
            Result.Text += "Общая точность: " + (precise / parts.Length).ToString("F1") + "%\n";
            allDone = true;
        }
    }
}
/// <summary>
/// Stops and tears down the audio graph if one is active, finalizing the
/// recorded file first. No-op when nothing is recording.
/// </summary>
async Task StopRecordingAsync()
{
    if (audioGraph == null)
    {
        return; // nothing to stop
    }

    audioGraph.Stop();

    // Flush and close the file before releasing the graph.
    await audioFileOutputNode.FinalizeAsync();
    audioGraph.Dispose();
    audioGraph = null;
    Debug.WriteLine("Stopped recording.");
}
/// <summary>
/// Flips the recording state: starts the graph when idle, or stops it and
/// finalizes the PCM output file when a recording is in progress.
/// </summary>
public async Task ToggleRecordStop()
{
    if (Recording)
    {
        // Already recording — stop capturing and flush the PCM data to disk.
        graph.Stop();
        await pcmFileNode.FinalizeAsync();
    }
    else
    {
        // Idle — begin capturing.
        graph.Start();
    }

    Recording = !Recording;
}
/// <summary>
/// Stops the current recording and finalizes the output file. (레코딩 종료)
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when recording has not been started yet.
/// </exception>
/// <returns></returns>
public async Task StopRecordingAsync()
{
    // CA2201: NullReferenceException is reserved for the runtime; a misused
    // API should signal InvalidOperationException instead. Both preconditions
    // carry the same message, so they are checked together.
    if (_audioGraph == null || _outputFilename == null)
    {
        throw new InvalidOperationException("You have to start recording first !");
    }

    _audioGraph.Stop();

    // Flush and close the file so the recording is playable.
    await _audioFileOutputNode.FinalizeAsync();
}
/// <summary>
/// In the background: copies the current recording to a uniquely-named .wav
/// file, finalizes and disposes the audio graph, then re-initializes a fresh
/// graph for the next recording.
/// </summary>
private void SaveAndCleanGraph()
{
    // Task.Run (instead of new TaskFactory().StartNew) unwraps the async
    // lambda, so exceptions in the awaited work surface on the returned task
    // rather than being lost inside an unobserved Task<Task>.
    Task.Run(async() =>
    {
        // Persist the recording under a fresh GUID-based name.
        var file = await _storageFolder.CreateFileAsync(Guid.NewGuid() + ".wav", Windows.Storage.CreationCollisionOption.ReplaceExisting);
        CopyFile(_mainFile, file);

        // Finalize the output node, tear down the graph, and rebuild it so the
        // next recording starts from a clean state.
        await _fileOutputNode.FinalizeAsync();
        _audioGraph.Stop();
        _audioGraph.Dispose();
        await InitGraph();
    });
}
/// <summary>
/// Halts the active recording, finalizes the output file (reporting any
/// transcode failure in a dialog), and releases the audio graph.
/// No-op when no graph exists.
/// </summary>
public async Task StopRecording()
{
    if (_audioGraph == null)
    {
        return; // not recording
    }

    _audioGraph.Stop();

    var finalizeResult = await _fileOutputNode.FinalizeAsync();
    if (finalizeResult != TranscodeFailureReason.None)
    {
        // Tell the user why finalization failed; deliberately fall through so
        // the graph is still disposed below.
        await new MessageDialog("Finalization of file failed: " + finalizeResult).ShowAsync();
    }

    _audioGraph.Dispose();
    _audioGraph = null;
}
/// <summary>
/// Starts (value == 1) or stops (any other value) an audio recording tied to
/// the comment section. On stop, finalizes the file and posts it as a comment.
/// </summary>
/// <param name="value">1 to start recording; anything else to stop and save.</param>
public async void RecordOrStop(int value)
{
    if (value == 1)
    {
        // Start: run the elapsed-seconds timer and the graph, then show the dialog.
        secondstimer.Start();
        graph.Start();
        secondscount = 0;
        await Recordings.ShowAsync();
    }
    else
    {
        secondstimer.Stop();
        graph.Stop();
        // Flush and close the recorded file before announcing the result.
        TranscodeFailureReason finalizeResult = await fileOutputNode.FinalizeAsync();
        if (finalizeResult != TranscodeFailureReason.None)
        {
            MessageDialog md = new MessageDialog("Error in saving the audio", "OOPS!!");
            await md.ShowAsync();
            return;
        }
        else
        {
            MessageDialog md = new MessageDialog("Sucessfully saved", "Hurray!!");
            await md.ShowAsync();
            // Post the saved file into the comment section as a file-type comment.
            UpdateInCommentSection(storageFile.Name);
            com1.Add(new comments { empname = pd.emp.name, message = storageFile.Name, dt = DateTime.Now, empid = pd.emp.id, IsFile = true, storagefile = storageFile });
            // Rebind the source to force the list control to refresh its items.
            commentsSection.ItemsSource = null;
            commentsSection.ItemsSource = com1;
            Recordings.Hide();
        }
    }
}
// NOTE(review): the method's modifiers and return type sit above this chunk;
// given the awaits and `return(result)` of a CreateFileOutputNodeAsync result,
// it is presumably declared `private async Task<CreateAudioFileOutputNodeResult>`
// — confirm upstream. Writes the in-memory _audioData to `file` by driving a
// frame-input -> file-output graph until the QuantumStarted handler signals
// completion via _finished.
SaveAudioToFile( StorageFile file, IProgress <string> status)
{
    _finished = false;
    status.Report("Saving audio to file");

    var mediaEncodingProfile = CreateMediaEncodingProfile(file);
    // Record a single channel when the captured data is mono.
    if (!_audioData.IsStereo && mediaEncodingProfile.Audio != null)
    {
        mediaEncodingProfile.Audio.ChannelCount = 1;
    }

    // Initialize FileOutputNode
    var result = await _audioGraph.CreateFileOutputNodeAsync(file, mediaEncodingProfile);
    if (result.Status != AudioFileNodeCreationStatus.Success)
    {
        return(result);
    }

    _fileOutputNode = result.FileOutputNode;
    _fileOutputNode.Stop();

    // Initialize FrameInputNode and connect it to fileOutputNode
    _frameInputNode = _audioGraph.CreateFrameInputNode(
        // EncodingProprties are different than for input file
        _fileOutputNode.EncodingProperties
        //audioEncodingProperties
        );
    _frameInputNode.AddOutgoingConnection(_fileOutputNode);
    _frameInputNode.Stop();

    // Add a handler which will transfer every audioData sample to audio frame
    _frameInputNode.QuantumStarted += FrameInputNode_QuantumStarted;

    _audioDataCurrentPosition = 0;

    // Start process which will write audio file frame by frame
    // and will generated events QuantumStarted
    _audioGraph.Start();
    // don't start fileOutputNode yet because it will record zeros
    // because we initialised frameInputNode in Stop mode we need to start it
    _frameInputNode.Start();

    // didn't find a better way to wait for writing to file
    // NOTE(review): 50 ms polling of a flag set by the QuantumStarted handler;
    // a TaskCompletionSource completed by the handler would avoid the busy-wait
    // — verify handler semantics before changing.
    while (!_finished)
    {
        await Task.Delay(50);
    }

    // when audioData samples ended and audioGraph already stoped
    await _fileOutputNode.FinalizeAsync();

    // clean status and progress
    status.Report("");
    _ioProgress.Report(0);

    return(result);
}
/// <summary>
/// Wires up the view model: kicks off initialization and creates the
/// delete and export ("bounce" to an audio file) commands.
/// </summary>
public MainPageViewModel()
{
    _musicFiles = new ObservableCollection <StorageFile>();
    CurrentFile = new GroorineFileViewModel(null);
    //_player = new Player();

    // NOTE(review): fire-and-forget async call from a constructor; any failure
    // inside it is unobserved — confirm this is intentional.
    InitializeAsync();

    // Removes the given file from the list and deletes it from storage.
    DeleteCommand = new DelegateCommand(async(o) =>
    {
        if (!(o is StorageFile))
        {
            return;
        }
        var sf = o as StorageFile;
        MusicFiles.Remove(sf);
        await sf.DeleteAsync();
    });

    // Renders ("bounces") the given MIDI file to an audio file chosen by the
    // user, in one of the supported encodings.
    ExportCommand = new DelegateCommand(async(o) =>
    {
        if (!(o is StorageFile))
        {
            return;
        }
        var sf = o as StorageFile;

        // Let the user pick the destination file and, via extension, the format.
        var fsp = new FileSavePicker();
        fsp.FileTypeChoices.Add("Wave Audio", new List <string> { ".wav" });
        fsp.FileTypeChoices.Add("Windows Media Audio", new List <string> { ".wma" });
        fsp.FileTypeChoices.Add("MPEG 3 Audio", new List <string> { ".mp3" });
        fsp.FileTypeChoices.Add("MPEG 4 Audio", new List <string> { ".m4a" });
        fsp.SuggestedFileName = sf.DisplayName;
        fsp.CommitButtonText = "Bounce";
        StorageFile file = await fsp.PickSaveFileAsync();
        if (file == null)
        {
            return; // user cancelled the picker
        }

        // Map the chosen extension to a media encoding profile.
        MediaEncodingProfile mediaEncodingProfile;
        switch (file.FileType.ToString().ToLowerInvariant())
        {
        case ".wma":
            mediaEncodingProfile = MediaEncodingProfile.CreateWma(AudioEncodingQuality.High);
            break;
        case ".mp3":
            mediaEncodingProfile = MediaEncodingProfile.CreateMp3(AudioEncodingQuality.High);
            break;
        case ".wav":
            mediaEncodingProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);
            break;
        case ".m4a":
            mediaEncodingProfile = MediaEncodingProfile.CreateM4a(AudioEncodingQuality.High);
            break;
        default:
            // Unreachable for the choices offered above.
            throw new ArgumentException();
        }

        CreateAudioFileOutputNodeResult result = await _graph.CreateFileOutputNodeAsync(file, mediaEncodingProfile);
        if (result.Status != AudioFileNodeCreationStatus.Success)
        {
            // FileOutputNode creation failed
            await new MessageDialog("We couldn't create FileOutputNode, so we failed to bounce.").ShowAsync();
            return;
        }
        AudioFileOutputNode node = result.FileOutputNode;

        // Re-wire the graph so playback also feeds the file output node,
        // then restart playback of the selected file from the beginning.
        _graph.Stop();
        _frameInputNode.AddOutgoingConnection(node);
        Stop();
        _player.Load(SmfParser.Parse(await sf.OpenStreamForReadAsync()));
        Play();
        _graph.Start();

        var a = new BouncingDialog();
#pragma warning disable CS4014 // fire-and-forget: execution continues before the dialog closes
        a.ShowAsync();
#pragma warning restore CS4014
        // Poll until playback — and therefore bouncing — has finished.
        while (_player.IsPlaying)
        {
            await Task.Delay(1);
        }
        _graph.Stop();
        // Flush and close the bounced file, then resume the live graph.
        await node.FinalizeAsync();
        _graph.Start();
        a.Hide();
        await new MessageDialog("Bouncing has successfully finished!").ShowAsync();
    });
}