/// <summary>
/// Ensures the current route is synchronized, runs speech-to-text over every
/// unprocessed audio attachment of the route point, appends any recognized text
/// to the point's description, bumps its version, and raises
/// PropertyChanged("Description") on the UI thread.
/// Progress is reported per audio file via MessagingCenter as a fraction in [0..1].
/// </summary>
/// <returns>True when the route point ends up with a non-empty description.</returns>
private async Task<bool> recognizeSpeechToText()
{
    // Sync the route first if the server says it is out of date.
    SyncServer syncRoute = new SyncServer();
    bool synced = !await syncRoute.SyncRouteIsNeedAsync(_vpoint.RouteId);
    if (!synced)
    {
        synced = await syncRoute.Sync(_vpoint.RouteId, false);
    }

    if (synced)
    {
        TokenStoreService tokenService = new TokenStoreService();
        string authToken = await tokenService.GetAuthTokenAsync();

        // Materialize once: the original enumerated the sequence twice
        // (Count() and then foreach), re-running the query.
        var audios = GetUnprocessedAudios().ToList();
        int index = 0;
        int count = audios.Count;

        SpeechToTextHelper speechToText = new SpeechToTextHelper(authToken);
        string oldDescription = _vpoint.Description;
        var sb = new StringBuilder();

        foreach (var audio in audios)
        {
            string textResult = await speechToText.TryRecognizeAudioAsync(audio.RoutePointMediaObjectId);
            if (speechToText.LastHttpStatusCode == HttpStatusCode.OK)
            {
                // "Текст не распознан" ("text not recognized") is a user-facing
                // Russian placeholder — kept byte-for-byte.
                sb.AppendLine(string.IsNullOrEmpty(textResult) ? "Текст не распознан" : textResult);

                // Mark the media object processed and persist the transcript.
                ViewRoutePointMediaObject vMediaObject = new ViewRoutePointMediaObject();
                vMediaObject.Load(audio.RoutePointMediaObjectId);
                vMediaObject.Processed = true;
                vMediaObject.ProcessResultText = textResult;
                vMediaObject.Save();
            }

            index++;
            // Original "index * 100 / count / 100" reduces to index / count.
            // Guard count == 0 defensively (loop body would not run, but keep the
            // expression total).
            double percent = count > 0 ? (double)index / count : 0.0;
            Xamarin.Forms.MessagingCenter.Send<SyncProgressImageLoadingMessage>(
                new SyncProgressImageLoadingMessage()
                {
                    RouteId = _vpoint.RouteId,
                    ProgressValue = percent
                },
                string.Empty);
        }

        string newDescription = sb.ToString();
        // BUG FIX: oldDescription can be null (the method's own return value shows
        // Description may be null/empty); the original oldDescription.Equals(...)
        // threw NullReferenceException in that case. string.Equals is null-safe.
        if (!string.IsNullOrEmpty(newDescription)
            && !string.Equals(oldDescription, newDescription, StringComparison.Ordinal))
        {
            _vpoint.Description += Environment.NewLine + newDescription;
            _vpoint.Version++;
            _vpoint.Save();
            // PropertyChanged must be raised on the UI thread.
            Device.BeginInvokeOnMainThread(() =>
            {
                PropertyChanged?.Invoke(this, new PropertyChangedEventArgs("Description"));
            });
        }
    }

    return !string.IsNullOrEmpty(_vpoint.Description);
}
/// <summary>
/// Runs speech-to-text on a recorded audio file, immediately inserts the
/// transcript into <see cref="Messages"/> with a placeholder speaker label,
/// then performs speaker identification and replaces the entry with the
/// identified speaker's name and colors. Failures are surfaced as an
/// "Error" message instead of crashing.
/// </summary>
/// <param name="filePath">Path of the recorded audio file to process.</param>
// NOTE(review): async void is kept intentionally — the public signature must stay
// call-compatible and the try/catch below keeps exceptions from escaping.
public async void RunActionOnRecordAsync(string filePath)
{
    // We run a speech to Text recognition
    var message = await SpeechToTextHelper.RunSpeechToTextAsync(filePath, SpeakerLanguage);
    if (message.MessageText.Equals("N/A"))
    {
        return; // nothing recognized — nothing to display
    }

    var speakerLabel = "Not yet identified";

    // we insert the message without the speaker identification
    await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
    {
        Messages.Insert(0, new Message(speakerLabel, message.MessageText) { FilePath = filePath });
    });

    try
    {
        // Defaults used when the speaker cannot be identified.
        Color backgroundColor = Color.FromArgb(128, 128, 128, 128);
        Color borderColor = Color.FromArgb(255, 68, 68, 68);

        // Only profiles that have finished enrolling can be matched against.
        IEnumerable<string> speakerIds =
            from sp in Speakers
            where !sp.EnrollmentStatus.Equals("Enrolling")
            select sp.SpeakerId.ToString();

        // We run a speaker recognition
        var identificationToken = await identificationHelper.IdentifySpeakerAsync(filePath, speakerIds);

        speakerLabel = "Speaker not identified";
        if (!identificationToken.Status.Equals("failed"))
        {
            Guid identifiedId = Guid.Parse(identificationToken.ProcessingResult.IdentificationProfileId);
            if (identifiedId != Guid.Empty)
            {
                // BUG FIX: FirstOrDefault returns null when the identified profile id
                // is not in the local Speakers collection; the original dereferenced
                // it unconditionally and threw NullReferenceException (caught below,
                // but the update was lost and a spurious "Error" row was inserted).
                Speaker speaker = Speakers.FirstOrDefault(s => s.SpeakerId == identifiedId);
                if (speaker != null)
                {
                    speakerLabel = speaker.SpeakerName + " (" + identificationToken.ProcessingResult.Confidence + ")";
                    // Color is a struct: mutating the local copy's alpha does not
                    // touch speaker.DialogColor.
                    backgroundColor = speaker.DialogColor;
                    backgroundColor.A = 128;
                    borderColor = speaker.DialogColor;
                }
            }
        }

        // we update the message with the speaker identified
        await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
        {
            // NOTE(review): lookup is by transcript text, so two recordings with
            // identical transcripts may update the wrong row — matching on
            // FilePath would be safer; confirm before changing.
            var item = Messages.FirstOrDefault(m => m.MessageText.Equals(message.MessageText));
            if (item != null)
            {
                int i = Messages.IndexOf(item);
                Messages[i] = new Message()
                {
                    FilePath = item.FilePath,
                    SpeakerName = speakerLabel,
                    SpeakerStyle = borderColor.ToString(),
                    SpeakerColor = backgroundColor.ToString(),
                    MessageText = item.MessageText
                };
            }
        });
    }
    catch (Exception ex)
    {
        // Surface any recognition/identification failure in the message list.
        await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
        {
            Messages.Insert(0, new Message("Error", ex.Message));
        });
    }
    //File.Delete(filePath);
}