/// <summary>
/// Ensures the current route is synced, runs speech-to-text recognition over all
/// unprocessed audio media of the route point, marks each successfully recognized
/// media object as processed, appends the recognized text to the point's
/// description and raises PropertyChanged("Description") on the UI thread.
/// Progress (a fraction in [0, 1]) is broadcast via MessagingCenter per audio file.
/// </summary>
/// <returns>true when the point ends up with a non-empty description; otherwise false.</returns>
private async Task<bool> recognizeSpeechToText()
{
    SyncServer syncRoute = new SyncServer();

    // "synced" is true when the route does NOT need syncing; otherwise try to sync now.
    bool synced = !await syncRoute.SyncRouteIsNeedAsync(_vpoint.RouteId);
    if (!synced)
    {
        synced = await syncRoute.Sync(_vpoint.RouteId, false);
    }

    if (synced)
    {
        TokenStoreService tokenService = new TokenStoreService();
        string authToken = await tokenService.GetAuthTokenAsync();

        var audios = GetUnprocessedAudios();
        int index = 0;
        int count = audios.Count();

        SpeechToTextHelper speechToText = new SpeechToTextHelper(authToken);
        string oldDescription = _vpoint.Description;
        var sb = new StringBuilder();

        foreach (var audio in audios)
        {
            string textResult = await speechToText.TryRecognizeAudioAsync(audio.RoutePointMediaObjectId);
            if (speechToText.LastHttpStatusCode == HttpStatusCode.OK)
            {
                // Placeholder text ("text not recognized") when the service returned an empty result.
                sb.AppendLine(string.IsNullOrEmpty(textResult) ? "Текст не распознан" : textResult);

                // Persist the recognition result so this audio is skipped on the next pass.
                ViewRoutePointMediaObject vMediaObject = new ViewRoutePointMediaObject();
                vMediaObject.Load(audio.RoutePointMediaObjectId);
                vMediaObject.Processed = true;
                vMediaObject.ProcessResultText = textResult;
                vMediaObject.Save();
            }

            index++;
            // Fraction of audios processed, in [0, 1]. The original
            // "(double)index * 100 / count / 100" cancelled out to the same value.
            double percent = (double)index / count;
            Xamarin.Forms.MessagingCenter.Send<SyncProgressImageLoadingMessage>(
                new SyncProgressImageLoadingMessage()
                {
                    RouteId = _vpoint.RouteId,
                    ProgressValue = percent
                },
                string.Empty);
        }

        string newDescription = sb.ToString();
        // BUGFIX: use null-safe static string.Equals — _vpoint.Description may be null,
        // in which case oldDescription.Equals(...) threw NullReferenceException.
        if (!string.IsNullOrEmpty(newDescription) && !string.Equals(oldDescription, newDescription))
        {
            _vpoint.Description += Environment.NewLine + newDescription;
            _vpoint.Version++;
            _vpoint.Save();

            // PropertyChanged must be raised on the UI thread for data binding.
            Device.BeginInvokeOnMainThread(() =>
            {
                PropertyChanged?.Invoke(this, new PropertyChangedEventArgs("Description"));
            });
        }
    }

    return !string.IsNullOrEmpty(_vpoint.Description);
}
/// <summary>
/// Asks the sync server whether the given route has changes that still
/// need to be synchronized.
/// </summary>
/// <param name="routeId">Identifier of the route to check.</param>
/// <returns>true if the route requires synchronization; otherwise false.</returns>
private async Task<bool> updateRouteIsNeeded(string routeId)
{
    var server = new SyncServer();
    bool syncRequired = await server.SyncRouteIsNeedAsync(routeId);
    return syncRequired;
}