/// <summary>
/// Posts raw video bytes to the Emotion API video endpoint and, when the service
/// answers 202 Accepted, fetches the initial operation state from the returned
/// Operation-Location URL.
/// </summary>
/// <param name="buffer">Raw video content to upload.</param>
/// <param name="queryString">Query string appended to the video recognition URL.</param>
/// <returns>
/// The initial <see cref="VideoOperationResult"/> for the queued operation, or null
/// when the service did not answer 202 Accepted or omitted the Operation-Location header.
/// </returns>
private async Task<VideoOperationResult> InvokeEmotionAPIForVideo(IBuffer buffer, string queryString)
{
    VideoOperationResult result = null;
    Uri uri = new Uri($"{RecognitionUrl[RecognitionType.Video]}?{queryString}");

    HttpBufferContent content = new HttpBufferContent(buffer);
    content.Headers.ContentType = new HttpMediaTypeHeaderValue("application/octet-stream");

    var response = await httpClient.PostAsync(uri, content);
    if (response.StatusCode == HttpStatusCode.Accepted)
    {
        // Fix: use TryGetValue's boolean result directly instead of probing the out
        // variable with `string.IsNullOrEmpty(...) == false` afterwards.
        string location;
        if (response.Headers.TryGetValue("Operation-Location", out location) &&
            !string.IsNullOrEmpty(location))
        {
            Uri operationUri = new Uri(location);
            var locationResponse = await httpClient.GetAsync(operationUri);
            string jsonResult = await locationResponse.Content.ReadAsStringAsync();
            result = JsonConvert.DeserializeObject<VideoOperationResult>(jsonResult);
            ProcessVideoResult(result);

            if (result.Status == VideoOperationStatus.Running)
            {
                // Intentionally fire-and-forget: progress is surfaced via the events
                // raised by MonitorVideoProgress. The discard makes that intent
                // explicit (the original hid the unawaited task in an unused local).
                _ = MonitorVideoProgress(operationUri);
            }
        }
    }

    return result;
}
/// <summary>
/// Click handler: runs Emotion API video analysis on the currently loaded file stream.
/// async void is acceptable here only because this is a top-level event handler; the
/// try/catch is required so a faulted analysis cannot raise an unobservable exception
/// and tear down the process.
/// </summary>
private async void OnAnalysisByEmotionAPI(object sender, RoutedEventArgs e)
{
    try
    {
        byte[] data = UtilityHelper.StreamToByteAraray(fileStream.AsStreamForRead());

        // NOTE(review): subscription key is blank — it must be supplied (ideally from
        // secure configuration, never hard-coded in source) before the call can succeed.
        string key = "";
        EmotionAPIService api = new EmotionAPIService(key);
        api.VideoAnalysisProgressChanged += Api_VideoAnalysisProgressChanged;
        api.VideoAnalysisSuccessed += Api_VideoAnalysisSuccessed;

        // Results are delivered through the events wired above; the return value was
        // assigned to an unused local in the original and is deliberately not kept.
        await api.RecognizeVideo(data.AsBuffer(), string.Empty);
    }
    catch (Exception ex)
    {
        // Fix: an unhandled exception in an async void handler crashes the app.
        System.Diagnostics.Debug.WriteLine($"Video analysis failed: {ex.Message}");
    }
}
// Synchronously parses a video operation result out of an HTTP response body.
//
// WARNING(review): `response.Content.ReadAsStringAsync().Result` blocks on an async
// call — deadlock risk on a UI SynchronizationContext and a thread-pool hazard
// elsewhere; this method should become async Task<VideoOperationResult>.
// WARNING(review): the string surgery below relies on magic offsets (18 and 3) tied
// to the service's exact serialization of a doubly-encoded "processingResult"
// payload; verify against a captured response before changing anything here.
private VideoOperationResult ResponseResult(HttpResponseMessage response)
{
    VideoOperationResult videoResult = null;
    if (response.StatusCode == System.Net.HttpStatusCode.OK)
    {
        // Blocking wait on an async read (see warning above).
        string content = response.Content.ReadAsStringAsync().Result;
        // Strip every backslash so the escaped, nested JSON becomes parseable text.
        content = content.Replace("\\", "");
        if (content.Contains("processingResult"))
        {
            // +18 is len("processingResult") + 2 — presumably skipping the closing
            // quote and colon to remove the quote wrapping the nested JSON object.
            // TODO confirm against an actual payload.
            int i1 = content.IndexOf("processingResult") + 18;
            content = content.Remove(i1, 1);
            // Remove the matching trailing quote (plus adjacent characters) before
            // the final brace so the nested object reads as inline JSON.
            int i2 = content.LastIndexOf('}') - 3;
            content = content.Remove(i2, 3);
        }
        videoResult = Newtonsoft.Json.JsonConvert.DeserializeObject <VideoOperationInfoResult <VideoAggregateRecognitionResult> >(content);
    }
    // Returns null for any non-200 response.
    return(videoResult);
}
/// <summary>
/// Dispatches a video operation status update to the matching subscriber event.
/// </summary>
/// <param name="result">Current operation state returned by the service.</param>
private void ProcessVideoResult(VideoOperationResult result)
{
    switch (result.Status)
    {
        case VideoOperationStatus.Succeeded:
            // The service delivers the final payload as a JSON string; materialize it
            // before notifying subscribers.
            var progressResult = JsonConvert.DeserializeObject<VideoProcessingResult>(result.ProcessingResult);
            VideoAnalysisSuccessed?.Invoke(this, progressResult);
            break;
        case VideoOperationStatus.Running:
            // Fix: use the null-conditional invoke, consistent with the Succeeded
            // case above; the original direct call threw NullReferenceException when
            // no handler was subscribed.
            VideoAnalysisProgressChanged?.Invoke(this, result.Progress);
            break;
        case VideoOperationStatus.Uploading:
        case VideoOperationStatus.Failed:
        case VideoOperationStatus.NotStarted:
            // Transitional/terminal states with no dedicated notification.
            break;
    }
}
/// <summary>
/// Builds a typed operation result by copying the base operation state from
/// <paramref name="other"/> and attaching the deserialized processing payload.
/// </summary>
/// <param name="other">Base operation result whose state is copied into this instance.</param>
/// <param name="processingResult">Typed payload exposed via <see cref="ProcessingResult"/>.</param>
public VideoOperationInfoResult(VideoOperationResult other, T processingResult)
    : base(other) => ProcessingResult = processingResult;
/// <summary>
/// Dispatches a video operation status update to the matching subscriber event.
/// NOTE(review): an identical handler exists elsewhere in this file — consider
/// consolidating into one shared implementation.
/// </summary>
/// <param name="result">Current operation state returned by the service.</param>
private void ProcessVideoResult(VideoOperationResult result)
{
    switch (result.Status)
    {
        case VideoOperationStatus.Succeeded:
            // The service delivers the final payload as a JSON string; materialize it
            // before notifying subscribers.
            var progressResult = JsonConvert.DeserializeObject<VideoProcessingResult>(result.ProcessingResult);
            VideoAnalysisSuccessed?.Invoke(this, progressResult);
            break;
        case VideoOperationStatus.Running:
            // Fix: use the null-conditional invoke, consistent with the Succeeded
            // case above; the original direct call threw NullReferenceException when
            // no handler was subscribed.
            VideoAnalysisProgressChanged?.Invoke(this, result.Progress);
            break;
        case VideoOperationStatus.Uploading:
        case VideoOperationStatus.Failed:
        case VideoOperationStatus.NotStarted:
            // Transitional/terminal states with no dedicated notification.
            break;
    }
}
/// <summary>
/// Runs Emotion API video analysis on <paramref name="stream"/>, polls until the
/// operation completes, and averages the per-event window-mean emotion scores.
/// </summary>
/// <param name="stream">Video content to analyze.</param>
/// <returns>
/// Emotion name → average window-mean score across all recognized events; an empty
/// dictionary when the operation did not succeed or produced no scored events; null
/// when any exception occurred (preserved contract — callers rely on the null signal).
/// </returns>
public static async Task<Dictionary<string, float>> GetEmotionsVideo(Stream stream)
{
    try
    {
        EmotionServiceClient client = new EmotionServiceClient(key);
        VideoEmotionRecognitionOperation videoOperation = await client.RecognizeInVideoAsync(stream);

        // Poll until a terminal state. The original also kept a "ciclo" trace string
        // that was built but never read, and fetched the status once before the loop
        // only to overwrite it immediately — both removed.
        VideoOperationResult operationResult;
        while (true)
        {
            operationResult = await client.GetOperationResultAsync(videoOperation);
            if (operationResult.Status == VideoOperationStatus.Succeeded ||
                operationResult.Status == VideoOperationStatus.Failed)
            {
                break;
            }

            // Fix: await the delay. Task.Delay(15000).Wait() blocked a thread-pool
            // thread inside an async method for the whole polling interval.
            await Task.Delay(15000);
        }

        Dictionary<string, float> totals = new Dictionary<string, float>();
        Dictionary<string, float> scores = new Dictionary<string, float>();

        if (operationResult.Status == VideoOperationStatus.Succeeded)
        {
            var info = ((VideoOperationInfoResult<VideoAggregateRecognitionResult>)operationResult).ProcessingResult;
            int events = 0;

            if (info.Fragments != null)
            {
                foreach (var fragment in info.Fragments)
                {
                    if (fragment.Events == null)
                    {
                        continue;
                    }

                    foreach (var eventGroup in fragment.Events)
                    {
                        foreach (var ev in eventGroup)
                        {
                            if (ev.WindowMeanScores == null)
                            {
                                continue;
                            }

                            events++;
                            // Iterate the ranked list directly; the original copied it
                            // into a throwaway Dictionary first.
                            foreach (var score in ev.WindowMeanScores.ToRankedList())
                            {
                                if (totals.ContainsKey(score.Key))
                                {
                                    totals[score.Key] += score.Value;
                                }
                                else
                                {
                                    totals.Add(score.Key, score.Value);
                                }
                            }
                        }
                    }
                }
            }

            // events > 0 whenever totals is non-empty, but guard the division
            // explicitly rather than relying on that invariant.
            if (events > 0)
            {
                foreach (var emotion in totals)
                {
                    scores.Add(emotion.Key, emotion.Value / events);
                }
            }
        }

        return scores;
    }
    catch (Exception)
    {
        // Preserved contract: swallow the error and signal failure with null.
        // NOTE(review): consider logging the exception before returning.
        return null;
    }
}