        public async Task<VideoOperationResult> GetOperationResultAsync(VideoEmotionRecognitionOperation operation)
        {
            var wireResult = await GetAsync<string, VideoOperationInfoResult<string>>(operation.Url, null);

            if (wireResult.Status != VideoOperationStatus.Succeeded)
            {
                return wireResult;
            }

            var aggregateResult = JsonConvert.DeserializeObject<VideoAggregateRecognitionResult>(wireResult.ProcessingResult);

            return new VideoOperationInfoResult<VideoAggregateRecognitionResult>(wireResult, aggregateResult);
        }
        /// <summary>
        /// Get emotion video operation result.
        /// </summary>
        /// <param name="operation">Opaque operation object, from RecognizeInVideoAsync response.</param>
        /// <returns>
        /// The output type will vary depending on the outputStyle requested. For example, if you requested <code>VideoOutputStyle.Aggregate</code>
        /// (default), you would get a VideoOperationInfoResult&lt;VideoAggregateRecognitionResult&gt; object.
        /// <code>
        /// var result = await GetOperationResultAsync(operation);
        /// if (result.Status == VideoOperationStatus.Succeeded)
        /// {
        ///     var details = result as VideoOperationInfoResult&lt;VideoAggregateRecognitionResult&gt;;
        ///     ...
        /// }
        /// </code>
        /// The deserialized result is then available through the <code>ProcessingResult</code> property.
        /// </returns>
        public async Task<VideoOperationResult> GetOperationResultAsync(VideoEmotionRecognitionOperation operation)
        {
            var wireResult = await GetAsync<string, VideoOperationInfoResult<string>>(operation.Url, null);

            // The wire result holds the key result information in a string; deserialize it here so clients
            // don't have to invoke JsonConvert.DeserializeObject() themselves.

            if (wireResult.Status == VideoOperationStatus.Succeeded)
            {
                var aggregateResult = JsonConvert.DeserializeObject<VideoAggregateRecognitionResult>(wireResult.ProcessingResult);
                return new VideoOperationInfoResult<VideoAggregateRecognitionResult>(wireResult, aggregateResult);
            }

            return wireResult;
        }
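A minimal sketch of how a client might consume this method, mirroring the documentation comment above. The client and videoOperation variables and the 20-second delay are assumptions; the fuller UploadAndDetectEmotions sample below shows the same pattern.

        // Poll the operation until it reaches a terminal state, then cast to the aggregate result type.
        VideoOperationResult result;
        do
        {
            await Task.Delay(TimeSpan.FromSeconds(20)); // assumed polling interval
            result = await client.GetOperationResultAsync(videoOperation);
        }
        while (result.Status != VideoOperationStatus.Succeeded && result.Status != VideoOperationStatus.Failed);

        if (result.Status == VideoOperationStatus.Succeeded)
        {
            var aggregate = ((VideoOperationInfoResult<VideoAggregateRecognitionResult>)result).ProcessingResult;
            // aggregate.Fragments holds the per-fragment emotion events.
        }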
        public async Task<IActionResult> React(long youtubeVideoDescriptionID, IFormFile recordedVideo)
        {
            if (null == recordedVideo && Request.Form.Files.Count > 0)
            {
                recordedVideo = Request.Form.Files[0];
            }

            var videoDescription = await db.YoutubeVideoDescriptions.FirstOrDefaultAsync(x => x.ID == youtubeVideoDescriptionID);

            string url = await PersistVideoFile(recordedVideo);

            VideoEmotionRecognitionOperation recognizeResult = null;

            try
            {
                recognizeResult = await emotionService.RecognizeInVideoAsync(url);
            }
            catch (Exception ex)
            {
                // Don't swallow the failure silently; recognizeResult would be null and dereferenced below.
                return Json(new { success = false, error = ex.Message });
            }

            Reaction reaction = new Reaction();

            reaction.DateCreated = DateTimeOffset.Now;
            reaction.YoutubeVideoDescriptionID = youtubeVideoDescriptionID;
            reaction.OperationUrl = recognizeResult.Url;
            reaction.ApiKey       = recognizeResult.ApiKey;
            db.Reactions.Add(reaction);
            await db.SaveChangesAsync();

            return Json(new
            {
                success = true,
                redirectUrl = Url.Action("Results", "Default", new { youtubeVideoDescriptionID = youtubeVideoDescriptionID, reactionID = reaction.ID })
            });
            //     return RedirectToAction("Results", "Default", new { youtubeVideoDescriptionID = youtubeVideoDescriptionID, reactionID = reaction.ID });
        }
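PersistVideoFile is called above but not shown. Since RecognizeInVideoAsync(url) requires a URL the Emotion service can download from, the helper presumably saves the upload somewhere publicly reachable. A minimal sketch under that assumption; the uploads folder, naming scheme, and the injected env host-environment field are all hypothetical:

        // Hypothetical helper: persist the upload under wwwroot and return a public URL for it.
        private async Task<string> PersistVideoFile(IFormFile recordedVideo)
        {
            string fileName = Guid.NewGuid() + ".mp4";                            // assumed naming scheme
            string filePath = Path.Combine(env.WebRootPath, "uploads", fileName); // env: injected IWebHostEnvironment (assumption)

            using (var fileStream = new FileStream(filePath, FileMode.Create))
            {
                await recordedVideo.CopyToAsync(fileStream);
            }

            // The service must be able to fetch this URL, so the site has to be internet-reachable.
            return $"{Request.Scheme}://{Request.Host}/uploads/{fileName}";
        }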
        /// <summary>
        /// Executes emotion analysis on a video stream and starts a new task to poll for the operation result.
        /// </summary>
        /// <param name="videoStream">Video stream to analyze</param>
        /// <returns>A <see cref="Task"/> representing the asynchronous operation</returns>
        public async Task ExecuteVideoEmotionAnalysis(Stream videoStream)
        {
            try
            {
                VideoEmotionRecognitionOperation operation = await _emotionServiceClient.RecognizeInVideoAsync(videoStream);

                if (operation == null)
                {
                    RaiseVideoOperationStatus(new VideoOperationStatusEventArgs("Failed", "Failed to analyze emotions in video"));
                    return;
                }

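                // GetVideoEmotionResultAsync is async void, so this kicks off the polling loop without awaiting its completion.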
                await Task.Run(() => GetVideoEmotionResultAsync(operation));
            }
            catch (ClientException ex)
            {
                RaiseVideoOperationStatus(new VideoOperationStatusEventArgs("Failed", $"Failed to execute video operation: {ex.Error.Message}"));
            }
            catch (Exception ex)
            {
                RaiseVideoOperationStatus(new VideoOperationStatusEventArgs("Failed", $"Failed to execute video operation: {ex.Message}"));
            }
        }
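The Raise* helpers and event-args types used above and in GetVideoEmotionResultAsync below are not part of the excerpt. A minimal sketch of declarations that would satisfy those calls; the event names and all bodies are assumptions, only the constructor and property shapes are dictated by the calling code:

        // Hypothetical declarations backing the Raise* calls above.
        public class VideoOperationStatusEventArgs : EventArgs
        {
            public VideoOperationStatusEventArgs(string status, string message)
            {
                Status  = status;
                Message = message;
            }

            public string Status { get; }
            public string Message { get; }
        }

        public class VideoOperationResultEventArgs : EventArgs
        {
            public string Status { get; set; }
            public string Message { get; set; }
            public VideoAggregateRecognitionResult EmotionResult { get; set; }
        }

        public event EventHandler<VideoOperationStatusEventArgs> VideoOperationStatusChanged;   // assumed event name
        public event EventHandler<VideoOperationResultEventArgs> VideoOperationCompleted;       // assumed event name

        private void RaiseVideoOperationStatus(VideoOperationStatusEventArgs e) => VideoOperationStatusChanged?.Invoke(this, e);

        private void RaiseVideoOperationCompleted(VideoOperationResultEventArgs e) => VideoOperationCompleted?.Invoke(this, e);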
        /// <summary>
        /// Uploads the video to Project Oxford and detects emotions.
        /// </summary>
        /// <param name="videoFilePath">The video file path.</param>
        /// <returns>The aggregate recognition result, or null if the operation failed.</returns>
        private async Task<VideoAggregateRecognitionResult> UploadAndDetectEmotions(string videoFilePath)
        {
            MainWindow window          = (MainWindow)Application.Current.MainWindow;
            string     subscriptionKey = window.ScenarioControl.SubscriptionKey;

            window.Log("EmotionServiceClient is created");

            // -----------------------------------------------------------------------
            // KEY SAMPLE CODE STARTS HERE
            // -----------------------------------------------------------------------

            //
            // Create Project Oxford Emotion API Service client
            //
            EmotionServiceClient emotionServiceClient = new EmotionServiceClient(subscriptionKey);

            window.Log("Calling EmotionServiceClient.RecognizeInVideoAsync()...");
            try
            {
                using (Stream videoFileStream = File.OpenRead(videoFilePath))
                {
                    //
                    // Upload the video, and tell the server to start recognizing emotions
                    //
                    window.Log("Start uploading video");
                    VideoEmotionRecognitionOperation videoOperation = await emotionServiceClient.RecognizeInVideoAsync(videoFileStream);

                    window.Log("Finished uploading video");


                    //
                    // Starts querying service status
                    //
                    VideoOperationResult result;
                    while (true)
                    {
                        result = await emotionServiceClient.GetOperationResultAsync(videoOperation);

                        if (result.Status == VideoOperationStatus.Succeeded || result.Status == VideoOperationStatus.Failed)
                        {
                            break;
                        }

                        window.Log(string.Format("Server status: {0}, wait {1} seconds...", result.Status, QueryWaitTime.TotalSeconds));
                        await Task.Delay(QueryWaitTime);
                    }

                    window.Log("Finish processing with server status: " + result.Status);

                    //
                    // Processing finished, check the result
                    //
                    if (result.Status == VideoOperationStatus.Succeeded)
                    {
                        //
                        // Get the processing result by casting to the actual operation result type
                        //
                        VideoAggregateRecognitionResult aggregateResult = ((VideoOperationInfoResult<VideoAggregateRecognitionResult>)result).ProcessingResult;
                        return aggregateResult;
                    }
                    else
                    {
                        // Failed
                        window.Log("Fail reason: " + result.Message);
                    }

                    return null;
                }
            }
            catch (Exception exception)
            {
                window.Log(exception.ToString());
                return null;
            }
            // -----------------------------------------------------------------------
            // KEY SAMPLE CODE ENDS HERE
            // -----------------------------------------------------------------------
        }
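QueryWaitTime is referenced in the polling loop above but not defined in this excerpt; it is presumably a field of the scenario page, e.g. (the value is an assumption):

        private static readonly TimeSpan QueryWaitTime = TimeSpan.FromSeconds(20); // assumed polling interval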
        public static async Task<Dictionary<string, float>> GetEmotionsVideo(Stream stream)
        {
            try
            {
                EmotionServiceClient             client         = new EmotionServiceClient(key);
                VideoEmotionRecognitionOperation videoOperation = await client.RecognizeInVideoAsync(stream);

                VideoOperationResult operationResult;

                // Debug trace of the status transitions observed while polling.
                string ciclo = "";

                while (true)
                {
                    operationResult = await client.GetOperationResultAsync(videoOperation);

                    switch (operationResult.Status)
                    {
                    case VideoOperationStatus.NotStarted: ciclo += "NS"; break;

                    case VideoOperationStatus.Uploading: ciclo += "Upl"; break;

                    case VideoOperationStatus.Running: ciclo += "Run"; break;

                    case VideoOperationStatus.Failed: ciclo += "Fail"; break;

                    case VideoOperationStatus.Succeeded: ciclo += "Succ"; break;

                    default: ciclo += "Def"; break;
                    }

                    ciclo += "_";

                    if (operationResult.Status == VideoOperationStatus.Succeeded || operationResult.Status == VideoOperationStatus.Failed)
                    {
                        break;
                    }

                    await Task.Delay(15000);
                }

                Dictionary<string, float> dictionary = new Dictionary<string, float>();
                Dictionary<string, float> scores     = new Dictionary<string, float>();

                if (operationResult.Status == VideoOperationStatus.Succeeded)
                {
                    var info   = ((VideoOperationInfoResult<VideoAggregateRecognitionResult>)operationResult).ProcessingResult;
                    int events = 0;

                    if (info.Fragments != null)
                    {
                        foreach (var f in info.Fragments)
                        {
                            if (f.Events != null)
                            {
                                foreach (var evs in f.Events)
                                {
                                    foreach (var ev in evs)
                                    {
                                        if (ev.WindowMeanScores != null)
                                        {
                                            var meanScores = ev.WindowMeanScores.ToRankedList().ToDictionary(x => x.Key, x => x.Value);
                                            events++;

                                            foreach (var score in meanScores)
                                            {
                                                if (dictionary.ContainsKey(score.Key))
                                                {
                                                    dictionary[score.Key] += score.Value;
                                                }
                                                else
                                                {
                                                    dictionary.Add(score.Key, score.Value);
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }

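                    // Average the accumulated window-mean scores over the number of scored events.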
                    foreach (var emotion in dictionary)
                    {
                        scores.Add(emotion.Key, emotion.Value / events);
                    }
                }

                return scores;
            }
            catch (Exception)
            {
                // Swallow errors and signal failure to the caller with a null result.
                return null;
            }
        }
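A hypothetical caller for GetEmotionsVideo; the file path is illustrative:

        // Open a local video, run the analysis, and print the averaged per-emotion scores.
        using (Stream videoStream = File.OpenRead("reaction.mp4"))   // path is illustrative
        {
            Dictionary<string, float> scores = await GetEmotionsVideo(videoStream);

            if (scores != null)   // null signals failure (see the catch block above)
            {
                foreach (var pair in scores)
                {
                    Console.WriteLine($"{pair.Key}: {pair.Value:F3}");
                }
            }
        }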
        /// <summary>
        /// Checks the current status of the video emotion operation. If processing has not completed (it completes either by failing or succeeding),
        /// we check again in 20 seconds. If the process has succeeded, the result is published through an event.
        /// </summary>
        /// <param name="videoOperation">The current <see cref="VideoEmotionRecognitionOperation"/>, containing the location for the operation status</param>
        private async void GetVideoEmotionResultAsync(VideoEmotionRecognitionOperation videoOperation)
        {
            try
            {
                while (true)
                {
                    var operationResult = await _emotionServiceClient.GetOperationResultAsync(videoOperation);

                    bool isCompleted = false;

                    switch (operationResult.Status)
                    {
                    case VideoOperationStatus.Failed:
                        RaiseVideoOperationStatus(new VideoOperationStatusEventArgs("Failed", $"Video operation failed: {operationResult.Message}"));
                        isCompleted = true;
                        break;

                    case VideoOperationStatus.NotStarted:
                        RaiseVideoOperationStatus(new VideoOperationStatusEventArgs("Not started", "Video operation has not started yet"));
                        break;

                    case VideoOperationStatus.Uploading:
                        RaiseVideoOperationStatus(new VideoOperationStatusEventArgs("Uploading", "Video is uploading"));
                        break;

                    case VideoOperationStatus.Succeeded:
                        var result = operationResult as VideoOperationInfoResult<VideoAggregateRecognitionResult>;
                        RaiseVideoOperationCompleted(new VideoOperationResultEventArgs
                        {
                            Status        = "Succeeded",
                            Message       = "Video operation completed successfully",
                            EmotionResult = result.ProcessingResult,
                        });
                        isCompleted = true;
                        break;

                    case VideoOperationStatus.Running:
                    default:
                        RaiseVideoOperationStatus(new VideoOperationStatusEventArgs("Running", "Video operation is running"));
                        break;
                    }

                    if (isCompleted)
                    {
                        break;
                    }
                    else
                    {
                        await Task.Delay(TimeSpan.FromSeconds(20));
                    }
                }
            }
            catch (ClientException ex)
            {
                RaiseVideoOperationStatus(new VideoOperationStatusEventArgs("Failed", $"Failed to execute video operation: {ex.Error.Message}"));
            }
            catch (Exception ex)
            {
                RaiseVideoOperationStatus(new VideoOperationStatusEventArgs("Failed", $"Failed to get video operation result: {ex.Message}"));
            }
        }