private async Task LoadData()
        {
            RingText = "Loading data...";
            RingContentVisibility = true;

            // Load the recorded video into a new player element.
            StorageFile videoFile = await StorageFile.GetFileFromPathAsync(recordModel.PathToVideo);

            MediaPlayerElement player = new MediaPlayerElement();

            player.AreTransportControlsEnabled = true;
            player.Source = MediaSource.CreateFromStorageFile(videoFile);
            VideoPlayer   = player;

            // If the DB has no cached emotion result, obtain one from the
            // Emotion API (Microsoft Cognitive Services) and persist it.
            if (recordModel.ResultEmotion.Count == 0)
            {
                if (string.IsNullOrEmpty(recordModel.OperationLocation))
                {
                    // The service only accepts videos smaller than 100 MB.
                    var fileProps = await videoFile.GetBasicPropertiesAsync();

                    double sizeFileMb = (double)fileProps.Size / 1024 / 1024;
                    if (sizeFileMb < 100)
                    {
                        await Processing(videoFile);
                    }
                    else
                    {
                        await ErrorMessageDialog("The video file size exceeds 100 MB");

                        GoBackFunc();
                        // BUGFIX: stop here — previously the method kept running
                        // after navigating back (cropped emoji, started the sync
                        // timer, toggled the ring) for a page we just left.
                        return;
                    }
                }
                else
                {
                    // An upload is already registered on the service; resume
                    // polling the stored operation instead of re-uploading.
                    operationLocation = recordModel.OperationLocation;
                    VideoAggregateRecognitionResult operationResult = await DetectEmotion(operationLocation);

                    if (operationResult != null)
                    {
                        _videoResult = operationResult;
                        await SaveResultInDb(_videoResult);
                    }
                }
            }

            // Crop the individual emotion images out of the emoji sprite sheet.
            StorageFile emotionsFile = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Resources/Emoji.png"));

            await GetEmotionImages(emotionsFile);

            // (Re)create the timer that syncs the displayed emotion with the
            // playback position. Stop any previous instance first (matches the
            // LoadVideoButton_Click handler) so repeated loads don't leak a
            // second ticking timer.
            _emotionSyncTimer?.Stop();
            _emotionSyncTimer          = new DispatcherTimer();
            _emotionSyncTimer.Interval = TimeSpan.FromMilliseconds(500);
            _emotionSyncTimer.Tick    += (e2, s) => { UpdateEmotionForTime(); };
            _emotionSyncTimer.Start();

            RingContentVisibility = false;
        }
        private async Task Processing(StorageFile videoFile)
        {
            // Upload the file, wait for recognition, then cache the outcome.
            var recognition = await UploadAndDetectEmotions(videoFile);
            if (recognition == null)
            {
                return;
            }

            _videoResult = recognition;
            await SaveResultInDb(_videoResult);
        }
        /// <summary>
        /// Polls the Emotion API operation at <paramref name="operationLocation"/>
        /// until it succeeds or fails, showing progress in the ring text.
        /// </summary>
        /// <param name="operationLocation">Operation URL returned by the upload call.</param>
        /// <returns>The aggregate recognition result, or null on error/failure.</returns>
        private async Task <VideoAggregateRecognitionResult> DetectEmotion(string operationLocation)
        {
            VideoAggregateRecognitionResult result = null;
            string errorMessage = string.Empty;

            // BUGFIX: dispose the HttpClient and each polled response — the
            // original leaked both on every call/iteration.
            // NOTE(review): a single shared HttpClient field would be better
            // still (per-call clients risk socket exhaustion) — needs a wider
            // change than this block.
            using (HttpClient client = new HttpClient())
            {
                client.Timeout = TimeSpan.FromMinutes(10);
                client.DefaultRequestHeaders.Add("Ocp-Apim-Subscription-Key", _subscriptionKey);

                RingText = "Processing with server status...";
                int attemptNumber = 1;
                VideoOperationResult videoResult;

                // Poll until the operation reaches a terminal state or the
                // response cannot be parsed.
                while (true)
                {
                    // Dispose each response once its payload has been extracted.
                    // Assumes ResponseResult fully consumes the response body —
                    // TODO confirm against its implementation.
                    using (HttpResponseMessage response = await client.SendAsync(new HttpRequestMessage(HttpMethod.Get, operationLocation)))
                    {
                        videoResult = ResponseResult(response);

                        if (videoResult != null && (videoResult.Status == VideoOperationStatus.Succeeded || videoResult.Status == VideoOperationStatus.Failed))
                        {
                            break;
                        }
                        if (videoResult == null)
                        {
                            errorMessage = ErrorMessage(response, true);
                            break;
                        }
                    }

                    RingText = string.Format("Server status ({0}): {1}, wait {2} seconds...", attemptNumber, videoResult.Status, QueryWaitTime.TotalSeconds);
                    attemptNumber++;
                    await Task.Delay(QueryWaitTime);
                }

                if (!string.IsNullOrEmpty(errorMessage))
                {
                    await ErrorMessageDialog(errorMessage);
                }
                else if (videoResult.Status == VideoOperationStatus.Failed)
                {
                    // NOTE(review): if videoResult is null AND ErrorMessage()
                    // returned an empty string, this dereference throws — same
                    // latent flaw as the original; confirm ErrorMessage never
                    // returns empty for a null result.
                    await ErrorMessageDialog("Recognize emotions failed.");
                }
                else
                {
                    // Terminal Succeeded: unwrap the typed processing result.
                    result = ((VideoOperationInfoResult <VideoAggregateRecognitionResult>)videoResult).ProcessingResult;
                }
            }

            return(result);
        }
        /// <summary>
        /// Maps the service recognition result onto EF entities and saves them,
        /// then mirrors the saved values back into the view model.
        /// </summary>
        /// <param name="videoResult">Aggregate result returned by the Emotion API.</param>
        private async Task SaveResultInDb(VideoAggregateRecognitionResult videoResult)
        {
            RingText = "Save in DB...";
            using (var db = new PrototypingContext())
            {
                Record record = db.Records.Single(r => r.RecordId == recordModel.RecordId);
                record.EmotionVideoTimeScale = videoResult.Timescale;

                var fragments = new List <EmotionFragment>();
                foreach (var fragment in videoResult.Fragments)
                {
                    // Skip fragments that carry no event data.
                    if (fragment == null || fragment.Events == null)
                    {
                        continue;
                    }

                    var meanScores = new List <EmotionMeanScores>();
                    foreach (VideoAggregateEvent[] eventWindow in fragment.Events)
                    {
                        if (eventWindow.Length == 0)
                        {
                            continue;
                        }

                        // Only the first event of each window is persisted.
                        Scores window = eventWindow[0].WindowMeanScores;
                        meanScores.Add(new EmotionMeanScores()
                        {
                            AngerScore     = window.Anger,
                            ContemptScore  = window.Contempt,
                            DisgustScore   = window.Disgust,
                            FearScore      = window.Fear,
                            HappinessScore = window.Happiness,
                            NeutralScore   = window.Neutral,
                            SadnessScore   = window.Sadness,
                            SurpriseScore  = window.Surprise
                        });
                    }

                    fragments.Add(new EmotionFragment()
                    {
                        Duration = fragment.Duration,
                        Interval = fragment.Interval,
                        Start    = fragment.Start,
                        Scores   = meanScores
                    });
                }

                record.ResultEmotion = fragments;
                await db.SaveChangesAsync();

                // Keep the in-memory model in step with the persisted record.
                recordModel.EmotionVideoTimeScale = record.EmotionVideoTimeScale;
                recordModel.ResultEmotion         = fragments;
            }
        }
        /// <summary>
        /// Uploads the video to the Emotion API, stores the returned operation
        /// location in the DB (so an interrupted session can resume polling),
        /// and waits for the recognition result.
        /// </summary>
        /// <param name="videoFile">The video file to analyze (caller enforces the 100 MB cap).</param>
        /// <returns>The aggregate recognition result, or null on any failure.</returns>
        private async Task <VideoAggregateRecognitionResult> UploadAndDetectEmotions(StorageFile videoFile)
        {
            // FIX: removed the unused EmotionServiceClient local — it was
            // created but never used; UploadEmotion does the actual upload.
            VideoAggregateRecognitionResult aggResult = null;

            try
            {
                using (Stream videoFileStream = await videoFile.OpenStreamForReadAsync())
                {
                    RingText = "Uploading video...";

                    // Buffer the whole video in memory; bounded by the 100 MB
                    // size check performed before this method is called.
                    byte[] bytesVideo;
                    using (var memoryStream = new MemoryStream())
                    {
                        await videoFileStream.CopyToAsync(memoryStream);

                        bytesVideo = memoryStream.ToArray();
                    }

                    // Start recognition on the service; returns the operation URL.
                    operationLocation = await UploadEmotion(bytesVideo);

                    // Persist the operation location immediately so a crash or
                    // navigation doesn't force a re-upload.
                    using (var db = new PrototypingContext())
                    {
                        Record record = db.Records.Single(r => r.RecordId == recordModel.RecordId);
                        record.OperationLocation = operationLocation;
                        // BUGFIX: await the async save instead of blocking with
                        // the synchronous SaveChanges() inside an async method
                        // (consistent with SaveResultInDb).
                        await db.SaveChangesAsync();
                        recordModel.OperationLocation = operationLocation;
                    }

                    // Poll the operation until the aggregate result is available.
                    aggResult = await DetectEmotion(operationLocation);
                }
            }
            catch (Exception)
            {
                // Deliberate catch-all: any failure (network, service, DB) is
                // surfaced as one user-facing message and we navigate back.
                await ErrorMessageDialog("Oops, error. Check your internet connection.");

                GoBackFunc();
            }

            return(aggResult);
        }
        /// <summary>
        /// Lets the user pick a video file, runs emotion recognition on it and
        /// (re)starts the emotion/playback sync timer on success.
        /// async void is acceptable here: top-level event handler.
        /// </summary>
        private async void LoadVideoButton_Click(object sender, RoutedEventArgs e)
        {
            _loadVideoButton.IsEnabled = false;
            try
            {
                MainWindow window = (MainWindow)Application.Current.MainWindow;

                Microsoft.Win32.OpenFileDialog openDlg = new Microsoft.Win32.OpenFileDialog();
                openDlg.Filter = "Video files (*.mp4, *.mov, *.wmv)|*.mp4;*.mov;*.wmv";
                bool?result = openDlg.ShowDialog(window);

                // BUGFIX: "result != true" also covers null (dialog dismissed);
                // the original "!(bool)result" threw InvalidOperationException
                // on null.
                if (result != true)
                {
                    // BUGFIX: the original returned here without re-enabling the
                    // button, leaving it disabled forever; finally handles it now.
                    return;
                }

                string videoFilePath = openDlg.FileName;
                Uri    videoUri      = new Uri(videoFilePath);

                _videoResultControl.IsWorking = true;
                var operationResult = await UploadAndDetectEmotions(videoFilePath);

                _videoResultControl.IsWorking = false;

                if (operationResult != null)
                {
                    _videoResult = operationResult;
                    _videoResultControl.SourceUri = videoUri;

                    // Restart the sync timer, stopping any previous instance.
                    _emotionSyncTimer?.Stop();
                    _emotionSyncTimer          = new DispatcherTimer();
                    _emotionSyncTimer.Interval = TimeSpan.FromMilliseconds(500);
                    _emotionSyncTimer.Tick    += (e2, s) => { UpdateEmotionForTime(); };
                    _emotionSyncTimer.Start();

                    LogVideoFragments();
                }
            }
            finally
            {
                // Always restore the button — on early return, success or throw.
                _loadVideoButton.IsEnabled = true;
            }
        }
        /// <summary>
        /// Uploads the video to Project Oxford and detects emotions.
        /// </summary>
        /// <param name="videoFilePath">The video file path.</param>
        /// <returns></returns>
        /// <summary>
        /// Uploads the video to Project Oxford and detects emotions.
        /// </summary>
        /// <param name="videoFilePath">The video file path.</param>
        /// <returns>The aggregate recognition result, or null on failure.</returns>
        private async Task <VideoAggregateRecognitionResult> UploadAndDetectEmotions(string videoFilePath)
        {
            var    window          = (MainWindow)Application.Current.MainWindow;
            string subscriptionKey = window.ScenarioControl.SubscriptionKey;

            window.Log("EmotionServiceClient is created");

            // -----------------------------------------------------------------------
            // KEY SAMPLE CODE STARTS HERE
            // -----------------------------------------------------------------------

            // Create the Project Oxford Emotion API service client.
            var serviceClient = new EmotionServiceClient(subscriptionKey);

            window.Log("Calling EmotionServiceClient.RecognizeInVideoAsync()...");
            try
            {
                using (Stream uploadStream = File.OpenRead(videoFilePath))
                {
                    // Upload the video and tell the server to start recognizing.
                    window.Log("Start uploading video");
                    VideoEmotionRecognitionOperation operation = await serviceClient.RecognizeInVideoAsync(uploadStream);

                    window.Log("Finished uploading video");

                    // Poll the service until the operation reaches a terminal
                    // state (Succeeded or Failed), logging progress in between.
                    VideoOperationResult status = await serviceClient.GetOperationResultAsync(operation);
                    while (status.Status != VideoOperationStatus.Succeeded && status.Status != VideoOperationStatus.Failed)
                    {
                        window.Log(string.Format("Server status: {0}, wait {1} seconds...", status.Status, QueryWaitTime.TotalSeconds));
                        await Task.Delay(QueryWaitTime);
                        status = await serviceClient.GetOperationResultAsync(operation);
                    }

                    window.Log("Finish processing with server status: " + status.Status);

                    // Processing finished; check the outcome.
                    if (status.Status != VideoOperationStatus.Succeeded)
                    {
                        // Failed — report the reason and give up.
                        window.Log("Fail reason: " + status.Message);
                        return(null);
                    }

                    // Cast to the concrete operation-info result to reach the
                    // typed processing payload.
                    return(((VideoOperationInfoResult <VideoAggregateRecognitionResult>)status).ProcessingResult);
                }
            }
            catch (Exception exception)
            {
                window.Log(exception.ToString());
                return(null);
            }
            // -----------------------------------------------------------------------
            // KEY SAMPLE CODE ENDS HERE
            // -----------------------------------------------------------------------
        }