Example 1
        async Task CaptureAndProcessEmotionAsync()
        {
            try
            {
                await Dispatcher.RunAsync(CoreDispatcherPriority.High, async () =>
                {
                    var emotions     = await CaptureEmotionAsync();
                    var mostProbable =
                        emotions.ToResults()
                        .FirstOrDefault(result => result != Result.Empty);

                    // FirstOrDefault returns null when no emotion was detected;
                    // bail out rather than dereference it below.
                    if (mostProbable == null)
                    {
                        return;
                    }

                    _emoticon.Text = Emoticons.From(mostProbable.Emotion);

                    // Pick the first message for this emotion that differs from
                    // the one currently shown, so the label visibly changes.
                    var current    = _messageLabel.Text;
                    var message    =
                        EmotionMessages.Messages[mostProbable.Emotion]
                        .First(msg => msg != current);

                    _messageLabel.Text = message;

                    await ChangeStreamStateAsync(false);
                    await _speechEngine.SpeakAsync(message, _speaker);
                    await _speechEngine.SetRecognitionModeAsync(SpeechRecognitionMode.CommandPhrases);
                });
            }
            catch (Exception ex) when (DebugHelper.IsHandled<MainPage>(ex))
            {
                // Intentionally empty: the exception filter has already decided
                // the exception is handled (and can log it as a side effect).
            }
        }
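
Both examples lean on application helpers that are not shown here (CaptureEmotionAsync, Result, ToResults, Emoticons, EmotionMessages). The following is a minimal sketch, an assumption rather than the original source, of the shapes those calls imply: an Emotion enumeration, a Result carrying the winning Emotion with a Result.Empty sentinel, and a ToResults() projection ordered by score so that the first non-empty element is the most probable emotion.

        using System.Collections.Generic;
        using System.Linq;

        public enum Emotion { Anger, Contempt, Disgust, Fear, Happiness, Neutral, Sadness, Surprise }

        public class Result
        {
            public static readonly Result Empty = new Result();

            public Emotion Emotion { get; set; }
            public float   Score   { get; set; }
        }

        public static class EmotionExtensions
        {
            // Hypothetical projection: order raw scores by confidence, highest first,
            // so FirstOrDefault over the non-empty results yields the most probable one.
            public static IEnumerable<Result> ToResults(this IDictionary<Emotion, float> scores) =>
                scores.OrderByDescending(pair => pair.Value)
                      .Select(pair => new Result { Emotion = pair.Key, Score = pair.Value });
        }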
Example 2
        async void ProcessCurrentVideoFrame(object sender, object e)
        {
            // If the semaphore is already held, we're still processing the previous
            // frame. In that case don't block waiting on it; exit immediately and
            // drop this frame.
            if (!_isStreaming || !_frameProcessingSemaphore.Wait(0))
            {
                return;
            }

            try
            {
                using (var previewFrame = new VideoFrame(BitmapPixelFormat.Nv12,
                                                         (int)_videoProperties.Width,
                                                         (int)_videoProperties.Height))
                {
                    await _mediaManager.GetPreviewFrameAsync(previewFrame);

                    IList<DetectedFace> faces = null;

                    // The returned VideoFrame should be in the supported NV12 format but we need to verify this.
                    if (FaceDetector.IsBitmapPixelFormatSupported(previewFrame.SoftwareBitmap.BitmapPixelFormat))
                    {
                        faces = await _faceTracker.ProcessNextFrameAsync(previewFrame);
                    }

                    // Create our visualization using the frame dimensions and face results but run it on the UI thread.
                    var previewFrameSize = new Size(previewFrame.SoftwareBitmap.PixelWidth, previewFrame.SoftwareBitmap.PixelHeight);
                    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
                    {
                        SetupVisualization(previewFrameSize, faces);

                        if (_isProcessing)
                        {
                            return;
                        }

                        var emotions = await CaptureEmotionAsync();
                        if (!emotions.IsNullOrEmpty())
                        {
                            var mostProbable =
                                emotions.ToResults()
                                .FirstOrDefault(result => result != Result.Empty);

                            if (mostProbable == null)
                            {
                                _messageLabel.Text = string.Empty;
                                _emoticon.Text     = string.Empty;
                            }
                            else
                            {
                                _emoticon.Text = Emoticons.From(mostProbable.Emotion);

                                var current = _messageLabel.Text;

                                // Re-roll until the new message differs from the one
                                // currently shown (this assumes at least two messages
                                // per emotion, otherwise the loop would never exit).
                                var message = EmotionMessages.Messages[mostProbable.Emotion].RandomElement();
                                while (current == message)
                                {
                                    message = EmotionMessages.Messages[mostProbable.Emotion].RandomElement();
                                }
                                _messageLabel.Text = message;
                                await _speechEngine.SpeakAsync(message, _speaker);

                                // After a fixed number of captures, stop streaming.
                                ++_captureCounter;
                                if (_captureCounter >= MaxCaptureBeforeReset)
                                {
                                    await ChangeStreamStateAsync(false);
                                }
                            }
                        }
                    });
                }
            }
            catch (Exception ex) when (DebugHelper.IsHandled<MainPage>(ex))
            {
                // Intentionally empty: the exception filter has already decided
                // the exception is handled (and can log it as a side effect).
            }
            finally
            {
                _frameProcessingSemaphore.Release();
            }
        }
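
Example 2 also calls IsNullOrEmpty() and RandomElement(), which are not BCL members. A plausible sketch of those extensions, again an assumption rather than the original code:

        using System;
        using System.Collections.Generic;
        using System.Linq;

        public static class EnumerableExtensions
        {
            static readonly Random Random = new Random();

            // True when the sequence is null or yields no elements.
            public static bool IsNullOrEmpty<T>(this IEnumerable<T> source) =>
                source == null || !source.Any();

            // Uniformly random element; used above to vary the spoken message.
            public static T RandomElement<T>(this IReadOnlyList<T> source) =>
                source[Random.Next(source.Count)];
        }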
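
The non-blocking gate at the top of ProcessCurrentVideoFrame is a pattern worth isolating: SemaphoreSlim.Wait(0) tries to take the semaphore without blocking, so a timer tick that arrives while the previous frame is still in flight drops the new frame instead of queuing behind it. A self-contained sketch of just that pattern (the FrameGate type and processFrame delegate are illustrative, not from the original):

        using System;
        using System.Threading;
        using System.Threading.Tasks;

        class FrameGate
        {
            readonly SemaphoreSlim _gate = new SemaphoreSlim(1);

            public async Task OnTickAsync(Func<Task> processFrame)
            {
                // Wait(0) returns immediately: true if the semaphore was free,
                // false if the previous frame is still being processed.
                if (!_gate.Wait(0))
                {
                    return; // drop this frame rather than queue it
                }

                try
                {
                    await processFrame();
                }
                finally
                {
                    _gate.Release();
                }
            }
        }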