private void SetUpListenerNewResultFromAPICall()
        {
            _frameGrabber.NewResultAvailable += (s, e) =>
            {
                Application.Current.Dispatcher.BeginInvoke((System.Action)(() =>
                {
                    if (e.TimedOut)
                    {
                        // UI feedback is left commented out in this snippet.
                        //MessageArea.Text = "API call timed out.";
                    }
                    else if (e.Exception != null)
                    {
                        //MessageArea.Text = "API Exception Message.";
                    }
                    else
                    {
                        _currentLiveCameraResult = e.Analysis;

                        if (_currentLiveCameraResult.Faces.Length > 0)
                        {
                            var faceAttributes = _currentLiveCameraResult.Faces[0].FaceAttributes;
                            _eventAggregator.PublishOnUIThread(new FaceAttributesResultEvent()
                            {
                                FaceAttributesResult = faceAttributes
                            });
                        }
                    }
                }));
            };
        }
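The handler above publishes the first detected face's attributes through Caliburn.Micro's IEventAggregator. Any view model that implements IHandle<FaceAttributesResultEvent> can consume the event; a minimal subscriber sketch (the view model name and what it does with the attributes are illustrative assumptions):

        public class FaceAttributesViewModel : IHandle<FaceAttributesResultEvent>
        {
            public FaceAttributesViewModel(IEventAggregator eventAggregator)
            {
                // Register this instance so Handle() runs for each published event.
                eventAggregator.Subscribe(this);
            }

            public void Handle(FaceAttributesResultEvent message)
            {
                var attributes = message.FaceAttributesResult;
                // e.g. update bound properties (age, gender, ...) from attributes.
            }
        }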
Example #2
        private async Task<LiveCameraResult> SubmitFacesAnalysisFunction(VideoFrame frame)
        {
            var result = new LiveCameraResult();

            var frameImage = frame.Image.ToMemoryStream(".jpg", ImageEncodingParameter.JpegParams);
            var faces      = await DetectFacesFromImage(frameImage, _faceAttributes);

            result.Faces = faces;

            return result;
        }
Example #3
        private async Task<LiveCameraResult> SubmitVisionAnalysisFunction(VideoFrame frame)
        {
            var result = new LiveCameraResult();

            var frameImage = frame.Image.ToMemoryStream(".jpg", ImageEncodingParameter.JpegParams);
            var tags       = await AnalyzeImageBySpecificVisualFeatures(frameImage, VisualFeature.Tags);

            result.Tags = tags;

            return result;
        }
Example #4
        private async Task<LiveCameraResult> SubmitEmotionsAnalysisFunction(VideoFrame frame)
        {
            var result = new LiveCameraResult();

            var frameImage = frame.Image.ToMemoryStream(".jpg", ImageEncodingParameter.JpegParams);
            var emotions   = await RecognizeEmotionsFromImage(frameImage);

            var emotionScores = emotions.Select(e => e.Scores).ToArray();

            result.EmotionScores = emotionScores;

            return result;
        }
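Examples #2-#4 all share the shape Func<VideoFrame, Task<LiveCameraResult>>, which is the delegate the frame grabber invokes for sampled frames. A minimal wiring sketch, assuming the grabber follows the Microsoft VideoFrameAnalyzer sample and exposes an AnalysisFunction property and an interval trigger (both names are assumptions here):

        // Pick which analysis runs on each sampled frame; the three submit
        // functions are interchangeable because they share one signature.
        _frameGrabber.AnalysisFunction = SubmitEmotionsAnalysisFunction;

        // Throttle cloud calls so that not every camera frame hits the API.
        _frameGrabber.TriggerAnalysisOnInterval(TimeSpan.FromSeconds(3));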
Example #5
        public MainWindow()
        {
            InitializeComponent();

            _grabber                = new FrameGrabber();
            _localFaceDetector      = new CascadeClassifier();
            _latestResultsToDisplay = null;
            _facesGuids             = new List<Guid>();
            _statisticsWindow       = new StatisticsWindow();
            StatisticsData          = new StatisticsData();

            InitEvents();

            _localFaceDetector.Load("Data/haarcascade_frontalface_alt2.xml");
        }
Example #6
        private BitmapSource VisualizeResult(VideoFrame videoFrame, LiveCameraResult currentLiveCameraResult)
        {
            var result = videoFrame.Image.ToBitmapSource();

            if (currentLiveCameraResult != null)
            {
                var clientFaces = (OpenCvSharp.Rect[])videoFrame.UserData;
                if (clientFaces != null && currentLiveCameraResult.Faces != null)
                {
                    var faces = currentLiveCameraResult.Faces;
                    _openCVService.MatchAndReplaceFaces(faces, clientFaces);
                    result = DrawRectangleContext(result, faces);
                }
            }

            return result;
        }
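DrawRectangleContext is not part of this listing. A sketch of what such a helper could look like in WPF, using DrawingVisual and RenderTargetBitmap (the method name, pen style, and the assumption that API face coordinates map 1:1 onto display coordinates are all illustrative):

        private BitmapSource DrawRectangleContext(BitmapSource baseImage, Face[] faces)
        {
            var visual = new DrawingVisual();
            using (DrawingContext context = visual.RenderOpen())
            {
                // Draw the camera frame first, then outline each detected face.
                context.DrawImage(baseImage,
                    new System.Windows.Rect(0, 0, baseImage.Width, baseImage.Height));
                var pen = new Pen(Brushes.LightGreen, 2);
                foreach (var face in faces)
                {
                    var r = face.FaceRectangle;
                    context.DrawRectangle(null, pen,
                        new System.Windows.Rect(r.Left, r.Top, r.Width, r.Height));
                }
            }

            var bitmap = new RenderTargetBitmap(baseImage.PixelWidth,
                baseImage.PixelHeight, 96, 96, PixelFormats.Pbgra32);
            bitmap.Render(visual);
            bitmap.Freeze(); // allow cross-thread access to the rendered bitmap
            return bitmap;
        }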
Example #7
        /// <summary>
        /// Initializes all event handlers.
        /// </summary>
        private void InitEvents()
        {
            // New frame: run the local face detector and, when fusing results, render immediately.
            _grabber.NewFrameProvided += (s, e) =>
            {
                e.Frame.Rectangles = _localFaceDetector.DetectMultiScale(e.Frame.Image);

                Dispatcher.BeginInvoke((Action)(() =>
                {
                    if (_isFuseClientRemoteResults)
                    {
                        DisplayImage.Source = VisualizeResult(e.Frame);
                    }
                }));
            };

            // Receive a result from the API call.
            _grabber.NewResultAvailable += (s, e) =>
            {
                Dispatcher.BeginInvoke((Action)(() =>
                {
                    if (e.TimedOut)
                    {
                        MessageArea.Text = "API call timed out.";
                    }
                    else if (e.Exception is FaceAPI.FaceAPIException)
                    {
                        MessageArea.Text =
                            $"Face API call failed on frame {e.Frame.VideoFrameMetadata.TimeStamp}. Exception: " +
                            e.Exception.Message;
                    }
                    else
                    {
                        _latestResultsToDisplay = e.Analysis;

                        // Display the image and visualization in the right pane.
                        if (!_isFuseClientRemoteResults)
                        {
                            DisplayImage.Source = VisualizeResult(e.Frame);
                        }
                    }
                }));
            };
        }
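InitEvents only registers the handlers; the grabber still has to be pointed at a camera. A start/stop sketch, assuming the StartProcessingCameraAsync and StopProcessingAsync methods of the VideoFrameAnalyzer sample's FrameGrabber:

        private async void StartButton_Click(object sender, RoutedEventArgs e)
        {
            await _grabber.StartProcessingCameraAsync(0); // 0 = default camera index
        }

        private async void StopButton_Click(object sender, RoutedEventArgs e)
        {
            await _grabber.StopProcessingAsync();
        }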
Example #8
        /// <summary>
        /// Fuses the API and local detection results and renders the new picture
        /// </summary>
        /// <param name="frame">Frame to visualize</param>
        /// <returns>New BitmapSource with the detection overlay drawn on top</returns>
        private BitmapSource VisualizeResult(VideoFrame frame)
        {
            BitmapSource visImage = frame.Image.ToBitmapSource();
            //todo test
            LiveCameraResult result = _latestResultsToDisplay;

            if (result != null)
            {
                Rect[] clientFaces = frame.Rectangles;
                if (clientFaces != null && result.Faces != null)
                {
                    MatchAndReplaceFaceRectangles(result.Faces, clientFaces);
                }

                visImage = Visualization.DrawOverlay(visImage, result.Faces, result.EmotionScores);
            }

            return visImage;
        }
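MatchAndReplaceFaceRectangles itself is not shown. In the Microsoft sample this pattern comes from, it pairs the cloud results with the fresher local rectangles by sorting both sets left to right; a sketch along those lines (requires System.Linq; the FaceRectangle construction assumes the ProjectOxford Face contract types):

        private void MatchAndReplaceFaceRectangles(Face[] faces, Rect[] clientRects)
        {
            // Sort both sets by horizontal center, assume the sorted lists
            // correspond, and copy the newer local rectangle onto each face.
            var sortedFaces = faces
                .OrderBy(f => f.FaceRectangle.Left + 0.5 * f.FaceRectangle.Width)
                .ToArray();
            var sortedClientRects = clientRects
                .OrderBy(r => r.Left + 0.5 * r.Width)
                .ToArray();

            for (int i = 0; i < Math.Min(sortedFaces.Length, sortedClientRects.Length); i++)
            {
                Rect r = sortedClientRects[i];
                sortedFaces[i].FaceRectangle = new FaceRectangle
                {
                    Left = r.Left,
                    Top = r.Top,
                    Width = r.Width,
                    Height = r.Height
                };
            }
        }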
Example #9
        public BitmapSource Visualize(VideoFrame videoFrame, LiveCameraResult currentLiveCameraResult)
        {
            return VisualizeResult(videoFrame, currentLiveCameraResult);
        }