Example #1
        private BitmapSource VisualizeResult(VideoFrame frame)
        {
            // Draw any results on top of the image.
            BitmapSource visImage = frame.Image.ToBitmapSource();

            var result = _latestResultsToDisplay;

            if (result != null)
            {
                // See if we have local face detections for this image.
                var clientFaces = (OpenCvSharp.Rect[])frame.UserData;
                if (clientFaces != null && result.Faces != null)
                {
                    // If so, then the analysis results might be from an older frame. We need to match
                    // the client-side face detections (computed on this frame) with the analysis
                    // results (computed on the older frame) that we want to display.
                    MatchAndReplaceFaceRectangles(result.Faces, clientFaces);
                }

                visImage = Visualization.DrawFaces(visImage, result.Faces, result.EmotionScores, result.CelebrityNames);
                visImage = Visualization.DrawTags(visImage, result.Tags);
            }

            return visImage;
        }
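`MatchAndReplaceFaceRectangles` is referenced throughout these examples but never shown. Below is a minimal sketch of one common approach, assuming the Project Oxford `FaceAPI` contract types used elsewhere in this listing; pairing faces by sorted horizontal position is a heuristic, not the only option.
        // Sketch only: pairs the (older) remote analysis faces with the newer
        // client-side detections by sorting both left-to-right, then swaps in
        // the fresher rectangles. Assumes FaceAPI.Contract.Face exposes a
        // writable FaceRectangle; adjust to your client library.
        private void MatchAndReplaceFaceRectangles(FaceAPI.Contract.Face[] faces, OpenCvSharp.Rect[] clientRects)
        {
            // Sort both sets left-to-right by rectangle center.
            var sortedFaces = faces
                .OrderBy(f => f.FaceRectangle.Left + 0.5 * f.FaceRectangle.Width)
                .ToArray();
            var sortedRects = clientRects
                .OrderBy(r => r.Left + 0.5 * r.Width)
                .ToArray();

            // Assume both orderings correspond and pair them up.
            int count = Math.Min(sortedFaces.Length, sortedRects.Length);
            for (int i = 0; i < count; i++)
            {
                OpenCvSharp.Rect r = sortedRects[i];
                sortedFaces[i].FaceRectangle = new FaceAPI.Contract.FaceRectangle
                {
                    Left = r.Left,
                    Top = r.Top,
                    Width = r.Width,
                    Height = r.Height
                };
            }
        }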
Example #2
        private BitmapSource VisualizeEndRound(VideoFrame frame)
        {
            var bitmap = VisualizeRound(frame);

            return Visualization.DrawRoundEnd(bitmap, scoringSystem.CurrentRoundScore, playerImages, scoringSystem.TotalScore);
        }
Example #3
        private BitmapSource VisualizeEndGame(VideoFrame frame)
        {
            var bitmap = VisualizeRound(frame);

            //Dictionary<Guid,int> winners = scoringSystem.GameWinner();
            return Visualization.DrawGameEnd(bitmap, scoringSystem.TotalScore, playerImages, groupImages);
        }
Example #4
        private BitmapSource VisualizeResult(VideoFrame frame)
        {
            // Draw any results on top of the image.
            BitmapSource visImage = frame.Image.ToBitmapSource();

            var result = _latestResultsToDisplay;

            if (result != null)
            {
                // See if we have local face detections for this image.
                var clientFaces = (OpenCvSharp.Rect[])frame.UserData;
                if (clientFaces != null && result.Faces != null)
                {
                    // If so, then the analysis results might be from an older frame. We need to match
                    // the client-side face detections (computed on this frame) with the analysis
                    // results (computed on the older frame) that we want to display.
                    MatchAndReplaceFaceRectangles(result.Faces, clientFaces);
                }

                if (result.IdentifyResults != null && result.IdentifyResults.Length > 0)
                {
                    // Allocate the names array once, outside the loop, so names
                    // resolved in earlier iterations are not overwritten.
                    result.PersonNames = new string[result.IdentifyResults.Length];

                    for (int idx = 0; idx < result.IdentifyResults.Length; idx++)
                    {
                        // Update identification result for rendering.
                        var face = result.Faces[idx];
                        var res = result.IdentifyResults[idx];

                        if (res.Candidates.Length > 0 &&
                            face.FaceId == res.FaceId &&
                            Persons.Any(p => p.PersonId == res.Candidates[0].PersonId.ToString()))
                        {
                            result.PersonNames[idx] = Persons.First(p => p.PersonId == res.Candidates[0].PersonId.ToString()).PersonName;
                        }
                        else
                        {
                            result.PersonNames[idx] = "Unknown";
                        }
                    }

                    // Draw once, after every name has been resolved.
                    visImage = Visualization.DrawFaces(visImage, result.Faces, result.PersonNames);
                }

                //DB Operation
                if (result.PersonNames != null)
                {
                    DB_Operation(result.PersonNames);
                }
            }

            return visImage;
        }
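The identification branch above trusts the top candidate unconditionally. The Face API also reports a per-candidate confidence; below is a hedged sketch of name resolution with a minimum-confidence cutoff. The helper name, the 0.5 threshold, and the `Confidence` property are assumptions about the client version in use.
        // Sketch only: hypothetical helper that maps one identify result to a
        // display name, requiring a minimum confidence before trusting the
        // top candidate. Persons/PersonName come from the example above.
        private string ResolvePersonName(FaceAPI.Contract.IdentifyResult res, double minConfidence = 0.5)
        {
            var top = res.Candidates.FirstOrDefault();
            if (top == null || top.Confidence < minConfidence)
            {
                return "Unknown";
            }

            var person = Persons.FirstOrDefault(p => p.PersonId == top.PersonId.ToString());
            return person != null ? person.PersonName : "Unknown";
        }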
Example #5
        private BitmapSource VisualizeResult(VideoFrame frame)
        {
            // Draw any results on top of the image.
            BitmapSource visImage = frame.Image.ToBitmapSource();

            LiveCameraResult result = _latestResultsToDisplay;

            if (result != null)
            {
                // See if we have local face detections for this image.
                var clientFaces = (OpenCvSharp.Rect[])frame.UserData;
                if (clientFaces != null && result.Faces != null)
                {
                    // If so, then the analysis results might be from an older frame. We need to match
                    // the client-side face detections (computed on this frame) with the analysis
                    // results (computed on the older frame) that we want to display.
                    MatchAndReplaceFaceRectangles(result.Faces, clientFaces);
                }

                if (this.gameState == GameState.Explain)
                {
                    this.Dispatcher.BeginInvoke((Action)(() =>
                    {
                        RightImage.Source = ImageProvider.Instructions;
                        //visImage = Visualization.DrawExplain(visImage);
                    }));
                }
                else if (this.gameState == GameState.RoundBegin)
                {
                    visImage = VisualizeStartRound(frame);
                }
                else if (this.gameState == GameState.RoundEnd)
                {
                    visImage = VisualizeEndRound(frame);
                }
                else if (this.gameState == GameState.Game)
                {
                    // Compute round score
                    Dictionary<Guid, int> scores = round.ComputeFrameScorePerPlayer(result);
                    scoringSystem.AddToCurrentRound(scores);
                    visImage = Visualization.DrawFaces(visImage, round, result.Identities, scoringSystem, _mode);

                    SavePlayerImages(frame.Image.ToBitmapSource(), result);
                }
                else if (this.gameState == GameState.Participants)
                {
                    visImage = Visualization.DrawParticipants(visImage, result.Faces);
                }
                else if (this.gameState == GameState.GameEnd)
                {
                    _grabber.StopProcessingAsync();
                    visImage = VisualizeEndGame(frame);
                }
            }

            return visImage;
        }
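The `GameState` enum is not part of this listing. Reconstructed from the branches above, it needs at least the members below; the declaration order and any additional states are unknown.
        // Reconstructed from the gameState checks in VisualizeResult above;
        // the real enum may declare more states or a different order.
        public enum GameState
        {
            Participants,  // enrolling players
            Explain,       // showing instructions
            RoundBegin,
            Game,          // scoring live frames
            RoundEnd,
            GameEnd
        }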
Example #6
        private BitmapSource VisualizeResult(VideoFrame frame)
        {
            // Draw any results on top of the image.
            BitmapSource visImage = frame.Image.ToBitmapSource();

            var result = _latestResultsToDisplay;

            if (result != null)
            {
                // See if we have local face detections for this image.
                var clientFaces = (OpenCvSharp.Rect[])frame.UserData;
                if (clientFaces != null && result.Faces != null)
                {
                    // If so, then the analysis results might be from an older frame. We need to match
                    // the client-side face detections (computed on this frame) with the analysis
                    // results (computed on the older frame) that we want to display.
                    MatchAndReplaceFaceRectangles(result.Faces, clientFaces);
                }

                visImage = Visualization.DrawFaces(visImage, result.Faces, result.EmotionScores, result.CelebrityNames);

                StandardsFinder standardsFinder = new StandardsFinder();

                if (result.Tags != null && result.Tags.Any())
                {
                    foreach (var tag in result.Tags)
                    {
                        // Append the matching standard to the tag label before drawing.
                        var standardName = standardsFinder.GetStandards(tag.Name);
                        tag.Name = tag.Name + " -   " + standardName;
                    }
                }

                visImage = Visualization.DrawTags(visImage, result.Tags);
            }

            return visImage;
        }
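`StandardsFinder` is application code that does not appear in this listing. A minimal dictionary-backed stand-in matching the call shape in the loop above; the sample entries and fallback value are purely illustrative.
        // Sketch only: stand-in for the real StandardsFinder. The mapping
        // entries and the "n/a" fallback are assumptions for illustration.
        public class StandardsFinder
        {
            private static readonly Dictionary<string, string> _standards =
                new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
                {
                    { "helmet", "EN 397" },  // illustrative entries only
                    { "glove",  "EN 388" },
                };

            public string GetStandards(string tagName)
            {
                string standard;
                return _standards.TryGetValue(tagName, out standard) ? standard : "n/a";
            }
        }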
Example #7
        private BitmapSource VisualizeResult(VideoFrame frame)
        {
            // Draw any results on top of the image.
            BitmapSource visImage = frame.Image.ToBitmapSource();

            var result = _latestResultsToDisplay;

            if (result != null)
            {
                // See if we have local face detections for this image.
                var clientFaces = (OpenCvSharp.Rect[])frame.UserData;
                if (clientFaces != null && result.Faces != null)
                {
                    // If so, then the analysis results might be from an older frame. We need to match
                    // the client-side face detections (computed on this frame) with the analysis
                    // results (computed on the older frame) that we want to display.
                    MatchAndReplaceFaceRectangles(result.Faces, clientFaces);
                }

                var returnVal = Visualization.DrawFaces(visImage, result.Faces, result.EmotionScores, result.CelebrityNames, this);
                visImage = returnVal.Item1;

                // Arguments: return string, return code, customer age,
                // customer gender, customer emotion.
                // Guard kept for reference but disabled, so the POST fires on every frame:
                // if (returnVal.Item2 != "" || returnVal.Item3 != "")
                RESTPost(returnVal.Item2, returnVal.Item3, returnVal.Item4, returnVal.Item5, returnVal.Item6, returnVal.Item7);
            }

            return visImage;
        }
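`RESTPost` is likewise not shown; the inline notes label only five of its six arguments. Below is a hedged sketch of how the values could be posted with `HttpClient`; the endpoint URL, field names, and the sixth parameter's name are assumptions.
        private static readonly HttpClient _http = new HttpClient();

        // Sketch only: the real RESTPost is not shown in these examples.
        // Endpoint and field names are assumptions; requires System.Net.Http.
        private async void RESTPost(string returnString, string returnCode,
                                    string customerAge, string customerGender,
                                    string customerEmotion, string extra)
        {
            var form = new FormUrlEncodedContent(new Dictionary<string, string>
            {
                { "returnString", returnString },
                { "returnCode", returnCode },
                { "customerAge", customerAge },
                { "customerGender", customerGender },
                { "customerEmotion", customerEmotion },
                { "extra", extra },
            });

            try
            {
                // Fire-and-forget from the render path; a failed POST must
                // not break visualization.
                await _http.PostAsync("https://example.invalid/api/visits", form);
            }
            catch (HttpRequestException)
            {
                // Ignore backend failures.
            }
        }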
Example #8
        private BitmapSource RenderFrame(VideoFrame frame)
        {
            // Draw any results on top of the image.
            BitmapSource visImage = frame.Image.ToBitmapSource();

            List <Player> players = _gameService.Players;

            if (players != null)
            {
                var clientFaces = (OpenCvSharp.Rect[])frame.UserData;
                //if (clientFaces != null && players.Count > 0)
                //{
                //    // If so, then the analysis results might be from an older frame. We need to match
                //    // the client-side face detections (computed on this frame) with the analysis
                //    // results (computed on the older frame) that we want to display.
                //    _gameService.ProcessLocalFaces(clientFaces);
                //}
                visImage = Visualization.DrawPlayers(visImage, players, _gameService.State);
                //visImage = Visualization.DrawScore(visImage, _gameService.Score);

                switch (_gameService.State)
                {
                    case GameStateEnum.Started:
                        visImage = Visualization.DrawTime(visImage, _gameService.TimeElapsed);
                        break;

                    case GameStateEnum.Finished:
                        visImage = Visualization.DrawResults(visImage, players);
                        break;

                    default:
                        break;
                }
            }
            return visImage;
        }
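The `Visualization` helpers themselves are not included in this listing. For reference, here is a minimal sketch of how such a text overlay can be composed in WPF with `DrawingVisual` and `RenderTargetBitmap`; the font, brush, and position are assumptions, and the real helpers may differ.
        // Sketch only: draws a text overlay onto a BitmapSource, the general
        // shape a helper like Visualization.DrawTime would take in WPF.
        // Requires System.Windows.Media(.Imaging) and System.Globalization.
        private static BitmapSource DrawTextOverlay(BitmapSource baseImage, string text)
        {
            var visual = new DrawingVisual();
            using (DrawingContext ctx = visual.RenderOpen())
            {
                // Start from the camera frame.
                ctx.DrawImage(baseImage, new Rect(0, 0, baseImage.PixelWidth, baseImage.PixelHeight));

                // Draw the overlay text in the top-left corner.
                var ft = new FormattedText(text, CultureInfo.CurrentCulture,
                    FlowDirection.LeftToRight, new Typeface("Segoe UI"), 32, Brushes.Yellow);
                ctx.DrawText(ft, new Point(10, 10));
            }

            var target = new RenderTargetBitmap(
                baseImage.PixelWidth, baseImage.PixelHeight, 96, 96, PixelFormats.Pbgra32);
            target.Render(visual);
            target.Freeze();  // make it safe to hand across threads
            return target;
        }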
Example #9
        private BitmapSource VisualizeStartRound(VideoFrame frame)
        {
            var bitmap = VisualizeRound(frame);

            return Visualization.DrawRoundStart(bitmap, round, roundNumber);
        }
Example #10
        private ImageSource VisualizeTimer(bool drawIndicator)
        {
            // Draw the timer text, with an optional indicator.
            return Visualization.DrawTime(timerText, drawIndicator, round);
        }
Example #11
        internal static void ShowAd(string hybrisResponse, System.Windows.Controls.Image rightImage)
        {
            // Hackathon: show the ad in the user interface.
            Visualization.DrawAds(rightImage, new BitmapImage(new Uri(hybrisResponse)));
        }
Example #12
        public MainWindow()
        {
            InitializeComponent();
            Properties.Settings.Default.UploadStatus = "";

            // Create grabber.
            _grabber = new FrameGrabber<LiveCameraResult>();

            // Set up a listener for when the client receives a new frame.
            _grabber.NewFrameProvided += (s, e) =>
            {
                if (_mode == AppMode.EmotionsWithClientFaceDetect)
                {
                    // Local face detection.
                    var rects = _localFaceDetector.DetectMultiScale(e.Frame.Image);
                    // Attach faces to frame.
                    e.Frame.UserData = rects;
                }

                // The callback may occur on a different thread, so we must use the
                // MainWindow.Dispatcher when manipulating the UI.
                this.Dispatcher.BeginInvoke((Action)(() =>
                {
                    // Display the image in the left pane.
                    LeftImage.Source = e.Frame.Image.ToBitmapSource();

                    // If we're fusing client-side face detection with remote analysis, show the
                    // new frame now with the most recent analysis available.
                    if (_fuseClientRemoteResults)
                    {
                        RightImage.Source = VisualizeResult(e.Frame);
                    }

                    if (_mode == AppMode.AtosEmployees)
                    {
                        var result = _latestResultsToDisplay;
                        if (result != null)
                        {
                            Visualization.SummarizeAllAttributes(DetailsArea, result.Faces, result.CelebrityNames, result.Tags, result.EmotionScores);
                        }
                    }
                    else
                    {
                        DetailsArea.Text = "";
                    }
                }));

                // See if auto-stop should be triggered.
                if (Properties.Settings.Default.AutoStopEnabled && (DateTime.Now - _startTime) > Properties.Settings.Default.AutoStopTime)
                {
                    _grabber.StopProcessingAsync();
                }
            };

            // Set up a listener for when the client receives a new result from an API call.
            _grabber.NewResultAvailable += (s, e) =>
            {
                this.Dispatcher.BeginInvoke((Action)(() =>
                {
                    if (e.TimedOut)
                    {
                        MessageArea.Text = "API call timed out.";
                    }
                    else if (e.Exception != null)
                    {
                        string apiName = "";
                        string message = e.Exception.Message;
                        var faceEx = e.Exception as FaceAPI.FaceAPIException;
                        var emotionEx = e.Exception as Common.ClientException;
                        var visionEx = e.Exception as VisionAPI.ClientException;
                        if (faceEx != null)
                        {
                            apiName = "Face";
                            message = faceEx.ErrorMessage;
                        }
                        else if (emotionEx != null)
                        {
                            apiName = "Emotion";
                            message = emotionEx.Error.Message;
                        }
                        else if (visionEx != null)
                        {
                            apiName = "Computer Vision";
                            message = visionEx.Error.Message;
                        }
                        MessageArea.Text = string.Format("{0} API call failed on frame {1}. Exception: {2}", apiName, e.Frame.Metadata.Index, message);
                    }
                    else
                    {
                        _latestResultsToDisplay = e.Analysis;

                        // Display the image and visualization in the right pane.
                        if (!_fuseClientRemoteResults)
                        {
                            RightImage.Source = VisualizeResult(e.Frame);
                        }
                    }
                }));
            };

            // Create local face detector.
            _localFaceDetector.Load("Data/haarcascade_frontalface_alt2.xml");
        }
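`FrameGrabber<TResult>` is the camera/analysis pump from the Cognitive Services VideoFrameAnalyzer sample; its source is not in this listing. Reconstructed from how the handlers above use it, its event surface looks roughly like this sketch (the real class may differ in detail).
        // Sketch only: the event contract the MainWindow handlers rely on,
        // inferred from usage of e.Frame, e.TimedOut, e.Exception, e.Analysis.
        public class FrameGrabber<TResult>
        {
            public class NewFrameEventArgs : EventArgs
            {
                public VideoFrame Frame { get; set; }   // Image, UserData, Metadata.Index
            }

            public class NewResultEventArgs : EventArgs
            {
                public VideoFrame Frame { get; set; }
                public TResult Analysis { get; set; }   // default when the call failed
                public bool TimedOut { get; set; }
                public Exception Exception { get; set; }
            }

            public event EventHandler<NewFrameEventArgs> NewFrameProvided;
            public event EventHandler<NewResultEventArgs> NewResultAvailable;

            public Task StopProcessingAsync()
            {
                // The real implementation stops the capture/analysis loop.
                return Task.CompletedTask;
            }
        }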