/// <summary> Renders the most recent analysis results on top of the supplied video frame. </summary>
/// <param name="frame"> The frame whose image is used as the drawing background. </param>
/// <returns> The frame image with face and tag overlays applied (or the raw image when
/// no results are available yet). </returns>
private BitmapSource VisualizeResult(VideoFrame frame)
{
    // Start from the raw camera image.
    BitmapSource output = frame.Image.ToBitmapSource();

    var latest = _latestResultsToDisplay;
    if (latest == null)
    {
        return output;
    }

    // Face detections computed locally on this very frame, if any.
    var localFaces = (OpenCvSharp.Rect[])frame.UserData;
    if (localFaces != null && latest.Faces != null)
    {
        // The analysis results may have been computed on an older frame; align those
        // results with the face rectangles detected on the current frame before drawing.
        MatchAndReplaceFaceRectangles(latest.Faces, localFaces);
    }

    output = Visualization.DrawFaces(output, latest.Faces, latest.EmotionScores, latest.CelebrityNames);
    output = Visualization.DrawTags(output, latest.Tags);
    return output;
}
/// <summary> Renders the most recent analysis results on top of the supplied video frame,
/// resolving each identified face to a person name (or "Unknown"), and forwards the
/// resolved names to <see cref="DB_Operation"/>. </summary>
/// <param name="frame"> The frame whose image is used as the drawing background. </param>
/// <returns> The frame image with face/name overlays applied. </returns>
private BitmapSource VisualizeResult(VideoFrame frame)
{
    // Draw any results on top of the image.
    BitmapSource visImage = frame.Image.ToBitmapSource();
    var result = _latestResultsToDisplay;
    if (result != null)
    {
        // See if we have local face detections for this image.
        var clientFaces = (OpenCvSharp.Rect[])frame.UserData;
        if (clientFaces != null && result.Faces != null)
        {
            // If so, then the analysis results might be from an older frame. We need to match
            // the client-side face detections (computed on this frame) with the analysis
            // results (computed on the older frame) that we want to display.
            MatchAndReplaceFaceRectangles(result.Faces, clientFaces);
        }

        if (result.IdentifyResults != null && result.IdentifyResults.Length > 0)
        {
            // BUG FIX: the names array was previously re-allocated inside the loop,
            // wiping the names written by earlier iterations so only the last face's
            // name survived. Allocate it exactly once, before the loop.
            result.PersonNames = new string[result.IdentifyResults.Length];

            for (int idx = 0; idx < result.IdentifyResults.Length; idx++)
            {
                var res = result.IdentifyResults[idx];
                string name = "Unknown";

                if (res.Candidates.Length > 0)
                {
                    // Hoist the candidate id out of the lambda and use a single
                    // FirstOrDefault instead of the original Any + Where().First()
                    // double lookup over Persons.
                    var candidateId = res.Candidates[0].PersonId.ToString();
                    var person = Persons.FirstOrDefault(p => p.PersonId == candidateId);
                    if (person != null && result.Faces[idx].FaceId == res.FaceId)
                    {
                        name = person.PersonName;
                    }
                }

                result.PersonNames[idx] = name;
            }

            // Draw once, after every name has been resolved, instead of once per face.
            visImage = Visualization.DrawFaces(visImage, result.Faces, result.PersonNames);
        }

        // DB Operation
        if (result.PersonNames != null)
        {
            DB_Operation(result.PersonNames);
        }
    }
    return (visImage);
}
/// <summary> Renders the most recent analysis results over the supplied video frame,
/// dispatching on the current game state to pick the appropriate visualization. </summary>
/// <param name="frame"> The frame whose image is used as the drawing background. </param>
/// <returns> The frame image with the state-specific overlay applied. </returns>
private BitmapSource VisualizeResult(VideoFrame frame)
{
    // Start from the raw camera image.
    BitmapSource visImage = frame.Image.ToBitmapSource();

    LiveCameraResult result = _latestResultsToDisplay;
    if (result == null)
    {
        return visImage;
    }

    // Face detections computed locally on this very frame, if any.
    var clientFaces = (OpenCvSharp.Rect[])frame.UserData;
    if (clientFaces != null && result.Faces != null)
    {
        // The analysis results may have been computed on an older frame; align them
        // with the face rectangles detected on the current frame before drawing.
        MatchAndReplaceFaceRectangles(result.Faces, clientFaces);
    }

    switch (this.gameState)
    {
        case GameState.Explain:
            // Show the instructions image on the UI thread.
            this.Dispatcher.BeginInvoke((Action)(() =>
            {
                RightImage.Source = ImageProvider.Instructions;
            }));
            break;

        case GameState.RoundBegin:
            visImage = VisualizeStartRound(frame);
            break;

        case GameState.RoundEnd:
            visImage = VisualizeEndRound(frame);
            break;

        case GameState.Game:
            // Score this frame, accumulate it into the round, and draw the game overlay.
            Dictionary<Guid, int> scores = round.ComputeFrameScorePerPlayer(result);
            scoringSystem.AddToCurrentRound(scores);
            visImage = Visualization.DrawFaces(visImage, round, result.Identities, scoringSystem, _mode);
            SavePlayerImages(frame.Image.ToBitmapSource(), result);
            break;

        case GameState.Participants:
            visImage = Visualization.DrawParticipants(visImage, result.Faces);
            break;

        case GameState.GameEnd:
            _grabber.StopProcessingAsync();
            visImage = VisualizeEndGame(frame);
            break;
    }

    return visImage;
}
/// <summary> Renders the most recent analysis results over the supplied video frame,
/// annotating each tag name with its matching standard before drawing. </summary>
/// <param name="frame"> The frame whose image is used as the drawing background. </param>
/// <returns> The frame image with face and tag overlays applied. </returns>
private BitmapSource VisualizeResult(VideoFrame frame)
{
    // Draw any results on top of the image.
    BitmapSource visImage = frame.Image.ToBitmapSource();
    var result = _latestResultsToDisplay;
    if (result != null)
    {
        // See if we have local face detections for this image.
        var clientFaces = (OpenCvSharp.Rect[])frame.UserData;
        if (clientFaces != null && result.Faces != null)
        {
            // If so, then the analysis results might be from an older frame. We need to match
            // the client-side face detections (computed on this frame) with the analysis
            // results (computed on the older frame) that we want to display.
            MatchAndReplaceFaceRectangles(result.Faces, clientFaces);
        }
        visImage = Visualization.DrawFaces(visImage, result.Faces, result.EmotionScores, result.CelebrityNames);

        if (result.Tags != null && result.Tags.Any())
        {
            StandardsFinder standardsFinder = new StandardsFinder();
            const string separator = " - ";
            foreach (var tag in result.Tags)
            {
                // BUG FIX: this method runs once per rendered frame on the SAME shared
                // result object, so unconditionally mutating tag.Name appended the
                // standard again on every frame ("cat - std - std - ..."). Only append
                // when the tag has not been annotated yet.
                if (!tag.Name.Contains(separator))
                {
                    var standardName = standardsFinder.GetStandards(tag.Name);
                    tag.Name = tag.Name + separator + standardName;
                }
            }
        }
        visImage = Visualization.DrawTags(visImage, result.Tags);
    }
    return (visImage);
}
/// <summary> Renders the most recent analysis results (face rectangles with emotion and
/// celebrity labels) on top of the supplied video frame, then forwards the values
/// returned alongside the annotated image to <see cref="RESTPost"/>. </summary>
/// <param name="frame"> The frame whose image is used as the drawing background. </param>
/// <returns> The frame image with the visualization overlaid. </returns>
private BitmapSource VisualizeResult(VideoFrame frame)
{
    // Draw any results on top of the image.
    BitmapSource visImage = frame.Image.ToBitmapSource();
    var result = _latestResultsToDisplay;
    if (result != null)
    {
        // See if we have local face detections for this image.
        var clientFaces = (OpenCvSharp.Rect[])frame.UserData;
        if (clientFaces != null && result.Faces != null)
        {
            // If so, then the analysis results might be from an older frame. We need to match
            // the client-side face detections (computed on this frame) with the analysis
            // results (computed on the older frame) that we want to display.
            MatchAndReplaceFaceRectangles(result.Faces, clientFaces);
        }
        // DrawFaces returns a tuple: Item1 is the annotated image; Item2..Item7 are the
        // values forwarded to RESTPost below (per the inline comment they are presumably
        // return string/code and customer age/gender/emotion — TODO confirm against
        // Visualization.DrawFaces).
        var returnVal = Visualization.DrawFaces(visImage, result.Faces, result.EmotionScores, result.CelebrityNames, this);
        visImage = returnVal.Item1;
        // NOTE(review): the guard below is commented out, so RESTPost fires on every
        // frame even when Item2/Item3 are empty — confirm this is intentional.
        // if (returnVal.Item2 != "" || returnVal.Item3 != "")
        // {
        //returnString, //returnCode, //customerAge, //customerGender, //customerEmotion
        RESTPost(returnVal.Item2, returnVal.Item3, returnVal.Item4, returnVal.Item5, returnVal.Item6, returnVal.Item7);
        // }
    }
    return (visImage);
}