/// <summary>
/// Renders the given camera frame with the most recent API analysis results
/// (faces, emotions, celebrity names, tags) overlaid on top of it.
/// </summary>
/// <param name="frame">The camera frame to visualize; its UserData may carry
/// client-side face rectangles detected when the frame was grabbed.</param>
/// <returns>A Bitmap of the frame with any available annotations drawn in.</returns>
private Bitmap VisualizeResult(VideoFrame frame)
{
    // Start from the raw camera image; annotations are layered on only when
    // we actually have results to show.
    BitmapSource visImage = frame.Image.ToBitmapSource();

    var latest = _latestResultsToDisplay;
    if (latest != null)
    {
        // Client-side face rectangles were attached to this frame when it was
        // grabbed (see the NewFrameProvided handler).
        var localFaces = (OpenCvSharp.Rect[])frame.UserData;
        if (localFaces != null && latest.Faces != null)
        {
            // The analysis results may have been computed on an older frame, so
            // re-anchor them onto the face rectangles detected on THIS frame
            // before drawing.
            MatchAndReplaceFaceRectangles(latest.Faces, localFaces);
        }

        visImage = Visualization.DrawFaces(visImage, latest.Faces, latest.EmotionScores, latest.CelebrityNames, imageDrawing);
        visImage = Visualization.DrawTags(visImage, latest.Tags);
    }

    return BitmapFromSource(visImage);
}
/// <summary>
/// Initializes the form, creates the frame grabber, and wires up the two
/// grabber events: one fired per camera frame (local face detection) and one
/// fired per completed API call (remote analysis results).
/// </summary>
public Form1()
{
    InitializeComponent();
    this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.None;

    // Create grabber.
    _grabber = new FrameGrabber<LiveCameraResult>();

    // Fired for every new camera frame: run the local (client-side) face
    // detector and stash its rectangles on the frame for later matching.
    _grabber.NewFrameProvided += (s, e) =>
    {
        // Attach faces to frame.
        e.Frame.UserData = _localFaceDetector.DetectMultiScale(e.Frame.Image);

        // Marshal UI updates onto the UI thread.
        this.Invoke((Action)delegate
        {
            Visualization.ClearEmojis(imageDrawing);

            // Display the image in the left pane.
            imageBase.Image = e.Frame.Image.ToBitmap();

            // If we're fusing client-side face detection with remote analysis,
            // show the new frame now with the most recent analysis available.
            if (_fuseClientRemoteResults)
            {
                imageDrawing.Image = VisualizeResult(e.Frame);
            }
        });
    };

    // Fired when the client receives a new result from an API call.
    _grabber.NewResultAvailable += (s, e) =>
    {
        this.Invoke((Action)delegate
        {
            Visualization.ClearEmojis(imageDrawing);

            if (e.TimedOut)
            {
                // Timeouts are deliberately silent in this sample.
                //System.Windows.MessageBox.Show("API call timed out.");
            }
            else if (e.Exception != null)
            {
                // Work out which API failed and extract its specific error
                // message, falling back to the generic exception message.
                string apiName = "";
                string message = e.Exception.Message;
                if (e.Exception is FaceAPIException)
                {
                    apiName = "Face";
                    message = ((FaceAPIException)e.Exception).ErrorMessage;
                }
                else if (e.Exception is Microsoft.ProjectOxford.Common.ClientException)
                {
                    apiName = "Emotion";
                    message = ((Microsoft.ProjectOxford.Common.ClientException)e.Exception).Error.Message;
                }
                else if (e.Exception is Microsoft.ProjectOxford.Vision.ClientException)
                {
                    apiName = "Computer Vision";
                    message = ((Microsoft.ProjectOxford.Vision.ClientException)e.Exception).Error.Message;
                }
                // Error reporting is deliberately silent in this sample.
                //System.Windows.MessageBox.Show(string.Format("{0} API call failed on frame {1}. Exception: {2}", apiName, e.Frame.Metadata.Index, message));
            }
            else
            {
                // Remember the latest results for subsequent frames to overlay.
                _latestResultsToDisplay = e.Analysis;

                // Display the image and visualization in the right pane.
                if (!_fuseClientRemoteResults)
                {
                    imageDrawing.Image = VisualizeResult(e.Frame);
                }
            }
        });
    };

    _localFaceDetector.Load("Data/haarcascade_frontalface_alt2.xml");
}