/// <summary>
/// Runs the FER+ emotion model once against the supplied input frame.
/// </summary>
/// <param name="input">Wrapper whose Input338 video frame is bound to the model's "Input338" tensor.</param>
/// <returns>
/// Output wrapper whose Plus692_Output_0 holds the raw (pre-softmax) emotion scores,
/// or null for that property if the session output is not a TensorFloat.
/// </returns>
public async Task<emotion_ferplusOutput> EvaluateAsync(emotion_ferplusInput input)
{
    // Bind the single model input, evaluate, then unpack the single model output.
    binding.Bind("Input338", input.Input338);
    var evalResult = await session.EvaluateAsync(binding, "0");

    return new emotion_ferplusOutput
    {
        // NOTE(review): 'as' yields null if the output tensor is not a TensorFloat —
        // presumably it always is for this model; confirm against the ONNX metadata.
        Plus692_Output_0 = evalResult.Outputs["Plus692_Output_0"] as TensorFloat,
    };
}
// get frame and analyze
// Per-frame handler: while the alarm is on, detect a face, run the FER+ model on it,
// and turn the alarm off once the user holds the target emotion for 3 continuous seconds.
// async void is acceptable here because this is a top-level event handler.
private async void Preview_FrameArrived(object sender, FrameEventArgs e)
{
    // Only analyze frames while the alarm is active.
    if (!alarmOn)
    {
        return;
    }

    // The frame owns this bitmap — do not dispose it here.
    var bitmap = e.VideoFrame.SoftwareBitmap;
    if (bitmap == null)
    {
        return;
    }

    // faceDetector requires Gray8 or Nv12. Convert() allocates a NEW bitmap that this
    // handler owns, so it must be disposed (fixes a per-frame SoftwareBitmap leak).
    using (var convertedBitmap = SoftwareBitmap.Convert(bitmap, BitmapPixelFormat.Gray8))
    {
        var faces = await faceDetector.DetectFacesAsync(convertedBitmap);

        // if there is a face in the frame, evaluate the emotion
        var detectedFace = faces.FirstOrDefault();
        if (detectedFace == null)
        {
            // can't find face — reset the "hold the emotion" timer
            lastTimeEmotionMatched = null;
            return;
        }

        var boundingBox = new Rect(
            detectedFace.FaceBox.X,
            detectedFace.FaceBox.Y,
            detectedFace.FaceBox.Width,
            detectedFace.FaceBox.Height);

        // Crop() returns another bitmap we own; dispose it too (second leak fix).
        using (var croppedFace = Crop(convertedBitmap, boundingBox))
        {
            var input = new emotion_ferplusInput();
            input.Input338 = VideoFrame.CreateWithSoftwareBitmap(croppedFace);
            var emotionResults = await model.EvaluateAsync(input);

            // to get percentages, you'd need to run the output through a softmax function
            // we don't need percentages, we just need max value
            var emotionIndex = emotionResults.Plus692_Output_0.IndexOf(emotionResults.Plus692_Output_0.Max());

            if (emotionIndex == currentEmotionIndex)
            {
                // if the user has been doing the same emotion for over 3 seconds - turn off alarm
                // NOTE(review): DateTime.Now is wall-clock time, so a clock/DST change can skew
                // this interval; DateTime.UtcNow (or a Stopwatch) would be safer — confirm the
                // field is not read as local time elsewhere before changing it.
                if (lastTimeEmotionMatched != null && DateTime.Now - lastTimeEmotionMatched >= TimeSpan.FromSeconds(3))
                {
                    alarmOn = false;
                }

                // First matching frame: start the 3-second timer.
                if (lastTimeEmotionMatched == null)
                {
                    lastTimeEmotionMatched = DateTime.Now;
                }
            }
            else
            {
                // Wrong emotion — restart the timer.
                lastTimeEmotionMatched = null;
            }
        }
    }
}