public async Task<RfbOutput> EvaluateAsync(RfbInput input)
{
    binding.Bind("input", input.input);
    var result = await session.EvaluateAsync(binding, "0");
    var output = new RfbOutput();
    output.scores = result.Outputs["scores"] as TensorFloat;
    output.boxes = result.Outputs["boxes"] as TensorFloat;
    return output;
}
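// A minimal sketch of how the session and binding used in EvaluateAsync might be created.
// The wrapper class name (RfbModel), the model field, and the CreateAsync factory are
// assumptions for illustration; LearningModel, LearningModelSession and LearningModelBinding
// are the actual Windows.AI.MachineLearning types, and StorageFile comes from Windows.Storage.
public static async Task<RfbModel> CreateAsync(StorageFile modelFile)
{
    var generator = new RfbModel();

    // Load the ONNX model from disk and prepare a session plus a reusable binding.
    generator.model = await LearningModel.LoadFromStorageFileAsync(modelFile);
    generator.session = new LearningModelSession(generator.model);
    generator.binding = new LearningModelBinding(generator.session);

    return generator;
}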
/// <summary>
/// Preprocess the loaded image as the model input and run face and landmark detection.
/// This function should execute after the image is loaded.
/// </summary>
private async void DetectFaces()
{
    // Detect faces using the ONNX model
    rfbInput.input = FaceDetectionHelper.SoftwareBitmapToTensorFloat(imageInputData);
    rfbOutput = await rfbModelGen.EvaluateAsync(rfbInput);
    List<FaceDetectionRectangle> faceRects = (List<FaceDetectionRectangle>)FaceDetectionHelper.Predict(rfbOutput.scores, rfbOutput.boxes);

    // Detect facial landmarks using the ONNX model
    List<FaceLandmarks> faceLandmarksList = new List<FaceLandmarks>();
    if (ShowDetail)
    {
        closestDistance = 10000.0f; // sentinel value, replaced by the smallest measured distance
        System.Diagnostics.Debug.WriteLine("Total: " + faceRects.Count);
        foreach (FaceDetectionRectangle faceRect in faceRects)
        {
            int rectX = (int)faceRect.X1;
            int rectY = (int)faceRect.Y1;
            int rectWidth = (int)(faceRect.X2 - faceRect.X1) + 1;
            int rectHeight = (int)(faceRect.Y2 - faceRect.Y1) + 1;

            // Crop only the image region that contains the face
            SoftwareBitmap croppedBitmap = new SoftwareBitmap(
                imageInputData.BitmapPixelFormat,
                FaceLandmarkHelper.inputImageDataSize,
                FaceLandmarkHelper.inputImageDataSize,
                BitmapAlphaMode.Ignore);
            System.Diagnostics.Debug.WriteLine("Crop");
            bool cropped = openCVHelper.CropResize(imageInputData, croppedBitmap, rectX, rectY, rectWidth, rectHeight);
            if (!cropped)
            {
                continue;
            }

            // Model processing
            landmarkInput.input = FaceDetectionHelper.SoftwareBitmapToTensorFloat(croppedBitmap);
            landmarkOutput = await landmarkModelGen.EvaluateAsync(landmarkInput);
            FaceLandmarks faceLandmarks = (FaceLandmarks)FaceLandmarkHelper.Predict(landmarkOutput.output, rectX, rectY, rectWidth, rectHeight);

            // Calculate camera distance
            if (faceLandmarks.IsValid)
            {
                System.Diagnostics.Debug.WriteLine("Valid: " + faceLandmarks.landmarkList.Count);
                float distance = ImageHelper.CalculateCameraDistance(cameraFocalLength, faceLandmarks.EyeDistance);
                closestDistance = distance < closestDistance ? distance : closestDistance;
                faceLandmarksList.Add(faceLandmarks);
            }
            croppedBitmap.Dispose();
        }

        // If no valid face was measured, report a distance of zero
        closestDistance = closestDistance == 10000.0f ? 0.0f : closestDistance;
        if (CameraMode)
        {
            detailText.Text = $"Distance: {(int)closestDistance} cm";
        }
        else
        {
            detailText.Text = "";
        }
    }
    else
    {
        detailText.Text = "";
    }

    // Draw rectangles or facial landmarks of detected faces on top of the image
    ClearPreviousFaceRectangles();
    if (ShowDetail)
    {
        drawingFace.DrawFaceAll(faceRects, faceLandmarksList);
    }
    else
    {
        drawingFace.DrawFaceRetangles(faceRects);
    }
    foreach (Path path in drawingFace.pathes)
    {
        imageGrid.Children.Add(path);
    }
    faceLandmarksList.Clear();
}
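// A minimal sketch of the pinhole-camera estimate that ImageHelper.CalculateCameraDistance
// is assumed to perform: distance = focalLength * realEyeDistance / eyeDistanceInPixels.
// The helper's actual implementation is not shown in this sample; the method name below and
// the 6.3 cm average interpupillary distance are assumptions for illustration only.
private static float EstimateCameraDistance(float focalLengthInPixels, float eyeDistanceInPixels)
{
    const float averageEyeDistanceCm = 6.3f; // assumed real-world interpupillary distance

    // Guard against invalid landmark output to avoid division by zero.
    if (eyeDistanceInPixels <= 0.0f)
    {
        return 0.0f;
    }

    // Pinhole model: the farther the face, the smaller the eye distance appears in pixels.
    return focalLengthInPixels * averageEyeDistanceCm / eyeDistanceInPixels;
}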