// Make a square thumbnail (Worker.thumbnail_size x Worker.thumbnail_size, BGR)
// of the sample's face, letterboxed on a black background; if 'name' is
// non-empty, a darkened caption strip with the name is drawn at the bottom.
// The caller owns (and must dispose) the returned Mat.
public static OpenCvSharp.Mat makeThumbnail(
    RawSample sample,
    string name = "")
{
    int thumbnail_size = Worker.thumbnail_size;

    // buffer for the cut image
    MemoryStream stream = new MemoryStream();

    // make a cut in bmp format
    // so we don't waste time on encode/decode of the image:
    // it is just copied a few times, which is irrelevant
    sample.cutFaceImage(
        stream,
        RawSample.ImageFormat.IMAGE_FORMAT_BMP,
        RawSample.FaceCutType.FACE_CUT_BASE);

    OpenCvSharp.Mat temp = OpenCvSharp.Mat.ImDecode(
        stream.ToArray(),
        OpenCvSharp.ImreadModes.Color);

    // so we got an image - check it
    MAssert.Check(!temp.Empty());
    MAssert.Check(temp.Type() == OpenCvSharp.MatType.CV_8UC3);

    // compute the destination rectangle: scale the longer side to
    // thumbnail_size, keep aspect ratio, and center the result
    OpenCvSharp.Rect resRect;

    if (temp.Rows >= temp.Cols)
    {
        resRect.Height = thumbnail_size;
        resRect.Width = temp.Cols * thumbnail_size / temp.Rows;
    }
    else
    {
        resRect.Width = thumbnail_size;
        resRect.Height = temp.Rows * thumbnail_size / temp.Cols;
    }

    resRect.X = (thumbnail_size - resRect.Width) / 2;
    resRect.Y = (thumbnail_size - resRect.Height) / 2;

    // black canvas; the face is resized directly into the centered sub-view
    OpenCvSharp.Mat result = new OpenCvSharp.Mat(
        thumbnail_size,
        thumbnail_size,
        OpenCvSharp.MatType.CV_8UC3,
        OpenCvSharp.Scalar.All(0));

    OpenCvSharp.Cv2.Resize(
        temp,
        result[resRect],
        resRect.Size);

    if (!string.IsNullOrEmpty(name))
    {
        // darken a 27-pixel strip at the bottom and draw the caption on it
        result[new OpenCvSharp.Rect(0, result.Rows - 27, result.Cols, 27)] =
            result.RowRange(result.Rows - 27, result.Rows) * 0.5f;

        OpenCvSharp.Cv2.PutText(
            result,
            name,
            new OpenCvSharp.Point(0, result.Rows - 7),
            OpenCvSharp.HersheyFonts.HersheyDuplex,
            0.7,
            OpenCvSharp.Scalar.All(255),
            1,
            OpenCvSharp.LineTypes.AntiAlias);
    }

    // release the native memory of the temporaries
    // (fixes a native-heap leak: Mat wraps unmanaged cv::Mat storage)
    temp.Dispose();
    stream.Dispose();

    return (result);
}
// Process one BGR frame: feed it to the tracker, draw the overlays enabled by
// the _flag_* fields for every detected face, draw the checkbox panel, and
// display the result in the "demo" window.
// NOTE(review): assumes 'frame' is a continuous 8-bit BGR Mat - Marshal.Copy
// below copies Total()*Channels bytes starting at DataStart; confirm upstream.
public void work(OpenCvSharp.Mat frame)
{
    // sending the frame into the capturer (_tracker)
    byte[] data = new byte[frame.Total() * frame.Type().Channels];
    Marshal.Copy(frame.DataStart, data, 0, (int)data.Length);
    RawImage ri_frame = new RawImage(frame.Width, frame.Height, RawImage.Format.FORMAT_BGR, data);
    List<RawSample> samples = _tracker.capture(ri_frame);

    // clone the frame for drawing on it
    OpenCvSharp.Mat draw_image = frame.Clone();

    // handle each face on the frame separately
    for (int i = 0; i < samples.Count; ++i)
    {
        RawSample sample = samples[i];

        // get a face rectangle
        RawSample.Rectangle rectangle = sample.getRectangle();

        // set a point to place information for this face
        OpenCvSharp.Point2f text_point = new OpenCvSharp.Point2f(
            rectangle.x + rectangle.width + 3,
            rectangle.y + 10);

        const float text_line_height = 22;

        // draw facial points
        // red color for all points
        // green for left eye
        // yellow for right eye
        // (yes, there is a mess with left and right eyes in face_sdk api,
        // but if we fix it now we will lose compatibility with previous versions)
        if (_flag_points)
        {
            List<Point> points = sample.getLandmarks();

            // j == -2 -> left eye, j == -1 -> right eye, j >= 0 -> landmark j
            for (int j = -2; j < points.Count; ++j)
            {
                Point p =
                    j == -2 ?
                        sample.getLeftEye() :
                    j == -1 ?
                        sample.getRightEye() :
                    points[j];

                OpenCvSharp.Scalar color =
                    j == -2 ?
                        new OpenCvSharp.Scalar(50, 255, 50) :
                    j == -1 ?
                        new OpenCvSharp.Scalar(50, 255, 255) :
                    new OpenCvSharp.Scalar(50, 50, 255);

                // eyes are drawn bigger than the other landmarks
                OpenCvSharp.Cv2.Circle(
                    draw_image,
                    new OpenCvSharp.Point2f(p.x, p.y),
                    j < 0 ? 4 : 2,
                    color,
                    -1,
                    OpenCvSharp.LineTypes.AntiAlias);
            }
        }

        // draw rectangle
        if (_flag_positions)
        {
            OpenCvSharp.Cv2.Rectangle(
                draw_image,
                new OpenCvSharp.Rect(
                    rectangle.x,
                    rectangle.y,
                    rectangle.width,
                    rectangle.height),
                new OpenCvSharp.Scalar(50, 50, 255),
                2,
                OpenCvSharp.LineTypes.AntiAlias);
        }

        // draw age and gender
        if (_flag_age_gender)
        {
            AgeGenderEstimator.AgeGender age_gender = _age_geder_estimator.estimateAgeGender(sample);

            string age_text = "age: ";

            switch (age_gender.age)
            {
                case AgeGenderEstimator.Age.AGE_KID:    age_text += "kid "; break;
                case AgeGenderEstimator.Age.AGE_YOUNG:  age_text += "young "; break;
                case AgeGenderEstimator.Age.AGE_ADULT:  age_text += "adult "; break;
                case AgeGenderEstimator.Age.AGE_SENIOR: age_text += "senior "; break;
            }

            age_text += string.Format("years: {0:G3}", age_gender.age_years);

            puttext(
                draw_image,
                age_text,
                text_point);
            text_point.Y += text_line_height;

            puttext(
                draw_image,
                age_gender.gender == AgeGenderEstimator.Gender.GENDER_FEMALE ? "gender: female" :
                age_gender.gender == AgeGenderEstimator.Gender.GENDER_MALE ? "gender: male" : "?",
                text_point);
            text_point.Y += text_line_height;

            text_point.Y += text_line_height / 3;
        }

        // draw emotions
        if (_flag_emotions)
        {
            List<EmotionsEstimator.EmotionConfidence> emotions =
                _emotions_estimator.estimateEmotions(sample);

            for (int j = 0; j < emotions.Count; ++j)
            {
                EmotionsEstimator.Emotion emotion = emotions[j].emotion;
                float confidence = emotions[j].confidence;

                // confidence bar: 100 px == confidence 1.0
                OpenCvSharp.Cv2.Rectangle(
                    draw_image,
                    new OpenCvSharp.Rect(
                        (int)text_point.X,
                        (int)text_point.Y - (int)text_line_height / 2,
                        (int)(100 * confidence),
                        (int)text_line_height),
                    emotion == EmotionsEstimator.Emotion.EMOTION_NEUTRAL ?
                        new OpenCvSharp.Scalar(255, 0, 0) :
                    emotion == EmotionsEstimator.Emotion.EMOTION_HAPPY ?
                        new OpenCvSharp.Scalar(0, 255, 0) :
                    emotion == EmotionsEstimator.Emotion.EMOTION_ANGRY ?
                        new OpenCvSharp.Scalar(0, 0, 255) :
                    emotion == EmotionsEstimator.Emotion.EMOTION_SURPRISE ?
                        new OpenCvSharp.Scalar(0, 255, 255) :
                    new OpenCvSharp.Scalar(0, 0, 0),
                    -1);

                puttext(
                    draw_image,
                    emotion == EmotionsEstimator.Emotion.EMOTION_NEUTRAL ? "neutral" :
                    emotion == EmotionsEstimator.Emotion.EMOTION_HAPPY ? "happy" :
                    emotion == EmotionsEstimator.Emotion.EMOTION_ANGRY ? "angry" :
                    emotion == EmotionsEstimator.Emotion.EMOTION_SURPRISE ? "surprise" : "?",
                    text_point + new OpenCvSharp.Point2f(100, 0));

                text_point.Y += text_line_height;
                text_point.Y += text_line_height / 3;
            }
        }

        // draw angles text
        if (_flag_angles)
        {
            // BUGFIX: original computed (0.1f * (int)10 * angle + 0.5f), which
            // due to cast precedence is just (angle + 0.5f) - it ADDED half a
            // degree instead of rounding. This is the intended round-to-one-
            // decimal from the C++ sample: 0.1 * int(10 * angle + 0.5).
            string yaw, pitch, roll;
            yaw = string.Format("yaw: {0}", 0.1f * (int)(10 * sample.getAngles().yaw + 0.5f));
            pitch = string.Format("pitch: {0}", 0.1f * (int)(10 * sample.getAngles().pitch + 0.5f));
            roll = string.Format("roll: {0}", 0.1f * (int)(10 * sample.getAngles().roll + 0.5f));

            puttext(draw_image, yaw, text_point);
            text_point.Y += text_line_height;

            puttext(draw_image, pitch, text_point);
            text_point.Y += text_line_height;

            puttext(draw_image, roll, text_point);
            text_point.Y += text_line_height;

            text_point.Y += text_line_height / 3;
        }

        // draw angles vectors
        if (_flag_angles_vectors)
        {
            RawSample.Angles angles = sample.getAngles();

            float cos_a = (float)Math.Cos(angles.yaw * OpenCvSharp.Cv2.PI / 180);
            float sin_a = (float)Math.Sin(angles.yaw * OpenCvSharp.Cv2.PI / 180);

            float cos_b = (float)Math.Cos(angles.pitch * OpenCvSharp.Cv2.PI / 180);
            float sin_b = (float)Math.Sin(angles.pitch * OpenCvSharp.Cv2.PI / 180);

            float cos_c = (float)Math.Cos(angles.roll * OpenCvSharp.Cv2.PI / 180);
            float sin_c = (float)Math.Sin(angles.roll * OpenCvSharp.Cv2.PI / 180);

            // rows of the head rotation matrix projected for drawing
            OpenCvSharp.Point3f[] xyz =
            {
                new OpenCvSharp.Point3f(cos_a * cos_c, -sin_c, -sin_a),
                new OpenCvSharp.Point3f(sin_c, cos_b * cos_c, -sin_b),
                new OpenCvSharp.Point3f(sin_a, sin_b, cos_a * cos_b)
            };

            // anchor the axes at the midpoint between the eyes
            OpenCvSharp.Point2f center = new OpenCvSharp.Point2f(
                (sample.getLeftEye().x + sample.getRightEye().x) * 0.5f,
                (sample.getLeftEye().y + sample.getRightEye().y) * 0.5f);

            float length = (rectangle.width + rectangle.height) * 0.3f;

            for (int c = 0; c < 3; ++c)
            {
                OpenCvSharp.Cv2.Line(
                    draw_image,
                    center,
                    center + new OpenCvSharp.Point2f(xyz[c].X, -xyz[c].Y) * length,
                    c == 0 ? new OpenCvSharp.Scalar(50, 255, 255) :
                    c == 1 ? new OpenCvSharp.Scalar(50, 255, 50) :
                    c == 2 ? new OpenCvSharp.Scalar(50, 50, 255) :
                    new OpenCvSharp.Scalar(),
                    2,
                    OpenCvSharp.LineTypes.AntiAlias);
            }
        }

        // draw quality text
        if (_flag_quality)
        {
            QualityEstimator.Quality q = _quality_estimator.estimateQuality(sample);

            string lighting, noise, sharpness, flare;

            lighting = "lighting: " + q.lighting.ToString();
            puttext(draw_image, lighting, text_point);
            text_point.Y += text_line_height;

            noise = "noise: " + q.noise.ToString();
            puttext(draw_image, noise, text_point);
            text_point.Y += text_line_height;

            sharpness = "sharpness: " + q.sharpness.ToString();
            puttext(draw_image, sharpness, text_point);
            text_point.Y += text_line_height;

            flare = "flare: " + q.flare.ToString();
            puttext(draw_image, flare, text_point);
            text_point.Y += text_line_height;

            text_point.Y += text_line_height / 3;
        }

        // draw liveness text
        if (_flag_liveness)
        {
            Liveness2DEstimator.Liveness liveness_2d_result =
                _liveness_2d_estimator.estimateLiveness(sample);

            puttext(
                draw_image,
                "liveness: " + (
                    liveness_2d_result == Liveness2DEstimator.Liveness.REAL ? "real" :
                    liveness_2d_result == Liveness2DEstimator.Liveness.FAKE ? "fake" :
                    liveness_2d_result == Liveness2DEstimator.Liveness.NOT_ENOUGH_DATA ? "not enough data" : "??"),
                text_point);

            text_point.Y += text_line_height;
            text_point.Y += text_line_height / 3;
        }

        // draw face quality
        if (_flag_face_quality)
        {
            float quality = _face_quality_estimator.estimateQuality(sample);

            string ss = "face quality: " + quality.ToString();
            puttext(draw_image, ss, text_point);
            text_point.Y += text_line_height;
            text_point.Y += text_line_height / 3;
        }

        // draw face cuts (cut_i: 0 - base, 1 - full frontal, 2 - token frontal)
        for (int cut_i = 0; cut_i < 3; ++cut_i)
        {
            if ((cut_i == 0 && !_flag_cutting_base) ||
                (cut_i == 1 && !_flag_cutting_full) ||
                (cut_i == 2 && !_flag_cutting_token))
            {
                continue;
            }

            puttext(
                draw_image,
                cut_i == 0 ? "base cut:" :
                cut_i == 1 ? "full cut:" :
                cut_i == 2 ? "token cut:" : "?? cut",
                text_point);
            text_point.Y += text_line_height / 2;

            MemoryStream obuf = new MemoryStream();

            sample.cutFaceImage(
                obuf,
                RawSample.ImageFormat.IMAGE_FORMAT_BMP,
                cut_i == 0 ? RawSample.FaceCutType.FACE_CUT_BASE :
                cut_i == 1 ? RawSample.FaceCutType.FACE_CUT_FULL_FRONTAL :
                cut_i == 2 ? RawSample.FaceCutType.FACE_CUT_TOKEN_FRONTAL :
                (RawSample.FaceCutType)999);

            byte[] sbuf = obuf.ToArray();
            obuf.Dispose();

            OpenCvSharp.Mat img =
                OpenCvSharp.Cv2.ImDecode(sbuf, OpenCvSharp.ImreadModes.Unchanged);

            // downscale the cut before pasting it onto the frame
            OpenCvSharp.Cv2.Resize(img, img, OpenCvSharp.Size.Zero, 0.3, 0.3);

            // clip the paste rectangle against the frame borders
            int img_rect_x = (int)Math.Max(0, -text_point.X);
            int img_rect_y = (int)Math.Max(0, -text_point.Y);

            int img_rect_width = (int)Math.Min(
                img.Cols - img_rect_x,
                draw_image.Cols - Math.Max(0, text_point.X));

            int img_rect_height = (int)Math.Min(
                img.Rows - img_rect_y,
                draw_image.Rows - Math.Max(0, text_point.Y));

            if (img_rect_width <= 0 || img_rect_height <= 0)
            {
                // fully off-screen; release the decoded cut (was leaked before)
                img.Dispose();
                continue;
            }

            OpenCvSharp.Rect img_rect =
                new OpenCvSharp.Rect(img_rect_x, img_rect_y, img_rect_width, img_rect_height);

            img[img_rect].CopyTo(
                draw_image[new OpenCvSharp.Rect(
                    (int)Math.Max(0, text_point.X),
                    (int)Math.Max(0, text_point.Y),
                    img_rect.Width,
                    img_rect.Height)]);

            text_point.Y += text_line_height / 2;
            text_point.Y += img.Rows;
            text_point.Y += text_line_height / 3;

            // release the decoded cut's native memory (was leaked before)
            img.Dispose();
        }
    }

    // draw checkboxes
    for (int i = 0; i < flags_count; ++i)
    {
        OpenCvSharp.Rect rect = flag_rect(i);
        OpenCvSharp.Rect rect2 =
            new OpenCvSharp.Rect(rect.X + 5, rect.Y + 5, rect.Width - 10, rect.Height - 10);

        // white box with a black border; filled inner box when the flag is set
        OpenCvSharp.Cv2.Rectangle(draw_image, rect, OpenCvSharp.Scalar.All(255), -1);
        OpenCvSharp.Cv2.Rectangle(draw_image, rect, OpenCvSharp.Scalar.All(0), 2,
            OpenCvSharp.LineTypes.AntiAlias);

        if (get_flag(i))
        {
            OpenCvSharp.Cv2.Rectangle(draw_image, rect2, OpenCvSharp.Scalar.All(0), -1,
                OpenCvSharp.LineTypes.AntiAlias);
        }

        puttext(
            draw_image,
            flag_name(i),
            new OpenCvSharp.Point2f(rect.X + rect.Width + 3, rect.Y + rect.Height - 3));
    }

    // show the image with the drawn information
    OpenCvSharp.Cv2.ImShow("demo", draw_image);

    // register callback on mouse events
    OpenCvSharp.Cv2.SetMouseCallback("demo", (OpenCvSharp.CvMouseCallback)onMouse);
}