// Face-detection demo entry point: each commented call below exercises one
// FaceTrack API variant; uncomment the one you want to try.
public static void test_face_track()
{
    FaceTrack ft = new FaceTrack();
    // Face detection from an image file path; returns JSON
    // ft.test_track();
    // Largest-face detection from an image file path; returns JSON
    //ft.test_track_max_face();
    // Face detection from an in-memory image buffer; returns JSON
    // ft.test_track_by_buf();
    // Largest-face detection from an in-memory image buffer; returns JSON
    // ft.test_track_max_face_by_buf();
    // Real-time face detection from a USB camera
    ft.test_usb_track_face_info();
    // Clear the tracked-face state
    //ft.test_clear_tracked_faces();
}
// Face quality check (takes an OpenCV frame plus detected face info; works for
// multiple faces). Reads a test image from disk, runs face tracking on it, draws
// the detected face box, then queries face quality for the first detected face.
public void test_get_face_quality_by_face()
{
    Mat img = Cv2.ImRead("d:\\1112.jpg");
    int ilen = 1; // number of face slots the caller allocates
    TrackFaceInfo track_info = new TrackFaceInfo();
    track_info.landmarks = new int[144];
    track_info.headPose = new float[3];
    track_info.face_id = 0;
    track_info.score = 0;
    int sizeTrack = Marshal.SizeOf(typeof(TrackFaceInfo));
    IntPtr ptT = Marshal.AllocHGlobal(sizeTrack * ilen);
    try
    {
        // in: allocated face slots; out: faces actually detected
        int curSize = ilen;
        // returned face count = min(allocated slots, detected faces)
        int faceSize = FaceTrack.track_mat(ptT, img.CvPtr, ref curSize);
        if (faceSize > 0)
        {
            // First (and only) slot of the native buffer; no offset arithmetic needed.
            track_info = (TrackFaceInfo)Marshal.PtrToStructure(ptT, typeof(TrackFaceInfo));
            // Draw the face bounding box
            FaceTrack track = new FaceTrack();
            RotatedRect box = track.bounding_box(track_info.landmarks, track_info.landmarks.Length);
            track.draw_rotated_box(ref img, ref box, new Scalar(0, 255, 0));
            //Cv2.ImShow("img", img);
            //Cv2.WaitKey(0);
        }
        Console.WriteLine("face_quality_by_face");
        if (faceSize > 0)
        {
            IntPtr ptrMsg = face_quality_by_face(img.CvPtr, ref track_info);
            string buf = Marshal.PtrToStringAnsi(ptrMsg);
            Console.WriteLine("attr res is:" + buf);
        }
    }
    finally
    {
        // BUGFIX: free the native buffer even if tracking/marshaling throws,
        // and release the Mat (it was previously leaked).
        Marshal.FreeHGlobal(ptT);
        img.Release();
    }
}
// RGB + depth liveness detection with a dual-lens camera (adapted for the HJIMI
// dual-lens camera). Grabs synchronized RGB/depth frames, runs the SDK liveness
// check, draws detected face boxes on the RGB frame, and shows both streams.
// Press ESC to exit (matches rgb_depth_liveness_check_orbe).
public bool rgb_depth_liveness_check_hjimi()
{
    int faceNum = 2;          // capacity: maximum faces the native buffer can hold
    int face_size = faceNum;  // in: allocated slots; out: faces actually detected
    TrackFaceInfo[] track_info = new TrackFaceInfo[faceNum];
    for (int i = 0; i < faceNum; i++)
    {
        track_info[i] = new TrackFaceInfo();
        track_info[i].landmarks = new int[144];
        track_info[i].headPose = new float[3];
        track_info[i].face_id = 0;
        track_info[i].score = 0;
    }
    int sizeTrack = Marshal.SizeOf(typeof(TrackFaceInfo));
    IntPtr ptT = Marshal.AllocHGlobal(sizeTrack * faceNum);
    RotatedRect box;
    IntPtr phjimi = HjimiCamera.new_hjimi(); // paired with hjimi_release below
    var rgb_win = new Window("rgb", WindowMode.AutoSize);
    var depth_win = new Window("depth", WindowMode.Normal);
    float rgb_score = 0;
    float depth_score = 0;
    Mat cv_depth = new Mat();
    Mat cv_rgb = new Mat();
    // Hoisted out of the loop: FaceTrack carries no per-frame state here.
    FaceTrack track = new FaceTrack();
    while (true)
    {
        bool ok = HjimiCamera.open_hjimimat(phjimi, cv_rgb.CvPtr, cv_depth.CvPtr);
        if (!ok)
        {
            Console.WriteLine("open camera faile");
            continue;
        }
        if (cv_rgb.Empty()) { continue; }
        if (cv_depth.Empty()) { continue; }
        // BUGFIX: the SDK writes the detected count back into face_size, so the
        // capacity must be restored before every call or it shrinks permanently
        // after the first frame with fewer than faceNum faces.
        face_size = faceNum;
        IntPtr resptr = rgb_depth_liveness_check_faceinfo(cv_rgb.CvPtr, cv_depth.CvPtr,
            ref rgb_score, ref depth_score, ref face_size, ptT);
        string res = Marshal.PtrToStringAnsi(resptr);
        Console.WriteLine("res is:{0}", res);
        // Defensive clamp in case the SDK ever reports more faces than allocated.
        int detected = Math.Min(face_size, faceNum);
        for (int index = 0; index < detected; index++)
        {
            IntPtr ptrTrack = (IntPtr)(ptT.ToInt64() + sizeTrack * index);
            track_info[index] = (TrackFaceInfo)Marshal.PtrToStructure(ptrTrack, typeof(TrackFaceInfo));
            Console.WriteLine("in Liveness::usb_track face_id is {0}:", track_info[index].face_id);
            Console.WriteLine("landmarks is:");
            // Print the first 10 landmark values as a sample.
            Console.WriteLine("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},",
                track_info[index].landmarks[0], track_info[index].landmarks[1],
                track_info[index].landmarks[2], track_info[index].landmarks[3],
                track_info[index].landmarks[4], track_info[index].landmarks[5],
                track_info[index].landmarks[6], track_info[index].landmarks[7],
                track_info[index].landmarks[8], track_info[index].landmarks[9]);
            for (int k = 0; k < track_info[index].headPose.Length; k++)
            {
                Console.WriteLine("angle is:{0:f}", track_info[index].headPose[k]);
            }
            Console.WriteLine("score is:{0:f}", track_info[index].score);
            // Rotation angle of the face box
            Console.WriteLine("mAngle is:{0:f}", track_info[index].box.mAngle);
            // Face width
            Console.WriteLine("mWidth is:{0:f}", track_info[index].box.mWidth);
            // Center X,Y coordinates
            Console.WriteLine("mCenter_x is:{0:f}", track_info[index].box.mCenter_x);
            Console.WriteLine("mCenter_y is:{0:f}", track_info[index].box.mCenter_y);
            // Draw the face bounding box on the RGB frame
            box = track.bounding_box(track_info[index].landmarks, track_info[index].landmarks.Length);
            track.draw_rotated_box(ref cv_rgb, ref box, new Scalar(0, 255, 0));
        }
        Mat depth_img = new Mat();
        // Scale 16-bit depth into a displayable 8-bit image
        // (4500 presumably is the sensor's max range — TODO confirm).
        cv_depth.ConvertTo(depth_img, MatType.CV_8UC1, 255.0 / 4500);
        string msg_depth = "depth score is:" + depth_score.ToString();
        Cv2.PutText(depth_img, msg_depth, new Point(20, 50), HersheyFonts.HersheyComplex, 1, new Scalar(255, 100, 0));
        string msg_rgb = "rgb score is:" + rgb_score.ToString();
        Cv2.PutText(cv_rgb, msg_rgb, new Point(20, 50), HersheyFonts.HersheyComplex, 1, new Scalar(255, 100, 0));
        rgb_win.ShowImage(cv_rgb);
        depth_win.ShowImage(depth_img);
        int key = Cv2.WaitKey(1);
        depth_img.Release();
        // BUGFIX: the loop previously had no exit, making all cleanup below
        // unreachable. ESC quits, consistent with rgb_depth_liveness_check_orbe.
        if (27 == key) { break; }
    }
    Marshal.FreeHGlobal(ptT);
    cv_rgb.Release();
    cv_depth.Release();
    Cv2.DestroyWindow("rgb");
    Cv2.DestroyWindow("depth");
    HjimiCamera.hjimi_release(phjimi); // paired with new_hjimi
    return(true);
}
// Dual-lens RGB + depth silent liveness detection (OpenCV frames), adapted for
// the Orbbec mini dual-lens camera. Grabs frames, runs the SDK liveness check,
// draws detected face boxes on the RGB frame and shows both streams.
// Press ESC to exit.
public bool rgb_depth_liveness_check_orbe()
{
    int faceNum = 2;          // capacity: maximum faces the native buffer can hold
    int face_size = faceNum;  // in: allocated slots; out: faces actually detected
    TrackFaceInfo[] track_info = new TrackFaceInfo[faceNum];
    for (int i = 0; i < faceNum; i++)
    {
        track_info[i] = new TrackFaceInfo();
        track_info[i].landmarks = new int[144];
        track_info[i].headPose = new float[3];
        track_info[i].face_id = 0;
        track_info[i].score = 0;
    }
    int sizeTrack = Marshal.SizeOf(typeof(TrackFaceInfo));
    IntPtr ptT = Marshal.AllocHGlobal(sizeTrack * faceNum);
    IntPtr pOrbe = new_orbe(); // paired with orbe_release below
    Mat rgb_mat = new Mat(480, 640, MatType.CV_8UC3);
    Mat depth_mat = new Mat(480, 640, MatType.CV_16UC1);
    float rgb_score = 0;
    float depth_score = 0;
    var window_depth = new Window("depth_face");
    var window_rgb = new Window("rgb_face");
    // Hoisted out of the loop: FaceTrack carries no per-frame state here.
    FaceTrack track = new FaceTrack();
    while (true)
    {
        RotatedRect box;
        open_orbe(pOrbe, rgb_mat.CvPtr, depth_mat.CvPtr);
        Console.WriteLine("rgb_mat rows {0} depth_mat rows {1}", rgb_mat.Rows, depth_mat.Rows);
        if (!rgb_mat.Empty() && !depth_mat.Empty())
        {
            // BUGFIX: the SDK writes the detected count back into face_size, so
            // the capacity must be restored before every call or it shrinks
            // permanently after the first frame with fewer than faceNum faces.
            face_size = faceNum;
            IntPtr resptr = rgb_depth_liveness_check_faceinfo(rgb_mat.CvPtr, depth_mat.CvPtr,
                ref rgb_score, ref depth_score, ref face_size, ptT);
            string res = Marshal.PtrToStringAnsi(resptr);
            Console.WriteLine("res is:{0}", res);
            // Defensive clamp in case the SDK ever reports more faces than allocated.
            int detected = Math.Min(face_size, faceNum);
            for (int index = 0; index < detected; index++)
            {
                IntPtr ptrTrack = (IntPtr)(ptT.ToInt64() + sizeTrack * index);
                track_info[index] = (TrackFaceInfo)Marshal.PtrToStructure(ptrTrack, typeof(TrackFaceInfo));
                Console.WriteLine("in Liveness::usb_track face_id is {0}:", track_info[index].face_id);
                Console.WriteLine("landmarks is:");
                // Print the first 10 landmark values as a sample.
                Console.WriteLine("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},",
                    track_info[index].landmarks[0], track_info[index].landmarks[1],
                    track_info[index].landmarks[2], track_info[index].landmarks[3],
                    track_info[index].landmarks[4], track_info[index].landmarks[5],
                    track_info[index].landmarks[6], track_info[index].landmarks[7],
                    track_info[index].landmarks[8], track_info[index].landmarks[9]);
                for (int k = 0; k < track_info[index].headPose.Length; k++)
                {
                    Console.WriteLine("angle is:{0:f}", track_info[index].headPose[k]);
                }
                Console.WriteLine("score is:{0:f}", track_info[index].score);
                // Rotation angle of the face box
                Console.WriteLine("mAngle is:{0:f}", track_info[index].box.mAngle);
                // Face width
                Console.WriteLine("mWidth is:{0:f}", track_info[index].box.mWidth);
                // Center X,Y coordinates
                Console.WriteLine("mCenter_x is:{0:f}", track_info[index].box.mCenter_x);
                Console.WriteLine("mCenter_y is:{0:f}", track_info[index].box.mCenter_y);
                // Draw the face bounding box on the RGB frame
                box = track.bounding_box(track_info[index].landmarks, track_info[index].landmarks.Length);
                track.draw_rotated_box(ref rgb_mat, ref box, new Scalar(0, 255, 0));
            }
        }
        string msg_rgb = "rgb score is:" + rgb_score.ToString();
        Cv2.PutText(rgb_mat, msg_rgb, new Point(20, 50), HersheyFonts.HersheyComplex, 1, new Scalar(255, 100, 0));
        window_rgb.ShowImage(rgb_mat);
        //Cv2.ImShow("rgb_face", rgb_mat);
        string msg_depth = "depth score is:" + depth_score.ToString();
        Cv2.PutText(depth_mat, msg_depth, new Point(20, 50), HersheyFonts.HersheyComplex, 1, new Scalar(255, 255, 255));
        window_depth.ShowImage(depth_mat);
        //Cv2.ImShow("depth_face", depth_mat);
        // ESC exits the capture loop
        int c = Cv2.WaitKey(2);
        if (27 == c)
        {
            break;
        }
    }
    orbe_release(pOrbe); // paired with new_orbe
    Marshal.FreeHGlobal(ptT);
    rgb_mat.Release();
    depth_mat.Release();
    Cv2.DestroyWindow("depth_face");
    Cv2.DestroyWindow("rgb_face");
    return(true);
}
// Dual-lens RGB + IR silent liveness detection (the SDK uses OpenCV internally
// and fills a native TrackFaceInfo buffer). Opens two adjacent USB cameras,
// picks the larger frame as RGB and the smaller as IR, runs the liveness check
// and displays both annotated streams. Press ESC to exit.
public bool rgb_ir_liveness_check_mat()
{
    int faceNum = 2;          // capacity: maximum faces the native buffer can hold
    int face_size = faceNum;  // in: allocated slots; out: faces actually detected
    TrackFaceInfo[] track_info = new TrackFaceInfo[faceNum];
    for (int i = 0; i < faceNum; i++)
    {
        track_info[i] = new TrackFaceInfo();
        track_info[i].landmarks = new int[144];
        track_info[i].headPose = new float[3];
        track_info[i].face_id = 0;
        track_info[i].score = 0;
    }
    int sizeTrack = Marshal.SizeOf(typeof(TrackFaceInfo));
    IntPtr ptT = Marshal.AllocHGlobal(sizeTrack * faceNum);
    long ir_time = 0;
    // Device 0 is the OS-assigned USB camera index; in this demo 0 is the IR
    // camera. The mapping can differ per machine; indices generally run 0-10.
    int device = select_usb_device_id();
    VideoCapture camera1 = VideoCapture.FromCamera(device);
    if (!camera1.IsOpened())
    {
        Console.WriteLine("camera1 open error");
        // BUGFIX: free the native buffer on the early-exit path.
        Marshal.FreeHGlobal(ptT);
        return(false);
    }
    VideoCapture camera2 = VideoCapture.FromCamera(device + 1);
    if (!camera2.IsOpened())
    {
        Console.WriteLine("camera2 open error");
        // BUGFIX: release the already-opened camera and the native buffer.
        camera1.Release();
        Marshal.FreeHGlobal(ptT);
        return(false);
    }
    RotatedRect box;
    Mat frame1 = new Mat();
    Mat frame2 = new Mat();
    Mat rgb_mat = new Mat();
    Mat ir_mat = new Mat();
    var window_ir = new Window("ir_face");
    var window_rgb = new Window("rgb_face");
    // Hoisted out of the loop: FaceTrack carries no per-frame state here.
    FaceTrack track = new FaceTrack();
    while (true)
    {
        camera1.Read(frame1);
        camera2.Read(frame2);
        if (!frame1.Empty() && !frame2.Empty())
        {
            // The RGB stream is assumed to be the one with the larger frame
            // height — TODO confirm this holds for the target cameras.
            if (frame1.Size(0) > frame2.Size(0))
            {
                rgb_mat = frame1;
                ir_mat = frame2;
            }
            else
            {
                rgb_mat = frame2;
                ir_mat = frame1;
            }
            float rgb_score = 0;
            float ir_score = 0;
            // BUGFIX: the SDK writes the detected count back into face_size, so
            // the capacity must be restored before every call or it shrinks
            // permanently after the first frame with fewer than faceNum faces.
            face_size = faceNum;
            IntPtr ptr = rgb_ir_liveness_check_faceinfo(rgb_mat.CvPtr, ir_mat.CvPtr,
                ref rgb_score, ref ir_score, ref face_size, ref ir_time, ptT);
            string res = Marshal.PtrToStringAnsi(ptr);
            Console.WriteLine("res is:{0}", res);
            string msg_ir = "ir score is:" + ir_score.ToString();
            Cv2.PutText(ir_mat, msg_ir, new Point(20, 50), HersheyFonts.HersheyComplex, 1, new Scalar(255, 100, 0));
            window_ir.ShowImage(ir_mat);
            Cv2.WaitKey(1);
            Console.WriteLine("{0}", msg_ir);
            string msg_rgb = "rgb score is:" + rgb_score.ToString();
            Cv2.PutText(rgb_mat, msg_rgb, new Point(20, 50), HersheyFonts.HersheyComplex, 1, new Scalar(255, 100, 0));
            // Defensive clamp in case the SDK ever reports more faces than allocated.
            int detected = Math.Min(face_size, faceNum);
            for (int index = 0; index < detected; index++)
            {
                IntPtr ptrTrack = (IntPtr)(ptT.ToInt64() + sizeTrack * index);
                track_info[index] = (TrackFaceInfo)Marshal.PtrToStructure(ptrTrack, typeof(TrackFaceInfo));
                Console.WriteLine("face_id is {0}:", track_info[index].face_id);
                Console.WriteLine("landmarks is:");
                // Print the first 10 landmark values as a sample.
                Console.WriteLine("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},",
                    track_info[index].landmarks[0], track_info[index].landmarks[1],
                    track_info[index].landmarks[2], track_info[index].landmarks[3],
                    track_info[index].landmarks[4], track_info[index].landmarks[5],
                    track_info[index].landmarks[6], track_info[index].landmarks[7],
                    track_info[index].landmarks[8], track_info[index].landmarks[9]);
                for (int k = 0; k < track_info[index].headPose.Length; k++)
                {
                    Console.WriteLine("angle is:{0:f}", track_info[index].headPose[k]);
                }
                Console.WriteLine("score is:{0:f}", track_info[index].score);
                // Rotation angle of the face box
                Console.WriteLine("mAngle is:{0:f}", track_info[index].box.mAngle);
                // Face width
                Console.WriteLine("mWidth is:{0:f}", track_info[index].box.mWidth);
                // Center X,Y coordinates
                Console.WriteLine("mCenter_x is:{0:f}", track_info[index].box.mCenter_x);
                Console.WriteLine("mCenter_y is:{0:f}", track_info[index].box.mCenter_y);
                // Draw the face bounding box on the RGB frame
                box = track.bounding_box(track_info[index].landmarks, track_info[index].landmarks.Length);
                track.draw_rotated_box(ref rgb_mat, ref box, new Scalar(0, 255, 0));
            }
            window_rgb.ShowImage(rgb_mat);
            Console.WriteLine("{0}", msg_rgb);
        }
        // BUGFIX: the loop previously had no exit, making all cleanup below
        // unreachable. ESC quits, consistent with rgb_depth_liveness_check_orbe.
        int key = Cv2.WaitKey(1);
        if (27 == key) { break; }
    }
    Marshal.FreeHGlobal(ptT);
    rgb_mat.Release();
    ir_mat.Release();
    frame1.Release();
    frame2.Release();
    // BUGFIX: release the capture devices (previously leaked).
    camera1.Release();
    camera2.Release();
    Cv2.DestroyWindow("ir_face");
    Cv2.DestroyWindow("rgb_face");
    return(true);
}