/// <summary>
/// Camera preview thread.
/// </summary>
private void Play_Camera()
{
    while (bPlayflag)
    {
        Mat cFrame = new Mat();
        try
        {
            m_vCapture.Read(cFrame);
            // Pace the loop to the camera frame rate; fall back to ~30 fps if Fps is reported as 0.
            double fps = m_vCapture.Fps;
            int sleepTime = fps > 0 ? (int)Math.Round(1000 / fps) : 33;
            Thread.Sleep(sleepTime); // sleep on the background thread instead of calling Cv2.WaitKey
            if (cFrame.Empty())
            {
                continue;
            }
            Cv2.Flip(cFrame, cFrame, OpenCvSharp.FlipMode.Y); // mirror the preview horizontally

            #region Face tracking
            // Detect faces and get their bounding rectangles
            MultiFaceModel multiFaceInfo = arcFace.FaceDetection(cFrame.ToBitmap());
            if (multiFaceInfo.FaceInfoList.Count > 0)
            {
                Mrect mrect = multiFaceInfo.FaceInfoList[0].faceRect;
                Rect cMaxrect = new Rect(mrect.left, mrect.top, mrect.right - mrect.left, mrect.bottom - mrect.top);
                // Draw the face rectangle
                Scalar color = new Scalar(0, 0, 255);
                Cv2.Rectangle(cFrame, cMaxrect, color, 1);
                if (bTakePicture) // take a snapshot of the face region
                {
                    Mat cHead = new Mat(cFrame, cMaxrect);
                    Cv2.ImWrite(PicSavePath, cHead);
                    SetPictureBoxImage(pic_head, cHead.ToBitmap());
                    cHead.Release();
                    bTakePicture = false;
                }
            }
            multiFaceInfo.Dispose();
            #endregion

            SetPictureBoxImage(pic_cam, cFrame.ToBitmap());
        }
        catch (Exception)
        {
            bTakePicture = false;
        }
        finally
        {
            cFrame.Release(); // release the frame whether or not this iteration succeeded
        }
    }
}
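SetPictureBoxImage is called above but not shown in the original post. A minimal sketch of such a helper, assuming it only needs to marshal the update onto the UI thread and dispose the previously displayed Bitmap (the Invoke pattern and the disposal are assumptions, not the original implementation):

// Hypothetical helper (not from the original post): updates a PictureBox from the
// camera thread and disposes the previously displayed image to avoid GDI leaks.
private void SetPictureBoxImage(PictureBox pic, Bitmap bmp)
{
    if (pic.InvokeRequired)
    {
        // Marshal the call back onto the UI thread
        pic.BeginInvoke(new Action(() => SetPictureBoxImage(pic, bmp)));
        return;
    }
    Image old = pic.Image;
    pic.Image = bmp;
    old?.Dispose();
}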
/// <summary>
/// https://github.com/Thxzzzzz/ArcFaceSharp
/// </summary>
/// <param name="videoPath">path to a local video file, e.g. @"D:\ca1af880d3653be69ed6d9ce55058c21.mp4"</param>
/// <param name="videoTitle">title of the preview window</param>
static void RunOnArcFace(string videoPath, string videoTitle)
{
    // Open the video source
    VideoCapture capture = new VideoCapture();
    capture.Open(videoPath);
    Window win = new Window(videoTitle);
    ArcFaceCore arcFaceCore = new ArcFaceCore(APP_ID, SDK_KEY, ArcFaceDetectMode.VIDEO,
        ArcFaceFunction.FACE_DETECT | ArcFaceFunction.FACE_RECOGNITION | ArcFaceFunction.AGE |
        ArcFaceFunction.FACE_3DANGLE | ArcFaceFunction.GENDER,
        DetectionOrientPriority.ASF_OP_0_ONLY, 50, 32);

    while (true)
    {
        Mat image = new Mat();
        capture.Read(image);
        if (image.Empty())
        {
            break; // end of the video file
        }
        Bitmap bitmap = image.ToBitmap();
        ImageData imageData = ImageDataConverter.ConvertToImageData(bitmap);

        // Face detection
        MultiFaceModel multiFaceModel = arcFaceCore.FaceDetection(imageData, false);

        // Face attribute processing: must be called before querying age, gender or 3D angle
        arcFaceCore.FaceProcess(imageData, multiFaceModel);

        // Age information
        List<int> ageList = arcFaceCore.GetAge();
        foreach (var item in ageList)
        {
            Console.WriteLine("Age:" + item);
        }

        // Gender information
        List<int> genderList = arcFaceCore.GetGender();
        foreach (var item in genderList)
        {
            Console.WriteLine("Sex:" + item);
        }

        // 3D face angle information (pose per detected face)
        List<Face3DAngleModel> face3DAngleList = arcFaceCore.GetFace3DAngle();

        // Feature extraction: take one of the faces returned by the detection call
        if (multiFaceModel.FaceInfoList.Count > 0)
        {
            AsfSingleFaceInfo asfSingleFaceInfo = multiFaceModel.FaceInfoList.First();
            try
            {
                AsfFaceFeature asfFaceFeature = arcFaceCore.FaceFeatureExtract(imageData, ref asfSingleFaceInfo);
            }
            catch (ResultCodeException e)
            {
                Console.WriteLine(e.ResultCode);
            }
        }

        win.Image = image;
        Cv2.WaitKey(1); // let the preview window repaint

        // Dispose ImageData when done, otherwise native memory leaks
        imageData.Dispose();
        // Bitmaps must be disposed as well
        bitmap.Dispose();
        image.Release();
    }

    // Destroy the engine after the loop finishes; in the original it was disposed inside
    // the loop, which would invalidate the engine after the first frame
    arcFaceCore.Dispose();
    capture.Release();
    win.Dispose();
}
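A usage sketch, reusing the sample path from the original comment (the window title is arbitrary):

RunOnArcFace(@"D:\ca1af880d3653be69ed6d9ce55058c21.mp4", "ArcFace video demo");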
public void TestMethod1()
{
    // APP_ID and SDK_KEY issued for the SDK
    string APP_ID = @"7NK7KSpfgxdqb74r8nvy36kDwH3wVGstr2LHGHBxQ8LY";
    string SDK_KEY = @"3fD8vKYMNfPzKHMoqppjA9chGh2aGkWzUQNFiAj7Yq63";

    // Load the test images
    Bitmap heying = new Bitmap(@"heying.jpg");
    Bitmap face1 = new Bitmap(@"ldh0.jpg");
    Bitmap face2 = new Bitmap(@"ldh1.jpg");
    Bitmap face3 = new Bitmap(@"zxy0.jpg");

    // Create the ArcFaceCore object; the constructor arguments initialize the ArcFace engine
    ArcFaceCore arcFace = new ArcFaceCore(APP_ID, SDK_KEY, ArcFaceDetectMode.IMAGE,
        ArcFaceFunction.FACE_DETECT | ArcFaceFunction.FACE_RECOGNITION | ArcFaceFunction.AGE |
        ArcFaceFunction.FACE_3DANGLE | ArcFaceFunction.GENDER,
        DetectionOrientPriority.ASF_OP_0_ONLY, 50, 32);

    // Convert the Bitmap to ImageData
    ImageData heyingImgData = ImageDataConverter.ConvertToImageData(heying);

    // Face detection
    // A Bitmap can be passed directly and is converted to ImageData internally,
    // but the ImageData overload is recommended
    MultiFaceModel multiFaceB = arcFace.FaceDetection(heying);
    // Passing ImageData (the recommended overload)
    MultiFaceModel multiFace = arcFace.FaceDetection(heyingImgData);

    // Face attribute processing (age / gender / 3D angle); per the official docs,
    // at most 4 faces are processed and any extra faces are reported as unknown
    arcFace.FaceProcess(heyingImgData, multiFace);
    // Age information
    List<int> ageList = arcFace.GetAge();
    // Gender information
    List<int> genderList = arcFace.GetGender();
    // 3D face angle information
    List<Face3DAngleModel> face3DAngleList = arcFace.GetFace3DAngle();

    // Convert the first image to ImageData
    ImageData faceData1 = ImageDataConverter.ConvertToImageData(face1);
    // Detect the faces in the first image
    MultiFaceModel multiFace1 = arcFace.FaceDetection(faceData1);
    // Take the first face returned for the first image
    AsfSingleFaceInfo faceInfo1 = multiFace1.FaceInfoList.First();
    // Extract that face's feature
    AsfFaceFeature faceFeature1 = arcFace.FaceFeatureExtract(faceData1, ref faceInfo1);

    ImageData faceData2 = ImageDataConverter.ConvertToImageData(face2);
    // Detect the faces in the second image
    MultiFaceModel multiFace2 = arcFace.FaceDetection(faceData2);
    // Take the first face returned for the second image
    AsfSingleFaceInfo faceInfo2 = multiFace2.FaceInfoList.First();
    // Extract that face's feature
    AsfFaceFeature faceFeature2 = arcFace.FaceFeatureExtract(faceData2, ref faceInfo2);

    // Compare face1 and face2; returns a similarity score between 0 and 1
    float result = arcFace.FaceCompare(faceFeature1, faceFeature2);

    ImageData faceData3 = ImageDataConverter.ConvertToImageData(face3);
    // Detect the faces in the third image
    MultiFaceModel multiFace3 = arcFace.FaceDetection(faceData3);
    // Take the first face returned for the third image
    AsfSingleFaceInfo faceInfo3 = multiFace3.FaceInfoList.First();
    // Extract that face's feature
    AsfFaceFeature faceFeature3 = arcFace.FaceFeatureExtract(faceData3, ref faceInfo3);

    // Compare face1 and face3; returns a similarity score between 0 and 1
    float result2 = arcFace.FaceCompare(faceFeature1, faceFeature3);

    // Release and destroy the engine
    arcFace.Dispose();
    // Dispose every ImageData when done, otherwise native memory leaks
    heyingImgData.Dispose();
    faceData1.Dispose();
    faceData2.Dispose();
    faceData3.Dispose();
    // Bitmaps must be disposed as well
    heying.Dispose();
    face1.Dispose();
    face2.Dispose();
    face3.Dispose();
}
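FaceCompare returns a similarity in [0, 1]. Judging by the file names, ldh0.jpg and ldh1.jpg should be the same person and zxy0.jpg a different one, so the test could assert on the two scores just before the Dispose calls. The sketch below assumes MSTest, and the 0.8 threshold is an assumption to tune per application, not a value from the original test:

// Hypothetical assertions (MSTest assumed); the threshold value is an assumption.
const float similarityThreshold = 0.8f;
Assert.IsTrue(result >= similarityThreshold, "ldh0.jpg vs ldh1.jpg: expected to match");
Assert.IsTrue(result2 < similarityThreshold, "ldh0.jpg vs zxy0.jpg: expected not to match");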
private void btn_compare_Click(object sender, EventArgs e)
{
    // Image-to-image comparison
    ArcFaceCore arcFaceImg = new ArcFaceCore(APPID, FT_SDKKEY, ArcFaceDetectMode.IMAGE,
        ArcFaceFunction.FACE_DETECT | ArcFaceFunction.FACE_RECOGNITION | ArcFaceFunction.AGE |
        ArcFaceFunction.FACE_3DANGLE | ArcFaceFunction.GENDER,
        DetectionOrientPriority.ASF_OP_0_ONLY, 1, 16);

    Bitmap camImg = new Bitmap(@"E:\CAM.JPG");
    Bitmap idCardImg = new Bitmap(@"E:\IDCARDIMG.JPG");

    // Convert both Bitmaps to ImageData
    ImageData camImgData = ImageDataConverter.ConvertToImageData(camImg);
    ImageData idCardImgData = ImageDataConverter.ConvertToImageData(idCardImg);
    try
    {
        // Detect the faces in the camera snapshot
        MultiFaceModel camImgMultiFace = arcFaceImg.FaceDetection(camImgData);
        // Take the first face returned
        AsfSingleFaceInfo camImgfaceInfo = camImgMultiFace.FaceInfoList.First();
        // Extract its feature
        AsfFaceFeature asfFaceFeatureCam = arcFaceImg.FaceFeatureExtract(camImgData, ref camImgfaceInfo);

        // Same steps for the ID-card photo
        MultiFaceModel idCardImgMultiFace = arcFaceImg.FaceDetection(idCardImgData);
        AsfSingleFaceInfo idCardImgfaceInfo = idCardImgMultiFace.FaceInfoList.First();
        AsfFaceFeature asfFaceFeatureIdCard = arcFaceImg.FaceFeatureExtract(idCardImgData, ref idCardImgfaceInfo);

        // Similarity score between 0 and 1
        float ret = arcFaceImg.FaceCompare(asfFaceFeatureCam, asfFaceFeatureIdCard);
        if (ret > 0.6)
        {
            lbl_msg.ForeColor = Color.Green;
            lbl_msg.Text = "Face match succeeded - similarity: " + ret;
            // Stop the preview: let Play_Camera exit its loop instead of calling Thread.Abort
            bPlayflag = false;
            ThreadCam?.Join();
            m_vCapture.Release();
            btn_play.Text = "Open camera";
        }
        else
        {
            lbl_msg.ForeColor = Color.Red;
            lbl_msg.Text = "Face match failed - similarity: " + ret;
        }
    }
    catch (Exception ex)
    {
        lbl_msg.ForeColor = Color.Red;
        lbl_msg.Text = "Face match failed with exception: " + ex.Message;
    }
    finally
    {
        // Release and destroy the engine
        arcFaceImg.Dispose();
        // Dispose ImageData when done, otherwise native memory leaks
        camImgData.Dispose();
        idCardImgData.Dispose();
        // Bitmaps must be disposed as well
        camImg.Dispose();
        idCardImg.Dispose();
        GC.Collect();
    }
}
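The camera-related members used above (bPlayflag, ThreadCam, m_vCapture, btn_play) are not defined in these snippets. A minimal sketch of how the preview thread might be started and stopped, assuming those field names (this is not the original author's code):

// Hypothetical fields and start/stop handler; the names mirror the snippets above but are assumptions.
private VideoCapture m_vCapture;
private Thread ThreadCam;
private volatile bool bPlayflag;

private void btn_play_Click(object sender, EventArgs e)
{
    if (!bPlayflag)
    {
        m_vCapture = new VideoCapture(0);   // default camera
        bPlayflag = true;
        ThreadCam = new Thread(Play_Camera) { IsBackground = true };
        ThreadCam.Start();
        btn_play.Text = "Close camera";
    }
    else
    {
        bPlayflag = false;                  // Play_Camera exits its while loop
        ThreadCam?.Join();
        m_vCapture.Release();
        btn_play.Text = "Open camera";
    }
}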