/// <summary>
/// Extracts a face feature from an image; with multiple faces the face at
/// <paramref name="faceIndex"/> (first face by default) is used.
/// </summary>
/// <param name="faceEngine">Face engine instance.</param>
/// <param name="image">Source image; may be null.</param>
/// <param name="singleFaceInfo">[out] Rect/orientation of the selected face.</param>
/// <param name="retCode">SDK return code; 0 means success.</param>
/// <param name="faceIndex">Index of the face to extract (default 0).</param>
/// <returns>Extracted feature; an empty feature when detection or extraction fails.</returns>
public static FaceFeature ExtractFeature(FaceEngine faceEngine, Image image, out SingleFaceInfo singleFaceInfo, ref int retCode, int faceIndex = 0)
{
    FaceFeature faceFeature = new FaceFeature();
    singleFaceInfo = new SingleFaceInfo();
    try
    {
        if (image == null)
        {
            return (faceFeature);
        }
        MultiFaceInfo multiFaceInfo = new MultiFaceInfo();
        // Face detection
        retCode = faceEngine.ASFDetectFacesEx(image, out multiFaceInfo);
        if (retCode != 0)
        {
            return (faceFeature);
        }
        // Guard the requested index explicitly instead of relying on an
        // IndexOutOfRangeException being swallowed by the catch block (which
        // previously left retCode == 0 alongside an empty feature).
        if (faceIndex < 0 || faceIndex >= multiFaceInfo.faceNum)
        {
            retCode = ErrorCodeUtil.FACEINDEX_INVALID;
            return (faceFeature);
        }
        singleFaceInfo.faceOrient = multiFaceInfo.faceOrients[faceIndex];
        singleFaceInfo.faceRect = multiFaceInfo.faceRects[faceIndex];
        retCode = faceEngine.ASFFaceFeatureExtractEx(image, multiFaceInfo, out faceFeature, faceIndex);
    }
    catch (Exception ex)
    {
        LogUtil.LogInfo(typeof(FaceUtil), ex);
    }
    return (faceFeature);
}
/// <summary>
/// Extracts the feature of a single face, running mask detection first so
/// extraction can use the mask-aware path when a mask is worn.
/// </summary>
/// <param name="faceEngine">Face recognition engine.</param>
/// <param name="image">Source image.</param>
/// <param name="singleFaceInfo">Info of the single face to extract.</param>
/// <returns>The face feature, or null when mask detection fails or extraction is skipped.</returns>
public static FaceFeature ExtractFeature(FaceEngine faceEngine, Image image, SingleFaceInfo singleFaceInfo)
{
    FaceFeature faceFeature = null;
    try
    {
        // Wrap the single face into a one-element multi-face structure.
        MultiFaceInfo multiFaceInfo = new MultiFaceInfo
        {
            faceNum = 1,
            faceOrients = new[] { singleFaceInfo.faceOrient },
            faceRects = new[] { singleFaceInfo.faceRect },
            faceDataInfoList = new[] { singleFaceInfo.faceDataInfo }
        };
        // Mask detection must succeed before extraction.
        int retCode = -1;
        MaskInfo maskInfo = MaskEstimation(faceEngine, image, singleFaceInfo, out retCode);
        if (retCode != 0 || maskInfo.maskArray == null || maskInfo.maskArray.Length <= 0)
        {
            return (faceFeature);
        }
        // A value of 1 in the mask array means a mask is worn.
        bool isMask = maskInfo.maskArray[0].Equals(1);
        faceEngine.ASFFaceFeatureExtractEx(image, multiFaceInfo, ASF_RegisterOrNot.ASF_RECOGNITION, out faceFeature, 0, isMask);
    }
    catch (Exception ex)
    {
        LogUtil.LogInfo(typeof(FaceUtil), ex);
    }
    return (faceFeature);
}
/// <summary>
/// Age detection over the faces found by a previous detection call.
/// </summary>
/// <param name="faceEngine">Face engine instance.</param>
/// <param name="image">Image data.</param>
/// <param name="multiFaceInfo">Face detection result.</param>
/// <param name="retCode">SDK return code; 0 means success.</param>
/// <returns>Age detection result; empty when no faces or on failure.</returns>
public static AgeInfo AgeEstimation(FaceEngine faceEngine, Image image, MultiFaceInfo multiFaceInfo, out int retCode)
{
    retCode = -1;
    AgeInfo ageInfo = new AgeInfo();
    try
    {
        if (multiFaceInfo.faceNum > 0)
        {
            // Run the age analysis pass.
            retCode = faceEngine.ASFProcessEx(image, multiFaceInfo, FaceEngineMask.ASF_AGE);
            if (retCode == 0)
            {
                // Fetch the age results produced by the process call.
                retCode = faceEngine.ASFGetAge(out ageInfo);
            }
        }
    }
    catch (Exception ex)
    {
        LogUtil.LogInfo(typeof(FaceUtil), ex);
    }
    return (ageInfo);
}
/// <summary>
/// Checks the image quality of a single face; returns true when the quality
/// score exceeds the recommended 0.35 threshold.
/// (The previous XML doc incorrectly described this as feature extraction.)
/// </summary>
/// <param name="faceEngine">Face recognition engine.</param>
/// <param name="image">Source image.</param>
/// <param name="singleFaceInfo">Info of the single face to check.</param>
/// <returns>True when the quality check passes; false otherwise.</returns>
public static bool ASFImageQualityDetectEx(FaceEngine faceEngine, Image image, SingleFaceInfo singleFaceInfo)
{
    bool qualityResult = false;
    try
    {
        // Wrap the single face into a one-element multi-face structure.
        MultiFaceInfo multiFaceInfo = new MultiFaceInfo();
        multiFaceInfo.faceNum = 1;
        multiFaceInfo.faceOrients = new int[1];
        multiFaceInfo.faceOrients[0] = singleFaceInfo.faceOrient;
        multiFaceInfo.faceRects = new MRECT[1];
        multiFaceInfo.faceRects[0] = singleFaceInfo.faceRect;
        // Image quality detection
        ImageQualityInfo imageQualityInfo = new ImageQualityInfo();
        int qualityRetCode = faceEngine.ASFImageQualityDetectEx(image, multiFaceInfo, out imageQualityInfo);
        // Guard the score array before indexing: the SDK may return an empty result,
        // which previously threw and was silently swallowed by the catch block.
        if (qualityRetCode == 0 && imageQualityInfo != null
            && imageQualityInfo.faceQualityValues != null
            && imageQualityInfo.faceQualityValues.Length > 0
            && imageQualityInfo.faceQualityValues[0] > 0.35)
        {
            qualityResult = true;
        }
    }
    catch (Exception ex)
    {
        LogUtil.LogInfo(typeof(FaceUtil), ex);
    }
    return (qualityResult);
}
/// <summary>
/// Infrared (IR) liveness detection for a single face.
/// </summary>
/// <param name="faceEngine">Face engine instance.</param>
/// <param name="image">IR image data.</param>
/// <param name="singleFaceInfo">Info of the single face to check.</param>
/// <param name="retCode">SDK return code; 0 means success.</param>
/// <returns>Liveness detection result; empty when the call fails.</returns>
public static LivenessInfo LivenessInfo_IR(FaceEngine faceEngine, Image image, SingleFaceInfo singleFaceInfo, out int retCode)
{
    LivenessInfo livenessInfo = new LivenessInfo();
    retCode = -1;
    try
    {
        // Wrap the single face into a one-element multi-face structure.
        MultiFaceInfo multiFaceInfo = new MultiFaceInfo
        {
            faceNum = 1,
            faceOrients = new[] { singleFaceInfo.faceOrient },
            faceRects = new[] { singleFaceInfo.faceRect },
            faceDataInfoList = new[] { singleFaceInfo.faceDataInfo }
        };
        // Run the IR liveness analysis pass.
        retCode = faceEngine.ASFProcessEx_IR(image, multiFaceInfo, FaceEngineMask.ASF_IR_LIVENESS);
        if (retCode == 0)
        {
            // Fetch the IR liveness score produced by the process call.
            retCode = faceEngine.ASFGetLivenessScore_IR(out livenessInfo);
        }
    }
    catch (Exception ex)
    {
        LogUtil.LogInfo(typeof(FaceUtil), ex);
    }
    return (livenessInfo);
}
/// <summary>
/// Mask detection for a single face.
/// </summary>
/// <param name="faceEngine">Face engine instance.</param>
/// <param name="image">Image data.</param>
/// <param name="singleFaceInfo">Info of the single face to check.</param>
/// <param name="retCode">SDK return code; 0 means success.</param>
/// <returns>Mask detection result; empty when the call fails.</returns>
public static MaskInfo MaskEstimation(FaceEngine faceEngine, Image image, SingleFaceInfo singleFaceInfo, out int retCode)
{
    MaskInfo maskInfo = new MaskInfo();
    retCode = -1;
    try
    {
        // Wrap the single face into a one-element multi-face structure.
        MultiFaceInfo multiFaceInfo = new MultiFaceInfo
        {
            faceNum = 1,
            faceOrients = new[] { singleFaceInfo.faceOrient },
            faceRects = new[] { singleFaceInfo.faceRect },
            faceDataInfoList = new[] { singleFaceInfo.faceDataInfo }
        };
        // Run the mask detection pass.
        retCode = faceEngine.ASFProcessEx(image, multiFaceInfo, FaceEngineMask.ASF_MASKDETECT);
        if (retCode == 0)
        {
            // Fetch the mask results produced by the process call.
            retCode = faceEngine.ASFGetMask(out maskInfo);
        }
    }
    catch (Exception ex)
    {
        LogUtil.LogInfo(typeof(FaceUtil), ex);
    }
    return (maskInfo);
}
/// <summary>
/// Face attribute processing (age / gender / 3D angle). Supports up to 4 faces;
/// extras return unknown (liveness supports a single face only). IR images are
/// not supported by this entry point.
/// </summary>
/// <param name="image">Image data.</param>
/// <param name="multiFaceInfo">Detected faces to process.</param>
/// <returns>True when the SDK call succeeds.</returns>
public bool Process(ImageInfo image, MultiFaceInfo multiFaceInfo)
{
    // Strip every non-process capability from the combined mask; only the
    // attribute-analysis bits may be passed to ASFProcess.
    var excluded = ASF_Mask.ASF_FACE_DETECT | ASF_Mask.ASF_FACERECOGNITION
        | ASF_Mask.ASF_IMAGEQUALITY | ASF_Mask.ASF_IR_LIVENESS
        | ASF_Mask.ASF_FACESHELTER | ASF_Mask.ASF_UPDATE_FACEDATA;
    var processMask = CombinedMask & ~excluded;
    var result = ASFFunctions.ASFProcess(EngineHandler, image.width, image.height, image.format, image.imgData, multiFaceInfo.ASFMultiFaceInfo, (int)processMask);
    return (result == MResult.MOK);
}
/// <summary>
/// Single-face IR liveness processing (age / gender / 3D angle are not supported);
/// the first face is used by default.
/// Image data is passed in struct form, which is more compatible with images that
/// use wider byte alignment.
/// </summary>
/// <param name="image">Image to process.</param>
/// <param name="multiFaceInfo">Face rectangle information.</param>
/// <param name="combinedMask">Detection attribute mask.</param>
/// <param name="imageFormat">Pixel format; IR images default to grayscale.</param>
/// <returns>0 on success; negative values per the ErrorCodeUtil comments; other values per the vendor help center.</returns>
public int ASFProcessEx_IR(Image image, MultiFaceInfo multiFaceInfo, int combinedMask, ASF_ImagePixelFormat imageFormat = ASF_ImagePixelFormat.ASVL_PAF_GRAY)
{
    int retCode = -1;
    if (multiFaceInfo == null) { return(ErrorCodeUtil.MULPTIFACEINFO_IS_NULL); }
    if (image == null) { return(ErrorCodeUtil.IMAGE_IS_NULL); }
    ImageInfo imageInfo = new ImageInfo();
    // Decode the image; BGR24 goes through ReadBMP, everything else through the IR reader.
    imageInfo = ASF_ImagePixelFormat.ASVL_PAF_RGB24_B8G8R8.Equals(imageFormat) ? ImageUtil.ReadBMP(image) : ImageUtil.ReadBMP_IR(image);
    if (imageInfo == null) { return(ErrorCodeUtil.IMAGE_DATA_READ_FAIL); }
    // Marshal the managed face info into a native ASF_MultiFaceInfo struct.
    ASF_MultiFaceInfo multiFaceInfoStruct = new ASF_MultiFaceInfo();
    IntPtr pMultiFaceInfo = MemoryUtil.Malloc(MemoryUtil.SizeOf <ASF_MultiFaceInfo>());
    multiFaceInfoStruct.faceNum = multiFaceInfo.faceNum;
    if (multiFaceInfo.faceNum > 0)
    {
        // faceID may be absent; copy it into native memory only when provided.
        if (multiFaceInfo.faceID != null)
        {
            multiFaceInfoStruct.faceID = MemoryUtil.Malloc(multiFaceInfo.faceNum * MemoryUtil.SizeOf <int>());
            Marshal.Copy(multiFaceInfo.faceID, 0, multiFaceInfoStruct.faceID, multiFaceInfo.faceNum);
        }
        multiFaceInfoStruct.faceOrients = MemoryUtil.Malloc(multiFaceInfo.faceNum * MemoryUtil.SizeOf <int>());
        Marshal.Copy(multiFaceInfo.faceOrients, 0, multiFaceInfoStruct.faceOrients, multiFaceInfo.faceNum);
        // MRECT has no direct Marshal.Copy overload, so each rect is serialized
        // to bytes and the whole byte buffer is copied in one go.
        multiFaceInfoStruct.faceRects = MemoryUtil.Malloc(MemoryUtil.SizeOf <MRECT>() * multiFaceInfo.faceNum);
        byte[] allByte = new byte[MemoryUtil.SizeOf <MRECT>() * multiFaceInfo.faceNum];
        for (int i = 0; i < multiFaceInfo.faceNum; i++)
        {
            byte[] tempBytes = MemoryUtil.StructToBytes(multiFaceInfo.faceRects[i]);
            tempBytes.CopyTo(allByte, MemoryUtil.SizeOf <MRECT>() * i);
        }
        Marshal.Copy(allByte, 0, multiFaceInfoStruct.faceRects, allByte.Length);
    }
    MemoryUtil.StructureToPtr(multiFaceInfoStruct, pMultiFaceInfo);
    // Marshal the image data struct.
    ASF_ImageData asfInfoData = CommonUtil.TransImageDataStructByImageInfo(imageInfo);
    IntPtr pImageInfo = MemoryUtil.Malloc(MemoryUtil.SizeOf <ASF_ImageData>());
    MemoryUtil.StructureToPtr(asfInfoData, pImageInfo);
    // Call the native SDK entry point.
    retCode = ASFFunctions.ASFProcessEx_IR(pEngine, pImageInfo, pMultiFaceInfo, combinedMask);
    // Release all unmanaged allocations. Note faceID may be an unset (zero)
    // pointer here; FreeArray is passed it unconditionally.
    MemoryUtil.FreeArray(imageInfo.imgData, multiFaceInfoStruct.faceID, multiFaceInfoStruct.faceOrients, multiFaceInfoStruct.faceRects, pMultiFaceInfo, pImageInfo);
    return(retCode);
}
/// <summary>
/// Single-face feature extraction.
/// </summary>
/// <param name="image">Image.</param>
/// <param name="multiFaceInfo">Face rectangle container.</param>
/// <param name="faceFeature">[out] Extracted feature.</param>
/// <param name="faceIndex">Index of the face to extract.</param>
/// <param name="imageFormat">Pixel format of the image.</param>
/// <returns>0 on success; negative values per the ErrorCodeUtil comments; other values per the vendor help center.</returns>
public int ASFFaceFeatureExtractEx(Image image, MultiFaceInfo multiFaceInfo, out FaceFeature faceFeature, int faceIndex = 0, ASF_ImagePixelFormat imageFormat = ASF_ImagePixelFormat.ASVL_PAF_RGB24_B8G8R8)
{
    int retCode = -1;
    faceFeature = new FaceFeature();
    if (multiFaceInfo == null) { return(ErrorCodeUtil.MULPTIFACEINFO_IS_NULL); }
    if (faceIndex >= multiFaceInfo.faceNum) { return(ErrorCodeUtil.FACEINDEX_INVALID); }
    if (image == null) { return(ErrorCodeUtil.IMAGE_IS_NULL); }
    ImageInfo imageInfo = new ImageInfo();
    // Decode the image; BGR24 goes through ReadBMP, everything else through the IR reader.
    imageInfo = ASF_ImagePixelFormat.ASVL_PAF_RGB24_B8G8R8.Equals(imageFormat) ? ImageUtil.ReadBMP(image) : ImageUtil.ReadBMP_IR(image);
    if (imageInfo == null) { return(ErrorCodeUtil.IMAGE_DATA_READ_FAIL); }
    // Marshal the selected face into a native SingleFaceInfo struct.
    SingleFaceInfo singleFaceInfo = new SingleFaceInfo();
    IntPtr pSIngleFaceInfo = MemoryUtil.Malloc(MemoryUtil.SizeOf <SingleFaceInfo>());
    singleFaceInfo.faceRect = multiFaceInfo.faceRects[faceIndex];
    singleFaceInfo.faceOrient = multiFaceInfo.faceOrients[faceIndex];
    MemoryUtil.StructureToPtr(singleFaceInfo, pSIngleFaceInfo);
    // Native buffer that receives the feature result.
    IntPtr pAsfFaceFeature = MemoryUtil.Malloc(MemoryUtil.SizeOf <ASF_FaceFeature>());
    // Marshal the image data struct.
    ASF_ImageData asfInfoData = CommonUtil.TransImageDataStructByImageInfo(imageInfo);
    IntPtr pImageInfo = MemoryUtil.Malloc(MemoryUtil.SizeOf <ASF_ImageData>());
    MemoryUtil.StructureToPtr(asfInfoData, pImageInfo);
    // Call the native SDK entry point.
    retCode = ASFFunctions.ASFFaceFeatureExtractEx(pEngine, pImageInfo, pSIngleFaceInfo, pAsfFaceFeature);
    if (retCode != 0)
    {
        // Failure path: free everything before returning the error code.
        MemoryUtil.FreeArray(pSIngleFaceInfo, pAsfFaceFeature, imageInfo.imgData, pImageInfo);
        return(retCode);
    }
    // Copy the native feature bytes into managed memory BEFORE freeing the native buffers.
    ASF_FaceFeature asfFeature = MemoryUtil.PtrToStructure <ASF_FaceFeature>(pAsfFaceFeature);
    byte[] feature = new byte[asfFeature.featureSize];
    MemoryUtil.Copy(asfFeature.feature, feature, 0, asfFeature.featureSize);
    faceFeature.featureSize = asfFeature.featureSize;
    faceFeature.feature = feature;
    MemoryUtil.FreeArray(pSIngleFaceInfo, pAsfFaceFeature, imageInfo.imgData, pImageInfo);
    return(retCode);
}
/// <summary>
/// Face attribute processing (age / gender / 3D angle). Supports up to 4 faces;
/// extras return unknown (liveness supports a single face only). IR images are
/// not supported by this entry point.
/// </summary>
/// <param name="image">Image data.</param>
/// <param name="multiFaceInfo">Detected faces to process.</param>
/// <returns>True when the SDK call succeeds.</returns>
public bool ProcessEx(ImageInfo image, MultiFaceInfo multiFaceInfo)
{
    // 2.x engines do not provide the Ex entry point; delegate to Process.
    if (_version == 2)
    {
        return (Process(image, multiFaceInfo));
    }
    // Strip every non-process capability from the combined mask.
    var excluded = ASF_Mask.ASF_FACE_DETECT | ASF_Mask.ASF_FACERECOGNITION
        | ASF_Mask.ASF_IMAGEQUALITY | ASF_Mask.ASF_IR_LIVENESS
        | ASF_Mask.ASF_FACESHELTER | ASF_Mask.ASF_UPDATE_FACEDATA;
    var processMask = CombinedMask & ~excluded;
    var result = ASFFunctions.ASFProcessEx(EngineHandler, image.ASFImageData, multiFaceInfo.ASFMultiFaceInfo, (int)processMask);
    return (result == MResult.MOK);
}
/// <summary>
/// IR liveness processing; the engine supports a single face only, extras return unknown.
/// </summary>
/// <param name="image">IR image data.</param>
/// <param name="multiFaceInfo">Detected faces; updated in place on success.</param>
/// <returns>True when the SDK call succeeds.</returns>
public bool Process_IR(ImageInfo image, ref MultiFaceInfo multiFaceInfo)
{
    var nativeInfo = multiFaceInfo.ASFMultiFaceInfo;
    var result = ASFFunctions.ASFProcess_IR(EngineHandler, image.width, image.height, image.format, image.imgData, nativeInfo);
    if (result != MResult.MOK)
    {
        return (false);
    }
    // Write the processed native data back to the managed wrapper.
    multiFaceInfo.UpdateMultiFaceInfo(nativeInfo);
    return (true);
}
/// <summary>
/// Face detection / face tracking.
/// Image data is passed in struct form, which is more compatible with images that
/// use wider byte alignment.
/// </summary>
/// <param name="image">Image.</param>
/// <param name="multiFaceInfo">[out] Detected faces copied into managed arrays.</param>
/// <param name="imageFormat">Pixel format.</param>
/// <param name="detectModel">Detection model.</param>
/// <returns>0 on success; negative values per the ErrorCodeUtil comments; other values per the vendor help center.</returns>
public int ASFDetectFacesEx(Image image, out MultiFaceInfo multiFaceInfo, ASF_ImagePixelFormat imageFormat = ASF_ImagePixelFormat.ASVL_PAF_RGB24_B8G8R8, ASF_DetectModel detectModel = ASF_DetectModel.ASF_DETECT_MODEL_RGB)
{
    int retCode = -1;
    multiFaceInfo = new MultiFaceInfo();
    if (image == null) { return(ErrorCodeUtil.IMAGE_IS_NULL); }
    ImageInfo imageInfo = new ImageInfo();
    // Decode the image; BGR24 goes through ReadBMP, everything else through the IR reader.
    imageInfo = ASF_ImagePixelFormat.ASVL_PAF_RGB24_B8G8R8.Equals(imageFormat) ? ImageUtil.ReadBMP(image) : ImageUtil.ReadBMP_IR(image);
    if (imageInfo == null) { return(ErrorCodeUtil.IMAGE_DATA_READ_FAIL); }
    // Marshal the image data struct.
    ASF_ImageData asfInfoData = CommonUtil.TransImageDataStructByImageInfo(imageInfo);
    IntPtr pImageInfo = MemoryUtil.Malloc(MemoryUtil.SizeOf <ASF_ImageData>());
    MemoryUtil.StructureToPtr(asfInfoData, pImageInfo);
    // Native buffer that receives the detection result.
    ASF_MultiFaceInfo multiFaceInfoStruct = new ASF_MultiFaceInfo();
    IntPtr pMultiFaceInfo = MemoryUtil.Malloc(MemoryUtil.SizeOf <ASF_MultiFaceInfo>());
    // Call the native SDK entry point.
    retCode = ASFFunctions.ASFDetectFacesEx(pEngine, pImageInfo, pMultiFaceInfo);
    if (retCode != 0)
    {
        MemoryUtil.FreeArray(imageInfo.imgData, pMultiFaceInfo, pImageInfo);
        return(retCode);
    }
    multiFaceInfoStruct = MemoryUtil.PtrToStructure <ASF_MultiFaceInfo>(pMultiFaceInfo);
    // NOTE(review): the struct's interior pointers (faceID/faceOrients/faceRects)
    // are dereferenced BELOW, after this free of pMultiFaceInfo — presumably they
    // point into SDK-owned memory rather than the freed buffer; confirm against
    // the SDK/MemoryUtil documentation.
    MemoryUtil.FreeArray(imageInfo.imgData, pMultiFaceInfo, pImageInfo);
    // Copy native results into managed arrays.
    multiFaceInfo.faceNum = multiFaceInfoStruct.faceNum;
    if (multiFaceInfo.faceNum > 0)
    {
        // faceID is only populated in tracking scenarios (may be a null pointer).
        if (multiFaceInfoStruct.faceID != IntPtr.Zero)
        {
            multiFaceInfo.faceID = new int[multiFaceInfo.faceNum];
            Marshal.Copy(multiFaceInfoStruct.faceID, multiFaceInfo.faceID, 0, multiFaceInfo.faceNum);
        }
        multiFaceInfo.faceOrients = new int[multiFaceInfo.faceNum];
        Marshal.Copy(multiFaceInfoStruct.faceOrients, multiFaceInfo.faceOrients, 0, multiFaceInfo.faceNum);
        // MRECT entries are read one-by-one from the contiguous native array.
        multiFaceInfo.faceRects = new MRECT[multiFaceInfo.faceNum];
        for (int i = 0; i < multiFaceInfo.faceNum; i++)
        {
            multiFaceInfo.faceRects[i] = MemoryUtil.PtrToStructure <MRECT>(multiFaceInfoStruct.faceRects + MemoryUtil.SizeOf <MRECT>() * i);
        }
    }
    return(retCode);
}
/// <summary>
/// Updates face data after the face rectangles have been modified, so later
/// algorithm passes use the new data. Commonly used for dual-camera alignment:
/// the aligned rectangles are passed here before IR liveness detection.
/// </summary>
/// <param name="image">Image data.</param>
/// <param name="multiFaceInfo">Faces whose data should be refreshed.</param>
/// <returns>The (possibly updated) face info.</returns>
public MultiFaceInfo UpdateFaceDataEx(ImageInfo image, MultiFaceInfo multiFaceInfo)
{
    // Only 4.x engines expose ASFUpdateFaceDataEx; older versions return the input untouched.
    if (_version >= 4)
    {
        var nativeInfo = multiFaceInfo.ASFMultiFaceInfo;
        var result = ASFFunctions.ASFUpdateFaceDataEx(EngineHandler, image.ASFImageData, out nativeInfo);
        if (result == MResult.MOK)
        {
            multiFaceInfo.UpdateMultiFaceInfo(nativeInfo);
        }
    }
    return (multiFaceInfo);
}
/// <summary>
/// Detects faces in an image.
/// <para>Note: same functionality as DetectFaces, but takes the image data in
/// struct form, which is more compatible with higher-precision images.</para>
/// </summary>
/// <param name="image">Image data.</param>
/// <param name="process">Run the Process pass to extract attributes.</param>
/// <param name="detectonly">When true, only detect faces and skip the Process pass.</param>
/// <returns>Detected faces (with any harvested attributes), or null when detection fails.</returns>
public MultiFaceInfo DetectFacesEx(ImageInfo image, bool process = false, bool detectonly = false)
{
    // 2.x engines have no Ex entry point; fall back to DetectFaces.
    if (_version == 2)
    {
        return(DetectFaces(image));
    }
    var result = ASFFunctions.ASFDetectFacesEx(EngineHandler, image.ASFImageData, out ASF_MultiFaceInfo multiFaceInfo);
    if (result == MResult.MOK)
    {
        var entity = new MultiFaceInfo(multiFaceInfo);
        if (_version >= 4 || process)// 4.x needs the mask-wearing attribute
        {
            if (!detectonly)
            {
                // Strip non-process capabilities from the combined mask before processing.
                var processcombinMask = CombinedMask & ~ASF_Mask.ASF_FACE_DETECT & ~ASF_Mask.ASF_FACERECOGNITION & ~ASF_Mask.ASF_IMAGEQUALITY & ~ASF_Mask.ASF_IR_LIVENESS & ~ASF_Mask.ASF_FACESHELTER & ~ASF_Mask.ASF_UPDATE_FACEDATA;
                result = ASFFunctions.ASFProcessEx(EngineHandler, image.ASFImageData, multiFaceInfo, (int)processcombinMask);
                if (result == MResult.MOK)
                {
                    // Harvest each attribute requested by the mask; a failed getter
                    // simply leaves that attribute unset on the entity.
                    if (processcombinMask.HasFlag(ASF_Mask.ASF_AGE) && ASFFunctions.ASFGetAge(EngineHandler, out ASF_AgeInfo ageInfo) == MResult.MOK)
                    {
                        entity.SetAgeInfo(ageInfo);
                    }
                    if (processcombinMask.HasFlag(ASF_Mask.ASF_GENDER) && ASFFunctions.ASFGetGender(EngineHandler, out ASF_GenderInfo genderInfo) == MResult.MOK)
                    {
                        entity.SetGenderInfo(genderInfo);
                    }
                    if (processcombinMask.HasFlag(ASF_Mask.ASF_FACE3DANGLE) && ASFFunctions.ASFGetFace3DAngle(EngineHandler, out ASF_Face3DAngle face3DAngle) == MResult.MOK)
                    {
                        entity.SetFace3DAngle(face3DAngle);
                    }
                    if (processcombinMask.HasFlag(ASF_Mask.ASF_LIVENESS) && ASFFunctions.ASFGetLivenessScore(EngineHandler, out ASF_LivenessInfo livenessInfo) == MResult.MOK)
                    {
                        entity.SetLivenessInfo(livenessInfo);
                    }
                    if (processcombinMask.HasFlag(ASF_Mask.ASF_MASKDETECT) && ASFFunctions.ASFGetMask(EngineHandler, out ASF_MaskInfo maskInfo) == MResult.MOK)
                    {
                        entity.SetMaskInfo(maskInfo);
                    }
                    if (processcombinMask.HasFlag(ASF_Mask.ASF_FACELANDMARK) && ASFFunctions.ASFGetFaceLandMark(EngineHandler, out ASF_LandMarkInfo faceLandmark) == MResult.MOK)
                    {
                        entity.SetFaceLandmark(faceLandmark);
                    }
                }
            }
        }
        return(entity);
    }
    return(null);
}
/// <summary>
/// Face detection on an IR image.
/// </summary>
/// <param name="faceEngine">Face engine instance.</param>
/// <param name="image">IR bitmap.</param>
/// <returns>Detected faces, or null when the call throws.</returns>
public static MultiFaceInfo DetectFaceIR(FaceEngine faceEngine, Bitmap image)
{
    MultiFaceInfo detected = null;
    try
    {
        // IR frames are grayscale, so pass the GRAY pixel format explicitly.
        faceEngine.ASFDetectFacesEx(image, out detected, ASF_ImagePixelFormat.ASVL_PAF_GRAY);
    }
    catch (Exception ex)
    {
        LogUtil.LogInfo(typeof(FaceUtil), ex);
    }
    return (detected);
}
/// <summary>
/// Face detection on an RGB image using the engine defaults.
/// </summary>
/// <param name="faceEngine">Face engine instance.</param>
/// <param name="image">Source image.</param>
/// <returns>Detected faces; empty when detection throws before assigning a result.</returns>
public static MultiFaceInfo DetectFace(FaceEngine faceEngine, Image image)
{
    var detected = new MultiFaceInfo();
    try
    {
        faceEngine.ASFDetectFacesEx(image, out detected);
    }
    catch (Exception ex)
    {
        LogUtil.LogInfo(typeof(FaceUtil), ex);
    }
    return (detected);
}
/// <summary>
/// IR liveness processing (Ex variant); the engine supports a single face only,
/// extras return unknown. On success the liveness score is attached to the face info.
/// </summary>
/// <param name="image">IR image data.</param>
/// <param name="multiFaceInfo">Detected faces; updated in place on success.</param>
/// <returns>True when the SDK call succeeds.</returns>
public bool ProcessEx_IR(ImageInfo image, ref MultiFaceInfo multiFaceInfo)
{
    var nativeInfo = multiFaceInfo.ASFMultiFaceInfo;
    var result = ASFFunctions.ASFProcessEx_IR(EngineHandler, image.ASFImageData, nativeInfo);
    if (result != MResult.MOK)
    {
        return (false);
    }
    // Write the processed native data back, then attach the IR liveness score.
    multiFaceInfo.UpdateMultiFaceInfo(nativeInfo);
    var liveness = GetLivenessScore_IR();
    multiFaceInfo.SetLivenessInfo(liveness);
    return (true);
}
/// <summary>
/// Extracts the feature of a single face (quality check is assumed to have been
/// performed by the caller).
/// </summary>
/// <param name="faceEngine">Face recognition engine.</param>
/// <param name="image">Source image.</param>
/// <param name="singleFaceInfo">Info of the single face to extract.</param>
/// <returns>Extracted feature; empty when extraction fails.</returns>
public static FaceFeature ExtractFeature(FaceEngine faceEngine, Image image, SingleFaceInfo singleFaceInfo)
{
    var faceFeature = new FaceFeature();
    try
    {
        // Wrap the single face into a one-element multi-face structure.
        var multiFaceInfo = new MultiFaceInfo
        {
            faceNum = 1,
            faceOrients = new[] { singleFaceInfo.faceOrient },
            faceRects = new[] { singleFaceInfo.faceRect }
        };
        faceEngine.ASFFaceFeatureExtractEx(image, multiFaceInfo, out faceFeature);
    }
    catch (Exception ex)
    {
        LogUtil.LogInfo(typeof(FaceUtil), ex);
    }
    return (faceFeature);
}
/// <summary>
/// Picks the face with the largest bounding-box area from a detection result.
/// Ties resolve to the later face (matching the original &lt;= comparison).
/// </summary>
/// <param name="multiFaceInfo">Face detection result.</param>
/// <returns>Info of the largest face; defaults (empty rect, orientation 1) when none is found.</returns>
public static SingleFaceInfo GetMaxFace(MultiFaceInfo multiFaceInfo)
{
    SingleFaceInfo singleFaceInfo = new SingleFaceInfo();
    singleFaceInfo.faceRect = new MRECT();
    singleFaceInfo.faceOrient = 1;
    try
    {
        // Null/empty guard replaces the previous per-iteration try/catch,
        // which logged one exception per face on a missing rect array.
        if (multiFaceInfo == null || multiFaceInfo.faceRects == null)
        {
            return (singleFaceInfo);
        }
        int maxArea = 0;
        int index = -1;
        // Never read past the rect array even if faceNum disagrees with its length.
        int count = Math.Min(multiFaceInfo.faceNum, multiFaceInfo.faceRects.Length);
        for (int i = 0; i < count; i++)
        {
            MRECT rect = multiFaceInfo.faceRects[i];
            int area = (rect.right - rect.left) * (rect.bottom - rect.top);
            if (maxArea <= area)
            {
                maxArea = area;
                index = i;
            }
        }
        if (index != -1)
        {
            singleFaceInfo.faceRect = multiFaceInfo.faceRects[index];
            // Previously faceOrients was indexed unguarded outside the try and
            // could throw out of the method; guard it like faceID.
            if (multiFaceInfo.faceOrients != null && multiFaceInfo.faceOrients.Length > index)
            {
                singleFaceInfo.faceOrient = multiFaceInfo.faceOrients[index];
            }
            if (multiFaceInfo.faceID != null && multiFaceInfo.faceID.Length > index)
            {
                singleFaceInfo.faceID = multiFaceInfo.faceID[index];
            }
        }
    }
    catch (Exception ex)
    {
        LogUtil.LogInfo(typeof(FaceUtil), ex);
    }
    return (singleFaceInfo);
}
/// <summary>
/// Face detection followed by forehead landmark estimation; the landmark points
/// are attached to the returned face info.
/// </summary>
/// <param name="faceEngine">Face engine instance.</param>
/// <param name="image">Source image.</param>
/// <returns>Detected faces (with landmark points on success), or null when detection throws.</returns>
public static MultiFaceInfo DetectFaceAndLandMark(FaceEngine faceEngine, Image image)
{
    MultiFaceInfo multiFaceInfo = null;
    try
    {
        if (faceEngine.ASFDetectFacesEx(image, out multiFaceInfo) == 0)
        {
            // Landmark estimation over the freshly detected faces.
            LandMarkInfo landmarkInfo = LandMarkEstimation(faceEngine, image, multiFaceInfo, out int retCode);
            if (retCode == 0)
            {
                multiFaceInfo.pointAyy = landmarkInfo.pointAyy;
            }
        }
    }
    catch (Exception ex)
    {
        LogUtil.LogInfo(typeof(FaceUtil), ex);
    }
    return (multiFaceInfo);
}
/// <summary>
/// Finds the face with the largest bounding-box area in a detection result.
/// </summary>
/// <param name="multiFaceInfo">Face detection result.</param>
/// <returns>The largest face, or a default (empty rect, orientation 1) when none is found.</returns>
private static async Task <SingleFaceInfo> GetBiggestFaceAsync(MultiFaceInfo multiFaceInfo) => await Task.Run(() =>
{
    var biggest = new SingleFaceInfo(new Rect(), 1);
    if (multiFaceInfo.FaceNum <= 0)
    {
        return(biggest);
    }
    var maxArea = 0;
    // Keep the face whose rect covers the strictly largest area seen so far.
    foreach (var face in multiFaceInfo.Faces)
    {
        var rect = face.FaceRect;
        var area = (rect.Right - rect.Left) * (rect.Bottom - rect.Top);
        if (area > maxArea)
        {
            maxArea = area;
            biggest = face;
        }
    }
    return(biggest);
});
/// <summary>
/// Forehead-region (landmark) detection over previously detected faces.
/// </summary>
/// <param name="faceEngine">Face engine instance.</param>
/// <param name="image">Image data.</param>
/// <param name="multiFaceInfo">Face detection result.</param>
/// <param name="retCode">SDK return code; 0 means success.</param>
/// <returns>Landmark detection result; empty when no faces or on failure.</returns>
public static LandMarkInfo LandMarkEstimation(FaceEngine faceEngine, Image image, MultiFaceInfo multiFaceInfo, out int retCode)
{
    retCode = -1;
    LandMarkInfo landMarkInfo = new LandMarkInfo();
    try
    {
        if (multiFaceInfo.faceNum > 0)
        {
            // Run the landmark analysis pass.
            retCode = faceEngine.ASFProcessEx(image, multiFaceInfo, FaceEngineMask.ASF_FACELANDMARK);
            if (retCode == 0)
            {
                // Fetch the landmark results produced by the process call.
                retCode = faceEngine.ASFGetFaceLandMark(out landMarkInfo);
            }
        }
    }
    catch (Exception ex)
    {
        LogUtil.LogInfo(typeof(FaceUtil), ex);
    }
    return (landMarkInfo);
}
/// <summary>
/// Face 3D angle detection over previously detected faces.
/// </summary>
/// <param name="faceEngine">Face engine instance.</param>
/// <param name="image">Image data.</param>
/// <param name="multiFaceInfo">Face detection result.</param>
/// <param name="retCode">SDK return code; 0 means success.</param>
/// <returns>3D angle result; empty when no faces or on failure.</returns>
public static Face3DAngle Face3DAngleDetection(FaceEngine faceEngine, Image image, MultiFaceInfo multiFaceInfo, out int retCode)
{
    retCode = -1;
    Face3DAngle face3DAngle = new Face3DAngle();
    try
    {
        if (multiFaceInfo.faceNum > 0)
        {
            // Run the 3D angle analysis pass.
            retCode = faceEngine.ASFProcessEx(image, multiFaceInfo, FaceEngineMask.ASF_FACE3DANGLE);
            if (retCode == 0)
            {
                // Fetch the 3D angle results produced by the process call.
                retCode = faceEngine.ASFGetFace3DAngle(out face3DAngle);
            }
        }
    }
    catch (Exception ex)
    {
        LogUtil.LogInfo(typeof(FaceUtil), ex);
    }
    return (face3DAngle);
}
/// <summary>
/// Gender detection over previously detected faces.
/// </summary>
/// <param name="faceEngine">Face engine instance.</param>
/// <param name="image">Image data.</param>
/// <param name="multiFaceInfo">Face detection result.</param>
/// <param name="retCode">SDK return code; 0 means success.</param>
/// <returns>Gender detection result; empty when no faces or on failure.</returns>
public static GenderInfo GenderEstimation(FaceEngine faceEngine, Image image, MultiFaceInfo multiFaceInfo, out int retCode)
{
    retCode = -1;
    GenderInfo genderInfo = new GenderInfo();
    try
    {
        if (multiFaceInfo.faceNum > 0)
        {
            // Run the gender analysis pass.
            retCode = faceEngine.ASFProcessEx(image, multiFaceInfo, FaceEngineMask.ASF_GENDER);
            if (retCode == 0)
            {
                // Fetch the gender results produced by the process call.
                retCode = faceEngine.ASFGetGender(out genderInfo);
            }
        }
    }
    catch (Exception ex)
    {
        LogUtil.LogInfo(typeof(FaceUtil), ex);
    }
    return (genderInfo);
}
/// <summary>
/// Image quality detection. Recommended threshold: 0.35.
/// Image data is passed in struct form, which is more compatible with images that
/// use wider byte alignment.
/// </summary>
/// <param name="image">Image.</param>
/// <param name="multiFaceInfo">Face location information.</param>
/// <param name="imageQualityInfo">[out] Quality detection result.</param>
/// <param name="imageFormat">Pixel format.</param>
/// <param name="detectModel">Reserved field; use the default (ASF_DETECT_MODEL_RGB) in the current version.</param>
/// <returns>0 on success; negative values per the ErrorCodeUtil comments; other values per the vendor help center.</returns>
public int ASFImageQualityDetectEx(Image image, MultiFaceInfo multiFaceInfo, out ImageQualityInfo imageQualityInfo, ASF_ImagePixelFormat imageFormat = ASF_ImagePixelFormat.ASVL_PAF_RGB24_B8G8R8, ASF_DetectModel detectModel = ASF_DetectModel.ASF_DETECT_MODEL_RGB)
{
    int retCode = -1;
    imageQualityInfo = new ImageQualityInfo();
    if (multiFaceInfo == null) { return(ErrorCodeUtil.MULPTIFACEINFO_IS_NULL); }
    if (image == null) { return(ErrorCodeUtil.IMAGE_IS_NULL); }
    ImageInfo imageInfo = new ImageInfo();
    // Decode the image; BGR24 goes through ReadBMP, everything else through the IR reader.
    imageInfo = ASF_ImagePixelFormat.ASVL_PAF_RGB24_B8G8R8.Equals(imageFormat) ? ImageUtil.ReadBMP(image) : ImageUtil.ReadBMP_IR(image);
    if (imageInfo == null) { return(ErrorCodeUtil.IMAGE_DATA_READ_FAIL); }
    // Marshal the image data struct.
    ASF_ImageData asfInfoData = CommonUtil.TransImageDataStructByImageInfo(imageInfo);
    IntPtr pImageInfo = MemoryUtil.Malloc(MemoryUtil.SizeOf <ASF_ImageData>());
    MemoryUtil.StructureToPtr(asfInfoData, pImageInfo);
    // Native buffer that receives the quality result.
    ASF_ImageQualityInfo ImageQualityInfoStruct = new ASF_ImageQualityInfo();
    IntPtr pImageQualityInfo = MemoryUtil.Malloc(MemoryUtil.SizeOf <ASF_ImageQualityInfo>());
    // Marshal the managed face info into a native ASF_MultiFaceInfo struct.
    ASF_MultiFaceInfo multiFaceInfoStruct = new ASF_MultiFaceInfo();
    IntPtr pMultiFaceInfo = MemoryUtil.Malloc(MemoryUtil.SizeOf <ASF_MultiFaceInfo>());
    multiFaceInfoStruct.faceNum = multiFaceInfo.faceNum;
    if (multiFaceInfo.faceNum > 0)
    {
        // faceID may be absent; copy it into native memory only when provided.
        if (multiFaceInfo.faceID != null)
        {
            multiFaceInfoStruct.faceID = MemoryUtil.Malloc(multiFaceInfo.faceNum * MemoryUtil.SizeOf <int>());
            Marshal.Copy(multiFaceInfo.faceID, 0, multiFaceInfoStruct.faceID, multiFaceInfo.faceNum);
        }
        multiFaceInfoStruct.faceOrients = MemoryUtil.Malloc(multiFaceInfo.faceNum * MemoryUtil.SizeOf <int>());
        Marshal.Copy(multiFaceInfo.faceOrients, 0, multiFaceInfoStruct.faceOrients, multiFaceInfo.faceNum);
        // MRECT has no direct Marshal.Copy overload, so serialize each rect to bytes first.
        multiFaceInfoStruct.faceRects = MemoryUtil.Malloc(MemoryUtil.SizeOf <MRECT>() * multiFaceInfo.faceNum);
        byte[] allByte = new byte[MemoryUtil.SizeOf <MRECT>() * multiFaceInfo.faceNum];
        for (int i = 0; i < multiFaceInfo.faceNum; i++)
        {
            byte[] tempBytes = MemoryUtil.StructToBytes(multiFaceInfo.faceRects[i]);
            tempBytes.CopyTo(allByte, MemoryUtil.SizeOf <MRECT>() * i);
        }
        Marshal.Copy(allByte, 0, multiFaceInfoStruct.faceRects, allByte.Length);
    }
    MemoryUtil.StructureToPtr(multiFaceInfoStruct, pMultiFaceInfo);
    // Call the native SDK entry point.
    retCode = ASFFunctions.ASFImageQualityDetectEx(pEngine, pImageInfo, pMultiFaceInfo, pImageQualityInfo);
    if (retCode != 0)
    {
        // Leak fix: also free the per-face buffers Malloc'd above (faceID may be
        // a zero pointer; the sibling ASFProcessEx_IR passes it unconditionally too).
        MemoryUtil.FreeArray(imageInfo.imgData, multiFaceInfoStruct.faceID, multiFaceInfoStruct.faceOrients, multiFaceInfoStruct.faceRects, pMultiFaceInfo, pImageInfo, pImageQualityInfo);
        return(retCode);
    }
    ImageQualityInfoStruct = MemoryUtil.PtrToStructure <ASF_ImageQualityInfo>(pImageQualityInfo);
    // Leak fix: same additional frees on the success path (previously the
    // faceID/faceOrients/faceRects allocations were never released).
    MemoryUtil.FreeArray(imageInfo.imgData, multiFaceInfoStruct.faceID, multiFaceInfoStruct.faceOrients, multiFaceInfoStruct.faceRects, pMultiFaceInfo, pImageInfo, pImageQualityInfo);
    // Copy native quality scores into managed memory.
    imageQualityInfo.num = ImageQualityInfoStruct.num;
    if (imageQualityInfo.num > 0)
    {
        imageQualityInfo.faceQualityValues = new float[imageQualityInfo.num];
        Marshal.Copy(ImageQualityInfoStruct.faceQualityValues, imageQualityInfo.faceQualityValues, 0, imageQualityInfo.num);
    }
    return(retCode);
}