示例#1
0
        /// <summary>
        /// Runs the silent depth-liveness model over a depth frame.
        /// </summary>
        /// <param name="data"> raw depth frame bytes </param>
        /// <param name="width"> frame width in pixels </param>
        /// <param name="height"> frame height in pixels </param>
        /// <param name="landmarks"> face landmark coordinates from the detector </param>
        /// <returns> liveness confidence score produced by the SDK </returns>
        public float depthLiveness(byte[] data, int width, int height, int[] landmarks)
        {
            // NOTE(review): the SDK call takes (height, width) in that order and a
            // trailing constant 2 — assumed to match the SDK contract; confirm.
            return FaceSDK.Run_livenessSilentPredictByte(FaceSDK.LivenessTypeId.LiveidDepth,
                                                         data, height, width, 2, landmarks);
        }
示例#2
0
        /// <summary>
        /// Runs the silent infrared-liveness model over an IR frame.
        /// </summary>
        /// <param name="data"> raw IR frame bytes </param>
        /// <param name="width"> frame width in pixels </param>
        /// <param name="height"> frame height in pixels </param>
        /// <param name="landmarks"> face landmark coordinates from the detector </param>
        /// <returns> liveness confidence score produced by the SDK </returns>
        public float irLiveness(byte[] data, int width, int height, int[] landmarks)
        {
            // NOTE(review): the SDK call takes (height, width) in that order and a
            // trailing constant 0 — assumed to match the SDK contract; confirm.
            return FaceSDK.Run_livenessSilentPredictByte(FaceSDK.LivenessTypeId.LiveidIr,
                                                         data, height, width, 0, landmarks);
        }
示例#3
0
        /// <summary>
        /// Runs the silent visible-light liveness model over an ARGB frame.
        /// </summary>
        /// <param name="data"> ARGB pixel data of the RGB frame </param>
        /// <param name="width"> frame width in pixels </param>
        /// <param name="height"> frame height in pixels </param>
        /// <param name="landmarks"> face landmark coordinates from the detector </param>
        /// <returns> liveness confidence score produced by the SDK </returns>
        public float rgbLiveness(int[] data, int width, int height, int[] landmarks)
        {
            // NOTE(review): the SDK call takes (height, width) in that order and a
            // trailing constant 24 — assumed to match the SDK contract; confirm.
            return FaceSDK.Run_livenessSilentPredict(FaceSDK.LivenessTypeId.LiveidVis,
                                                     data, height, width, 24, landmarks);
        }
示例#4
0
 /**
  * Runs face-attribute detection on the current frame and publishes the
  * result to the attribute TextView.
  *
  * @param faceInfo   tracked face whose landmarks drive the attribute model
  * @param imageFrame frame supplying the ARGB pixels and dimensions
  */
 private void attrCheck(FaceInfo faceInfo, ImageFrame imageFrame)
 {
     int[] pixels = imageFrame.getArgb();
     mFaceAttribute = FaceSDK.FaceAttribute(pixels, imageFrame.getWidth(),
                                            imageFrame.getHeight(), FaceSDK.ImgType.Argb,
                                            faceInfo.Landmarks.ToArray());
     // Marshal back to the UI thread before touching the TextView.
     RunOnUiThread(new Runnable(() => { tvAttr.Text = ("人脸属性:" + getMsg(mFaceAttribute)); }));
 }
示例#5
0
 /**
  * Activity entry point: configures a full-screen, title-less window, loads
  * the layout, initialises the face SDK models and registers this activity
  * as the liveness callback receiver.
  *
  * NOTE(review): the window feature/flag calls intentionally precede
  * base.OnCreate — RequestWindowFeature must run before content is set;
  * keep this statement ordering.
  */
 protected override void OnCreate(Bundle savedInstanceState)
 {
     RequestWindowFeature(WindowFeatures.NoTitle);
     Window.AddFlags(WindowManagerFlags.Fullscreen);
     base.OnCreate(savedInstanceState);
     SetContentView(Resource.Layout.activity_rgb_ir_video_attribute);
     FaceSDK.InitModel(this);
     findView();
     FaceSDK.FaceAttributeModelInit(this);
     // Receive liveness results through the callback interface this activity implements.
     FaceLiveness.getInstance().setLivenessCallBack(this);
 }
示例#6
0
        /// <summary>
        /// Converts an RGB camera YUV frame to ARGB, caches it in rgbData,
        /// triggers the combined-data check and previews the frame.
        /// Processing is skipped while a previous frame is still pending.
        /// </summary>
        /// <param name="data"> YUV frame bytes from the RGB camera </param>
        private void dealRgb(byte[] data)
        {
            if (rgbData != null)
            {
                return; // a converted frame is already pending; wait until it is consumed
            }

            int[] pixels = new int[PREFER_WIDTH * PERFER_HEIGH];
            FaceSDK.GetARGBFromYUVimg(data, pixels, PREFER_WIDTH, PERFER_HEIGH, 0, 0);

            rgbData = pixels;
            checkData();

            // Show the converted frame on the UI thread (debug preview).
            Bitmap preview = Bitmap.CreateBitmap(pixels, PREFER_WIDTH, PERFER_HEIGH, Bitmap.Config.Argb8888);
            RunOnUiThread(new Runnable(() => { testIv.SetImageBitmap(preview); }));
        }
示例#7
0
        /// <summary>
        /// Caches the first pending IR frame: converts it to ARGB (kept in
        /// niRargb for alignment) and copies the raw YUV bytes into irData for
        /// the IR liveness model, then triggers the combined-data check.
        /// </summary>
        /// <param name="data"> YUV frame bytes from the IR camera </param>
        private void dealIr(byte[] data)
        {
            if (irData != null)
            {
                return; // previous IR frame not yet consumed
            }

            // NOTE(review): dimensions are passed as (PERFER_HEIGH, PREFER_WIDTH)
            // here, the opposite order of dealRgb — possibly intentional for a
            // rotated IR sensor; confirm against the SDK signature.
            niRargb = new int[PREFER_WIDTH * PERFER_HEIGH];
            FaceSDK.GetARGBFromYUVimg(data, niRargb, PERFER_HEIGH, PREFER_WIDTH, 0, 0);

            int frameSize = PREFER_WIDTH * PERFER_HEIGH;
            byte[] rawIr = new byte[frameSize];
            System.Array.Copy(data, 0, rawIr, 0, frameSize);
            irData = rawIr;
            checkData();
        }
示例#8
0
 /**
  * Initialises the face SDK: activates the licence, then initialises the
  * detector, the feature extractor and the liveness models. Aborts early
  * (after notifying nothing further) if licence activation fails.
  *
  * @param key SDK licence key
  */
 public static void run(string key)
 {
     if (SsdkInitListener != null)
     {
         SsdkInitListener.initStart();
     }
     // Fix: these are informational progress messages, not errors — log at
     // Info severity instead of Error.
     Log.Info("FaceSDK", "初始化授权");
     FaceSDK.InitLicense(Context, key, LICENSE_NAME, false);
     if (!sdkInitStatus())
     {
         // Licence activation failed; skip all model initialisation.
         return;
     }
     Log.Info("FaceSDK", "初始化sdk");
     faceDetector.init(Context);
     faceFeature.init(Context);
     initLiveness(Context);
 }
示例#9
0
        /**
         * Activity entry point for the RGB+IR liveness screen: configures a
         * full-screen window, loads the layout, initialises the SDK models,
         * registers the liveness callback and reads the optional "source"
         * launch extra (-1 when absent).
         *
         * Fix: declared protected override so Android actually invokes it as
         * part of the Activity lifecycle. The previous "public void" form did
         * not override Activity.OnCreate and was never called by the framework
         * (compare the sibling activity's OnCreate declaration).
         */
        protected override void OnCreate(Bundle savedInstanceState)
        {
            // Window features must be requested before base.OnCreate.
            RequestWindowFeature(WindowFeatures.NoTitle);
            Window.AddFlags(WindowManagerFlags.Fullscreen);
            base.OnCreate(savedInstanceState);
            SetContentView(Resource.Layout.activity_rgb_ir_liveness);
            FaceSDK.InitModel(this);
            findView();

            FaceLiveness.getInstance().setLivenessCallBack(this);

            Intent intent = Intent;

            if (intent != null)
            {
                source = intent.GetIntExtra("source", -1);
            }
        }
        /**
         * Wires up the camera pipeline: creates the detect manager and image
         * source, configures preview size, preview surface, screen-on flag and
         * display orientation, then hands the source to the detector.
         */
        private void init()
        {
            faceDetectManager = new FaceDetectManager(this);
            FaceSDK.FaceAttributeModelInit(this);
            // Pull preview frames from the system camera.
            CameraImageSource cameraImageSource = new CameraImageSource(this);

            // Smaller frames detect faster; 640x480 is enough for gate scenarios.
            // The actual preview size depends on what the camera supports — see
            // camera.getParameters().getSupportedPreviewSizes().
            cameraImageSource.getCameraControl().setPreferredPreviewSize(1280, 720);

            // Minimum face size (range 80-200): smaller detects farther,
            // larger performs better. (Not configured here.)

            // Preview surface.
            cameraImageSource.setPreviewView(previewView);
            // Feed camera frames into the detector.
            faceDetectManager.setImageSource(cameraImageSource);

            textureView.SetOpaque(false);
            // Keep the screen awake while previewing.
            textureView.KeepScreenOn = (true);
            bool isPortrait = Resources.Configuration.Orientation == Android.Content.Res.Orientation.Portrait;

            if (isPortrait)
            {
                previewView.setScaleType(ScaleType.FIT_WIDTH);
                // Portrait camera orientation.
                // Consistency fix: reference CameraView unqualified, matching
                // the landscape branch below.
                cameraImageSource.getCameraControl().setDisplayOrientation(CameraView.ORIENTATION_PORTRAIT);
            }
            else
            {
                previewView.setScaleType(ScaleType.FIT_HEIGHT);
                // Landscape camera orientation.
                cameraImageSource.getCameraControl().setDisplayOrientation(CameraView.ORIENTATION_HORIZONTAL);
            }

            setCameraType(cameraImageSource);
        }
示例#11
0
 /**
  * Initialises the silent liveness models: visible-light, infrared and depth.
  *
  * NOTE(review): only the visible-light call passes a trailing argument (2)
  * — presumably a model/config selector defined by the SDK; confirm against
  * the FaceSDK.LivenessSilentInit overloads.
  *
  * @param context context used by the SDK to load the model assets
  */
 private static void initLiveness(Context context)
 {
     FaceSDK.LivenessSilentInit(context, FaceSDK.LivenessTypeId.LiveidVis, 2);
     FaceSDK.LivenessSilentInit(context, FaceSDK.LivenessTypeId.LiveidIr);
     FaceSDK.LivenessSilentInit(context, FaceSDK.LivenessTypeId.LiveidDepth);
 }
示例#12
0
 /**
  * Sets the number of worker threads the face SDK uses. Match it to the
  * device's CPU core count: e.g. 2 on a dual-core device, 4 on a quad-core.
  *
  * @param numberOfThreads thread count passed straight through to the SDK
  */
 public void setNumberOfThreads(int numberOfThreads)
 {
     FaceSDK.SetNumberOfThreads(numberOfThreads);
 }
示例#13
0
 /// <summary>
 /// Converts a YUV_420p image to ARGB using the face SDK.
 /// </summary>
 /// <param name="yuv">      YUV_420p image bytes </param>
 /// <param name="width">    image width in pixels </param>
 /// <param name="height">   image height in pixels </param>
 /// <param name="argb">     int array that receives the ARGB pixels </param>
 /// <param name="rotation"> rotation angle of the YUV image </param>
 /// <param name="mirror">   whether the image is mirrored </param>
 public static void yuvToARGB(byte[] yuv, int width, int height, int[] argb, int rotation, int mirror)
 {
     FaceSDK.GetARGBFromYUVimg(yuv, argb, width, height, rotation, mirror);
 }
示例#14
0
        /// <summary>
        /// Runs face detection over the cached RGB frame, then the liveness
        /// checks selected by the bit flags in <paramref name="type"/>
        /// (MASK_RGB / MASK_IR / MASK_DEPTH), filling a LivenessModel and
        /// notifying the registered callback.
        /// </summary>
        /// <param name="width"> frame width in pixels </param>
        /// <param name="height"> frame height in pixels </param>
        /// <param name="type"> bitmask selecting which liveness models to run </param>
        /// <returns> always false — isLiveness is never updated; callers receive
        /// results via the callback (NOTE(review): confirm this is intended) </returns>
        private bool onLivenessCheck(int width, int height, int type)
        {
            bool isLiveness = false;
            // Fix: DateTime.Now.Millisecond only returns the 0-999 millisecond
            // *component* of the current second, so every duration computed as
            // "Millisecond - startTime" was meaningless (often negative). Use a
            // real millisecond timestamp via CurrentMillis() instead.
            long startTime = CurrentMillis();
            // Is there a face in the RGB frame?
            int  errorCode = FaceSDKManager.getInstance().getFaceDetector().detect(mRgbArray, width, height);

            LivenessModel livenessModel = new LivenessModel();

            livenessModel.setRgbDetectDuration(CurrentMillis() - startTime);
            livenessModel.getImageFrame().setArgb(mRgbArray);
            livenessModel.getImageFrame().setWidth(width);
            livenessModel.getImageFrame().setHeight(height);
            livenessModel.setLiveType(type);
            livenessModel.setFaceDetectCode(errorCode);
            Log.Debug(TAG, "max_face_verification: " + errorCode + " duration:" + (CurrentMillis() - startTime));

            if (errorCode == FaceTracker.ErrCode.Ok.Ordinal() || errorCode == FaceTracker.ErrCode.DataHitLast.Ordinal())
            {
                FaceInfo[] trackedface = FaceSDKManager.getInstance().getFaceDetector().TrackedFaces;
                livenessModel.setTrackFaceInfo(trackedface);
                if (trackedface != null && trackedface.Length > 0)
                {
                    FaceInfo faceInfo = trackedface[0];
                    livenessModel.setFaceInfo(faceInfo);

                    // (Optional face filtering by distance/angle was here; it
                    // remains disabled, matching the original behavior.)
                    if (livenessCallBack != null)
                    {
                        livenessCallBack.onTip(0, "活体判断中");
                    }
                    float rgbScore = 0;
                    if ((type & MASK_RGB) == MASK_RGB)
                    {
                        startTime = CurrentMillis();
                        rgbScore  = rgbLiveness(mRgbArray, width, height, FaceFeature.PerformQuery <int>(trackedface[0].Landmarks));
                        livenessModel.setRgbLivenessScore(rgbScore);
                        livenessModel.setRgbLivenessDuration(CurrentMillis() - startTime);
                    }
                    float irScore = 0;
                    if ((type & MASK_IR) == MASK_IR)
                    {
                        // Pick the largest tracked face for IR alignment.
                        float maxWidth    = 0;
                        int   maxId       = 0;
                        float detectScore = 0;
                        if (trackedface != null && trackedface.Length > 0)
                        {
                            for (int i = 0; i < trackedface.Length; i++)
                            {
                                if (trackedface[i].MWidth > maxWidth)
                                {
                                    maxId       = i;
                                    maxWidth    = trackedface[i].MWidth;
                                    detectScore = trackedface[i].MConf;
                                }
                            }
                        }
                        if (trackedface != null)
                        {
                            float[] faceT = new float[] { trackedface[maxId].MCenterX,
                                                          trackedface[maxId].MCenterY, trackedface[maxId].MWidth,
                                                          trackedface[maxId].MAngle };
                            int[]   shape  = new int[144];
                            int[]   nPoint = new int[] { 0 };
                            float[] score  = new float[] { 0.0F };
                            // NOTE(review): Run_align receives (height, width) while
                            // irLiveness below receives (width, height) — presumably
                            // matching each SDK signature; confirm.
                            FaceSDK.Run_align(nirRgbArray, height, width, FaceSDK.ImgType.Argb,
                                              FaceSDK.AlignMethodType.Cdnn, faceT, shape, nPoint, score, detectScore);
                            livenessModel.setShape(shape);
                            startTime = CurrentMillis();
                            irScore = irLiveness(mIrByte, width, height, shape);
                            livenessModel.setIrLivenessDuration(CurrentMillis() - startTime);
                            livenessModel.setIrLivenessScore(irScore);
                        }
                    }
                    float depthScore = 0;
                    if ((type & MASK_DEPTH) == MASK_DEPTH)
                    {
                        startTime = CurrentMillis();
                        if (trackedface != null)
                        {
                            depthScore = depthLiveness(mDepthArray, width, height, FaceFeature.PerformQuery <int>(trackedface[0].Landmarks));
                            livenessModel.setDetphtLivenessDuration(CurrentMillis() - startTime);
                            livenessModel.setDepthLivenessScore(depthScore);
                        }
                    }
                    if (livenessCallBack != null)
                    {
                        livenessCallBack.onCallback(livenessModel);
                    }
                }
            }
            else
            {
                // Detection failed: surface the error and notify with a null model.
                checkFaceCode(errorCode);
                if (livenessCallBack != null)
                {
                    livenessCallBack.onCallback(null);
                }
            }
            // Always report the latest tracked faces for rectangle drawing.
            FaceInfo[] trackedfaces = FaceSDKManager.getInstance().getFaceDetector().TrackedFaces;
            livenessModel.setTrackFaceInfo(trackedfaces);
            if (livenessCallBack != null)
            {
                livenessCallBack.onCanvasRectCallback(livenessModel);
            }
            return(isLiveness);
        }

        /// <summary>
        /// Wall-clock timestamp in milliseconds, used for duration measurement.
        /// </summary>
        private static long CurrentMillis()
        {
            return DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;
        }