    public FaceComparePresenter(FaceCompare view,
                                IRepositoryFaceComparer comparer)
    {
        this.view = view;
        _comparer = comparer;
        this.exit = false;

        // Default face-comparison thresholds.
        this.Thresholds = new float[] { 60, 58, 55 };
    }
    void Start()
    {
        InitFaceIDSDK();

        faceTrack   = new FaceTrack();
        faceManager = new FaceManager();
        faceCompare = new FaceCompare();

        LoadDBFace();
        StartFaceTrack(deviceId);
    }
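
StartFaceTrack(deviceId) is presumably a thin wrapper that launches the tracking coroutine shown in Example 4 below. A minimal sketch, assuming this class is a MonoBehaviour, that IStartFaceTrack belongs to the FaceTrack instance created above, and that a RawImage field named previewImage is bound in the inspector (the wrapper shape and previewImage are assumptions, not part of the sample):

    // Hypothetical wrapper; previewImage is an assumed RawImage reference.
    void StartFaceTrack(int dev)
    {
        StartCoroutine(faceTrack.IStartFaceTrack(dev, previewImage, faceManager, faceCompare));
    }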
Example 3
    private void compareFace(object sender, System.Windows.RoutedEventArgs e)
    {
        // Compare only when both images have been selected.
        if (imgFile1.Length > 0 && imgFile2.Length > 0)
        {
            lab_result.Content = FaceCompare.FaceSimilarity(imgFile1, imgFile2);
        }
        else
        {
            MessageBox.Show("Please open two images");
        }
    }
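
imgFile1 and imgFile2 are presumably populated by file-open handlers before this click handler runs. A minimal sketch of such a handler, assuming the fields are plain file-path strings (the exact parameter types of FaceCompare.FaceSimilarity are not shown in this example):

    // Hypothetical handler; a matching openImage2 would fill imgFile2 the same way.
    private void openImage1(object sender, System.Windows.RoutedEventArgs e)
    {
        var dlg = new Microsoft.Win32.OpenFileDialog { Filter = "Images|*.jpg;*.jpeg;*.png" };
        if (dlg.ShowDialog() == true)
        {
            imgFile1 = dlg.FileName; // assumed to be a path string
        }
    }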
Example 4
    public IEnumerator IStartFaceTrack(int dev, RawImage img, FaceManager _faceManager, FaceCompare _faceCompare)
    {
        faceManager = _faceManager;
        faceCompare = _faceCompare;
        image       = new Mat();
        using (VideoCapture cap = VideoCapture.FromCamera(dev))
        {
            if (!cap.IsOpened())
            {
                Debug.LogError("open camera error");
                yield break;
            }
            // When the movie playback reaches end, Mat.data becomes NULL.
            while (true)
            {
                yield return null;

                if (isCheck)
                {
                    RotatedRect box;
                    cap.Read(image); // same as cvQueryFrame
                    if (!image.Empty())
                    {
                        int             ilen       = 2; // number of faces passed in (allocation size)
                        TrackFaceInfo[] track_info = new TrackFaceInfo[ilen];
                        for (int i = 0; i < ilen; i++)
                        {
                            track_info[i]           = new TrackFaceInfo();
                            track_info[i].landmarks = new int[144];
                            track_info[i].headPose  = new float[3];
                            track_info[i].face_id   = 0;
                            track_info[i].score     = 0;
                        }
                        int    sizeTrack = Marshal.SizeOf(typeof(TrackFaceInfo));
                        IntPtr ptT       = Marshal.AllocHGlobal(sizeTrack * ilen);

                        /*  track_mat
                         *  Input:  maxTrackObjNum: the maximum number of faces to detect; the caller
                         *          passes in the allocated face count and must allocate memory of the
                         *          corresponding size. On return it holds the number of faces detected.
                         *  Return: the minimum of the passed-in face count and the detected face count,
                         *          i.e. the number of faces actually returned.
                         ****/
                        int faceSize = ilen; // returned face count: min(allocated, detected)
                        int curSize  = ilen; // in: allocated face count, out: actually detected face count
                        faceSize = track_mat(ptT, image.CvPtr, ref curSize);
                        for (int index = 0; index < faceSize; index++)
                        {
                            IntPtr ptr = new IntPtr();
                            if (8 == IntPtr.Size)
                            {
                                ptr = (IntPtr)(ptT.ToInt64() + sizeTrack * index);
                            }
                            else if (4 == IntPtr.Size)
                            {
                                ptr = (IntPtr)(ptT.ToInt32() + sizeTrack * index);
                            }

                            track_info[index] = (TrackFaceInfo)Marshal.PtrToStructure(ptr, typeof(TrackFaceInfo));
                            trackFaceInfo     = track_info[index];
                            // Optional debug logging from the original sample (comments translated):
                            //Debug.Log("in Liveness::usb_track face_id is: " + track_info[index].face_id);
                            //for (int k = 0; k < 10; k++)
                            //{
                            //    Debug.Log("in Liveness::usb_track landmark[" + k + "] is: " + track_info[index].landmarks[k]);
                            //}
                            //for (int k = 0; k < track_info[index].headPose.Length; k++)
                            //{
                            //    Debug.Log("in Liveness::usb_track angle is: " + track_info[index].headPose[k]);
                            //}
                            //Debug.Log("in Liveness::usb_track score is: " + track_info[index].score);
                            //// box rotation angle
                            //Debug.Log("in Liveness::usb_track mAngle is: " + track_info[index].box.mAngle);
                            //// face width
                            //Debug.Log("in Liveness::usb_track mWidth is: " + track_info[index].box.mWidth);
                            //// center point X,Y coordinates
                            //Debug.Log("in Liveness::usb_track mCenter_x is: " + track_info[index].box.mCenter_x);
                            //Debug.Log("in Liveness::usb_track mCenter_y is: " + track_info[index].box.mCenter_y);
                            // Draw the rotated bounding box around the tracked face
                            box = bounding_box(track_info[index].landmarks, track_info[index].landmarks.Length);
                            draw_rotated_box(ref image, ref box, new Scalar(0, 255, 0));
                            xcenter = image.Width / 2;
                            ycenter = image.Height / 2;
                        }
                        Marshal.FreeHGlobal(ptT);
                        if (videoTexture == null)
                        {
                            videoTexture = new Texture2D(image.Width, image.Height);
                        }
                        videoTexture.LoadImage(image.ToBytes());
                        videoTexture.Apply();
                        img.texture = videoTexture;
                        //imgBytes = image.ToBytes();
                        Cv2.WaitKey(1);
                    }
                }
            }
            // Note: only reached if the loop above is ever exited; otherwise the Mat lives until the coroutine is stopped.
            image.Release();
        }
    }
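
The Marshal.SizeOf and Marshal.PtrToStructure calls above only work if TrackFaceInfo is a value type with sequential layout whose shape mirrors the native SDK struct. The declarations below are an illustrative sketch of that shape, inferred from the fields this example touches; the authoritative definition must come from the SDK itself.

    using System.Runtime.InteropServices;

    // Illustrative only: field order, types and array sizes must match the native header.
    [StructLayout(LayoutKind.Sequential)]
    public struct FaceBox
    {
        public float mCenter_x; // face center X (used above)
        public float mCenter_y; // face center Y (used above)
        public float mWidth;    // face width (used above)
        public float mAngle;    // in-plane rotation angle (used above)
    }

    [StructLayout(LayoutKind.Sequential)]
    public struct TrackFaceInfo
    {
        public FaceBox box;
        [MarshalAs(UnmanagedType.ByValArray, SizeConst = 144)]
        public int[] landmarks;  // matches the 144-element allocation above
        [MarshalAs(UnmanagedType.ByValArray, SizeConst = 3)]
        public float[] headPose; // three head-pose angles
        public int face_id;
        public float score;
    }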