예제 #1
0
        /// <summary>
        /// Loads the clinic record (creating an empty one on first run) and
        /// shows its logo, falling back to a placeholder image.
        /// </summary>
        public ClinicInfoManageViewModel()
        {
            if (App.Current != null)
            {
                _esClinicContext = App.Current.EsClinicContext;
            }

            // Placeholder logo until (and unless) a stored logo is loaded below.
            LogoImg = new BitmapImage(new Uri("/ESClinic;component/Resources/Placeholder.jpg", UriKind.Relative));
            NotifyOfPropertyChange(() => LogoImg);

            // BUG FIX: the original dereferenced _esClinicContext even when
            // App.Current was null, throwing NullReferenceException.
            if (_esClinicContext == null)
            {
                return;
            }

            var info = _esClinicContext.ClinicInfoes.FirstOrDefault();

            if (info != null)
            {
                _info         = info;
                ClinicName    = info.Name;
                ClinicAddress = info.Address;
                ClinicPhone   = info.Phone;
                if (info.Logo != null)
                {
                    LogoImg = ImageDataConverter.BytesToBitmapImage(info.Logo);
                    NotifyOfPropertyChange(() => LogoImg);
                }
            }
            else
            {
                // First run: persist an empty record so Save() can update it.
                _info = new ClinicInfo();
                _esClinicContext.ClinicInfoes.Add(_info);
                _esClinicContext.SaveChanges();
            }
        }
예제 #2
0
        /// <summary>
        /// Shows the clinic header (name / address / phone / logo) on the home
        /// screen, or warns management users when no clinic record exists yet.
        /// </summary>
        public HomeViewModel()
        {
            _user = App.Current.User;

            var clinic = App.Current.EsClinicContext.ClinicInfoes.FirstOrDefault();

            // Guard clause: nothing to display until the clinic info is filled in.
            if (clinic == null)
            {
                if (_user.ManagementAccessible)
                {
                    MessageBox.Show(InfoMessages.WRN_FILL_CLINIC_INFO);
                }
                return;
            }

            ClinicName    = clinic.Name;
            ClinicAddress = clinic.Address;
            ClinicPhone   = clinic.Phone;
            NotifyOfPropertyChange(() => ClinicName);
            NotifyOfPropertyChange(() => ClinicAddress);
            NotifyOfPropertyChange(() => ClinicPhone);

            if (clinic.Logo != null)
            {
                LogoImage = ImageDataConverter.BytesToBitmapImage(clinic.Logo);
                NotifyOfPropertyChange(() => LogoImage);
            }
        }
예제 #3
0
 /// <summary>
 /// Runs face-information detection (age / gender / 3D face angle) on a
 /// bitmap. At most 4 faces are analysed; any beyond that are reported as
 /// unknown.
 /// </summary>
 /// <param name="bitmap">Source image.</param>
 /// <param name="detectedFaces">Faces previously returned by face detection.</param>
 /// <param name="combinedMask">Subset of the intersection of the engine's initialisation mask and AGE | GENDER | FACE_3DANGLE.</param>
 public void FaceProcess(Bitmap bitmap, MultiFaceModel detectedFaces, uint combinedMask = ArcFaceFunction.AGE | ArcFaceFunction.FACE_3DANGLE | ArcFaceFunction.GENDER)
 {
     // Convert once, delegate to the raw-pointer overload, dispose promptly.
     using (ImageData converted = ImageDataConverter.ConvertToImageData(bitmap))
     {
         FaceProcess(converted.Width, converted.Height, converted.Format, converted.PImageData, detectedFaces, combinedMask);
     }
 }
예제 #4
0
 // Report callback: substitutes the stored clinic logo into the report
 // image placeholder, if a logo has been saved.
 void reportDocument_ImageProcessing(object sender, ImageEventArgs e)
 {
     var logoBytes = _clinic.Logo;
     if (logoBytes != null)
     {
         e.Image.Source = ImageDataConverter.BytesToBitmapImage(logoBytes);
     }
 }
예제 #5
0
        /// <summary>
        /// Cuts the selected image into a 7x10 grid of boxes and colours each
        /// box by the pretrained auto-encoder's 2-class output.
        /// </summary>
        private void обработатьПланшетПреобученнойСетьюToolStripMenuItem_Click(object sender, EventArgs e)
        {
            BoxerDLL.BoxCut2D cutter = new BoxerDLL.BoxCut2D(7, 10, 0.143f, 0.1f);
            if (openImageDialog.ShowDialog() == DialogResult.OK)
            {
                Bitmap       map    = new Bitmap(openImageDialog.FileName);
                RectangleF[] rects  = cutter.Rects1D(map.Width, map.Height);
                Color[]      colors = new Color[rects.Length];
                Color        plus   = Color.FromArgb(100, Color.Red);   // class 0 stronger
                Color        minus  = Color.FromArgb(100, Color.Green); // class 1 stronger
                var          maps   = cutter.Enumerator(ref map);
                int          j      = 0;
                do
                {
                    // FIX: dispose the temporary scaled bitmap (it previously
                    // leaked once per box) and drop the per-iteration
                    // GC.Collect(), which only stalled the UI thread.
                    using (Bitmap scaled = new Bitmap(maps.Current, NetworkData.image_size))
                    {
                        double[,,] output = pretrain.ProcessAutoEncoder(ImageDataConverter.GetRGBArray(scaled));
                        colors[j] = output[0, 0, 0] > output[0, 0, 1] ? plus : minus;
                    }
                    j++;
                } while (maps.MoveNext());

                var bitmap = DrawColoredRects(map, rects, colors);

                ImageShow img_show = new ImageShow(bitmap);
                img_show.Show();
            }
        }
        /// <summary>
        /// Shows the photos, type and textual result of an endoscopy record.
        /// </summary>
        /// <param name="esRecord">The record whose details are displayed.</param>
        public EndoscopyRecordViewModel(EndoscopyRecord esRecord)
        {
            DisplayName = "Xem kết quả nội soi";
            if (App.Current != null)
            {
                _esClinicContext = App.Current.EsClinicContext;
            }

            EsImages = new BindableCollection <BitmapImage>();

            // BUG FIX: the original dereferenced _esClinicContext even when
            // App.Current was null, throwing NullReferenceException.
            if (_esClinicContext == null)
            {
                return;
            }

            // PERF FIX: filter in the database query instead of materialising
            // the whole photo table with ToList() before Where().
            var esImages = _esClinicContext.EndoscopyPhotoes
                           .Where(p => p.EndoscopyRecordId == esRecord.EndoscopyRecordId)
                           .ToList();

            foreach (var image in esImages)
            {
                EsImages.Add(ImageDataConverter.BytesToBitmapImage(image.Photo));
            }

            var type = _esClinicContext.EndoscopyTypes
                       .FirstOrDefault(t => t.EndoScopyTypeId == esRecord.EndoscopyTypeId);

            if (type != null)
            {
                ResultTitle = "Mục nội soi: " + type.Name;
            }
            NotifyOfPropertyChange(() => ResultTitle);

            Result = esRecord.Result;
            NotifyOfPropertyChange(() => Result);
        }
예제 #7
0
        /// <summary>
        /// Face detection from a Bitmap. When face recognition will follow,
        /// prefer converting to ImageData via ImageDataConverter and calling
        /// the ImageData overloads instead of this convenience wrapper.
        /// </summary>
        /// <param name="bitmap">Source image.</param>
        /// <param name="deepCopy">True (default) returns an independent copy; false reuses an internal buffer that the next call overwrites.</param>
        /// <returns>The detected faces.</returns>
        public MultiFaceModel FaceDetection(Bitmap bitmap, bool deepCopy = true)
        {
            // Convert once, delegate, dispose the intermediate image data.
            using (ImageData converted = ImageDataConverter.ConvertToImageData(bitmap))
            {
                return FaceDetection(converted, deepCopy);
            }
        }
예제 #8
0
        /// <summary>
        /// Extracts the feature vector of a single face from a Bitmap.
        /// </summary>
        /// <param name="bitmap">Source image.</param>
        /// <param name="faceInfo">Position and angle of the face to extract.</param>
        /// <param name="deepCopy">True (default) returns an independent copy; false reuses an internal buffer that the next call overwrites.</param>
        /// <returns>The extracted face feature.</returns>
        public AsfFaceFeature FaceFeatureExtract(Bitmap bitmap, ref AsfSingleFaceInfo faceInfo, bool deepCopy = true)
        {
            // Convert once, delegate, dispose the intermediate image data.
            using (ImageData converted = ImageDataConverter.ConvertToImageData(bitmap))
            {
                return FaceFeatureExtract(converted, ref faceInfo, deepCopy);
            }
        }
예제 #9
0
        /// <summary>
        /// Lets the user pick an image, then visualises the network's response
        /// to it and previews the resized image.
        /// </summary>
        private void button1_Click(object sender, EventArgs e)
        {
            if (openFileDialog1.ShowDialog() == DialogResult.OK)
            {
                // FIX: the file was previously decoded twice and both full-size
                // bitmaps leaked; decode once, resize once, dispose the original.
                using (Bitmap original = new Bitmap(openFileDialog1.FileName))
                {
                    Bitmap resized = new Bitmap(original, NetworkData.image_size);

                    vis = new NeuralNetwork.NetworkVisualizer(NetworkData.network,
                                                              ImageDataConverter.GetRGBArray(resized));

                    pictureBox2.Image = resized;
                }
            }
        }
예제 #10
0
 /// <summary>
 /// Persists the edited clinic details. The logo is only re-encoded to
 /// bytes when the user actually picked a new image.
 /// </summary>
 public void Save()
 {
     _info.Name = ClinicName;
     _info.Address = ClinicAddress;
     _info.Phone = ClinicPhone;

     if (_isLogoChanged)
     {
         _info.Logo = ImageDataConverter.BitmapImageToBytes(LogoImg);
     }

     // Mark the tracked entity dirty so EF writes the update.
     _esClinicContext.Entry(_info).State = EntityState.Modified;
     _esClinicContext.SaveChanges();
 }
예제 #11
0
    /// <summary>
    /// Round-trip demo: colour image -> grayscale -> 2D array -> bitmap again,
    /// displaying each stage in its picture box.
    /// </summary>
    public Class1()
    {
        Bitmap gray = (Bitmap)GrayscaleUtils.ColorToGrayscale(inputImagePictureBox.Image);
        grayscaleImagePictureBox.Image = gray;

        int[,] pixels = ImageDataConverter.BitmapToArray2D(gray);
        processedImagePictureBox.Image = ImageDataConverter.Array2DToBitmap(pixels);
    }
예제 #12
0
        /// <summary>
        /// Loads the training set rooted two directory levels above the
        /// selected image and previews one random sample.
        /// </summary>
        private void загрузитьОбучающиеДанныеToolStripMenuItem_Click(object sender, EventArgs e)
        {
            if (openImageDialog.ShowDialog() == DialogResult.OK)
            {
                // FIX: the original used string.Replace to strip the file name
                // and then the last directory, which removes *every* occurrence
                // of those substrings and corrupts the path when they repeat.
                // Walk up two levels explicitly instead.
                string path = System.IO.Path.GetDirectoryName(
                                  System.IO.Path.GetDirectoryName(openImageDialog.FileName));

                NetworkData.train = new BitmapCatEnumerator(path, NetworkData.image_size);

                var pair = NetworkData.train.GetRandom(ref NetworkData.network);
                pictureBox1.Image = ImageDataConverter.GetImage(pair.Key.ConvertToRGD());
            }
        }
예제 #13
0
        /// <summary>
        /// Starts training: verifies an optimizer is selected, picks a sample
        /// to monitor progress against, runs a warm-up batch and starts the
        /// training loop plus the UI refresh timer.
        /// </summary>
        private void начатьToolStripMenuItem_Click(object sender, EventArgs e)
        {
            train_index = 0;

            if (NetworkData.optimizer == null)
            {
                MessageBox.Show("Не выбран оптимизатор", "Ошибка");
                return;
            }

            // Fixed sample used to visualise progress during training.
            var sample = NetworkData.train.GetRandom(ref NetworkData.network);
            pictureBox1.Image = ImageDataConverter.GetImage(sample.Key.ConvertToRGD());
            test_arr = sample.Key;
            test_res = sample.Value;

            NetworkData.optimizer.TrainBatch(NetworkData.train, 16, 1);
            StartTrain();
            timer1.Start();
        }
예제 #14
0
        /// <summary>
        /// Lists the network's layers with their output dimensions, keeps a
        /// random training sample for visual comparison and prepares the
        /// fully-connected pretrainer.
        /// </summary>
        public PretrainMaster()
        {
            InitializeComponent();

            Network network = NetworkData.network;

            // One entry per layer, e.g. "Conv2D {24,24,32}".
            foreach (var layer in network.layers)
            {
                listBox1.Items.Add($"{layer.GetType().Name} {{{layer.output_size[0]},{layer.output_size[1]},{layer.output_size[2]}}}");
            }

            test_arr = NetworkData.train.GetRandom(ref network).Key;
            base_arr = test_arr;
            testImg  = ImageDataConverter.GetImage(test_arr.ConvertToRGD());

            pretrain = new FullConPretrain(ref NetworkData.network);
        }
예제 #15
0
        /// <summary>
        /// Persists the endoscopy result and its photos (at most 6), publishes
        /// the new record to listeners, and always closes the dialog.
        /// </summary>
        public void Finished()
        {
            try
            {
                SaveEsResult();

                var esRecord = new EndoscopyRecord()
                {
                    EndoscopyTypeId = SelectedEsType.EndoScopyTypeId,
                    SessionId       = _sessionId,
                    Result          = _esResult
                };
                _esClinicContext.EndoscopyRecords.Add(esRecord);
                // SaveChanges populates esRecord.EndoscopyRecordId for the photos.
                _esClinicContext.SaveChanges();

                SelectPhotos();
                if (Photos.Count > 6)
                {
                    MessageBox.Show("Số hình nội soi nhiều hơn 6 tấm!");
                    return;
                }

                foreach (var photo in Photos)
                {
                    _esClinicContext.EndoscopyPhotoes.Add(new EndoscopyPhoto()
                    {
                        EndoscopyRecordId = esRecord.EndoscopyRecordId,
                        Photo             = ImageDataConverter.BitmapSourceToBytes(photo.Photo)
                    });
                }
                _esClinicContext.SaveChanges();

                _events.Publish(esRecord, action => { Task.Factory.StartNew(action); });
            }
            catch (Exception)
            {
                // Best-effort: any persistence failure is reported to the user.
                MessageBox.Show("Dữ liệu không đầy đủ!\n\nVui lòng kiểm lại thông tin mục nội soi.");
            }
            finally
            {
                TryClose();
            }
        }
예제 #16
0
 /// <summary>
 /// Report callback: fills the clinic-logo placeholder, then one endoscopy
 /// photo per remaining image slot; surplus slots are hidden.
 /// </summary>
 private void reportDocument_ImageProcessing(object sender, ImageEventArgs e)
 {
     if (e.Image.Name == "ClinicLogo")
     {
         if (_clinic.Logo != null)
         {
             e.Image.Source = ImageDataConverter.BytesToBitmapImage(_clinic.Logo);
         }
         return;
     }

     // Photo slots are filled in document order; _count tracks the next photo.
     if (_count < _nPhoto)
     {
         e.Image.Source = ImageDataConverter.BytesToBitmapImage(_esRecord.EndoscopyPhotoes.ElementAt(_count).Photo);
     }
     else
     {
         e.Image.Visibility = Visibility.Collapsed;
     }
     _count++;
 }
예제 #17
0
        /// <summary>
        /// Classifies each grid box of the current plane with the trained
        /// network and overlays the resulting colours on the picture box.
        /// </summary>
        void Translate()
        {
            Bitmap map = new Bitmap(planes[index]);

            RectangleF[] rects  = cutter.Rects1D(map.Width, map.Height);
            Color[]      colors = new Color[rects.Length];
            Color        plus   = Color.FromArgb(100, Color.Red);   // class 0 stronger
            Color        minus  = Color.FromArgb(100, Color.Green); // class 1 stronger
            var          maps   = cutter.Enumerator(ref map);
            int          j      = 0;

            do
            {
                // FIX: dispose the temporary scaled bitmap (it previously leaked
                // once per box) and drop the per-iteration GC.Collect(), which
                // only stalled the UI thread.
                using (Bitmap scaled = new Bitmap(maps.Current, NetworkData.image_size))
                {
                    double[,,] output = NetworkData.network.GetOutput(ImageDataConverter.GetRGBArray(scaled));
                    colors[j] = output[0, 0, 0] > output[0, 0, 1] ? plus : minus;
                }
                j++;
            } while (maps.MoveNext());

            var bitmap = DrawColoredRects(map, rects, colors);

            pictureBox1.Image = bitmap;
        }
예제 #18
0
        /// <summary>
        /// End-to-end demo: cuts the chosen image into a 7x10 box grid, trains
        /// a small CNN on the "Sorted" data set, and periodically overlays the
        /// per-box classification on the picture box with validation stats.
        /// </summary>
        private void button1_Click(object sender, EventArgs e)
        {
            if (openFileDialog1.ShowDialog() == DialogResult.OK)
            {
                Bitmap map = new Bitmap(openFileDialog1.FileName);

                BoxerDLL.BoxCut2D   cutter = new BoxCut2D(7, 10, 0.143f, 0.1f);
                List <Bitmap>       maps   = cutter.Cut(map);
                List <double[, , ]> rgbs   = new List <double[, , ]>();
                for (int i = 0; i < maps.Count; i++)
                {
                    // FIX: dispose each full-size cut before replacing it with
                    // its 24x12 thumbnail (the originals used to leak).
                    using (Bitmap fullSize = maps[i])
                    {
                        maps[i] = new Bitmap(fullSize, new System.Drawing.Size(24, 12));
                    }
                    rgbs.Add(ImageDataConverter.GetRGBArray(maps[i]));
                }

                RectangleF[] rects = cutter.Rects1D(map.Width, map.Height);

                // Training and validation sets, both resized to the box size.
                BitmapCatEnumerator enums   = new BitmapCatEnumerator("Sorted", new System.Drawing.Size(24, 12));
                BitmapCatEnumerator val     = new BitmapCatEnumerator("Val", new System.Drawing.Size(24, 12));
                Network             network = new Network();
                network.AddLayer(new Dropout(new Relu(), 0.05));
                network.AddLayer(new Conv2D(new Relu(), 7, 7, 32));
                network.AddLayer(new MaxPool2D(new Relu(), 2, 2));
                network.AddLayer(new Dropout(new Relu(), 0.05));

                network.AddLayer(new Conv2D(new Relu(), 3, 3, 64));
                network.AddLayer(new MaxPool2D(new Relu(), 2, 2));
                network.AddLayer(new Dropout(new Relu(), 0.05));

                network.AddLayer(new FullyConnLayar(new Sigmoid(), new NeuralNetwork.Size(1, 1, 256)));
                network.AddLayer(new FullyConnLayar(new Sigmoid(), new NeuralNetwork.Size(1, 1, 2)));

                network.Compile(new NeuralNetwork.Size(3, 12, 24), true);

                network.Normalization();

                pictureBox1.Image = map;

                MomentumParallel sgd = new MomentumParallel(network, 0.9, 1e-4);

                int last = 0; // epoch of the most recent overlay refresh

                for (int i = 0; i < 1000; i++)
                {
                    var errors = sgd.TrainBatchPercent(enums, 32, 1);

                    // Refresh the overlay on good accuracy or every 20 epochs.
                    if (errors.Last().Value > 90 || i - last > 20)
                    {
                        last = i;
                        Color[] colors = new Color[rects.Length];
                        Color   plus   = Color.FromArgb(100, Color.Red);   // class 0 stronger
                        Color   minus  = Color.FromArgb(100, Color.Green); // class 1 stronger

                        for (int j = 0; j < maps.Count; j++)
                        {
                            double[,,] output = network.GetOutput(rgbs[j]);

                            colors[j] = output[0, 0, 0] > output[0, 0, 1] ? plus : minus;
                        }

                        pictureBox1.Image = DrawColoredRects(map, rects, colors);

                        var validation = network.GetError(val);
                        label2.Text = "Validation\nError: " + validation.Key + "\nPercent: " + validation.Value;
                    }
                    label1.Text = i + "\nError: " + errors.Last().Key + "\nPercent: " + errors.Last().Value;
                    Update();
                }
            }
        }
예제 #19
0
        /// <summary>
        /// Smoke test of the ArcFace pipeline: detection, face-information
        /// processing and 1:1 feature comparison across three portraits.
        /// </summary>
        public void TestMethod1()
        {
            // NOTE(review): SDK credentials are hard-coded in source; move them
            // to secure configuration before shipping.
            string APP_ID = @"7NK7KSpfgxdqb74r8nvy36kDwH3wVGstr2LHGHBxQ8LY";

            string SDK_KEY = @"3fD8vKYMNfPzKHMoqppjA9chGh2aGkWzUQNFiAj7Yq63";

            // Load the test images.
            Bitmap heying = new Bitmap(@"heying.jpg");

            Bitmap face1 = new Bitmap(@"ldh0.jpg");
            Bitmap face2 = new Bitmap(@"ldh1.jpg");

            Bitmap face3 = new Bitmap(@"zxy0.jpg");

            // Initialise the ArcFace engine with every feature enabled.
            ArcFaceCore arcFace = new ArcFaceCore(APP_ID, SDK_KEY, ArcFaceDetectMode.IMAGE,
                                                  ArcFaceFunction.FACE_DETECT | ArcFaceFunction.FACE_RECOGNITION | ArcFaceFunction.AGE | ArcFaceFunction.FACE_3DANGLE | ArcFaceFunction.GENDER, DetectionOrientPriority.ASF_OP_0_ONLY, 50, 32);

            // Convert the Bitmap to ImageData (the recommended input type).
            ImageData heyingImgData = ImageDataConverter.ConvertToImageData(heying);

            // Detection: the Bitmap overload converts internally...
            MultiFaceModel multiFaceB = arcFace.FaceDetection(heying);
            // ...but passing ImageData directly is the recommended interface.
            MultiFaceModel multiFace = arcFace.FaceDetection(heyingImgData);

            // Age / gender / 3D-angle detection; the SDK documents a limit of
            // 4 faces — extras are reported as unknown.
            arcFace.FaceProcess(heyingImgData, multiFace);

            List <int> ageList = arcFace.GetAge();
            List <int> genderList = arcFace.GetGender();
            List <Face3DAngleModel> face3DAngleList = arcFace.GetFace3DAngle();

            // Feature of the first face found in portrait 1.
            ImageData faceData1 = ImageDataConverter.ConvertToImageData(face1);
            MultiFaceModel multiFace1 = arcFace.FaceDetection(faceData1);
            AsfSingleFaceInfo faceInfo1 = multiFace1.FaceInfoList.First();
            AsfFaceFeature faceFeature1 = arcFace.FaceFeatureExtract(faceData1, ref faceInfo1);

            // Feature of the first face found in portrait 2.
            ImageData faceData2 = ImageDataConverter.ConvertToImageData(face2);
            MultiFaceModel multiFace2 = arcFace.FaceDetection(faceData2);
            AsfSingleFaceInfo faceInfo2 = multiFace2.FaceInfoList.First();
            AsfFaceFeature faceFeature2 = arcFace.FaceFeatureExtract(faceData2, ref faceInfo2);

            // face1 vs face2: similarity score in [0, 1].
            float result = arcFace.FaceCompare(faceFeature1, faceFeature2);

            // Feature of the first face found in portrait 3.
            ImageData faceData3 = ImageDataConverter.ConvertToImageData(face3);
            MultiFaceModel multiFace3 = arcFace.FaceDetection(faceData3);
            AsfSingleFaceInfo faceInfo3 = multiFace3.FaceInfoList.First();
            AsfFaceFeature faceFeature3 = arcFace.FaceFeatureExtract(faceData3, ref faceInfo3);

            // face1 vs face3: similarity score in [0, 1].
            float result2 = arcFace.FaceCompare(faceFeature1, faceFeature3);

            // Release the engine.
            arcFace.Dispose();

            // FIX: every ImageData must be disposed or native memory leaks;
            // heyingImgData and faceData3 were previously missed.
            heyingImgData.Dispose();
            faceData1.Dispose();
            faceData2.Dispose();
            faceData3.Dispose();

            // FIX: dispose every Bitmap too (heying and face3 were missed).
            heying.Dispose();
            face1.Dispose();
            face2.Dispose();
            face3.Dispose();
        }
예제 #20
0
 /// <summary>
 /// Frame-source callback: marshals the captured frame onto the UI thread
 /// and displays it in the video image control.
 /// </summary>
 public void OnImageCaptured(IFrameSource frameSource,
                             Frame frame, double fps)
 {
     Action showFrame = () => ImgVideo.Source = ImageDataConverter.BitmapToBitmapSource(frame.Image);
     ImgVideo.Dispatcher.BeginInvoke(showFrame);
 }
예제 #21
0
        /// <summary>
        /// Runs the ArcFace pipeline (detection, age/gender/3D-angle, feature
        /// extraction) over every frame of a video and previews the frames.
        /// https://github.com/Thxzzzzz/ArcFaceSharp
        /// </summary>
        /// <param name="videoPath">Path of the video to analyse.</param>
        /// <param name="videoTitle">Title of the preview window.</param>
        static void RunOnArcFace(string videoPath, string videoTitle)
        {
            VideoCapture capture = new VideoCapture();

            capture.Open(videoPath);
            Window      win         = new Window(videoTitle);
            ArcFaceCore arcFaceCore = new ArcFaceCore(APP_ID, SDK_KEY, ArcFaceDetectMode.VIDEO,
                                                      ArcFaceFunction.FACE_DETECT | ArcFaceFunction.FACE_RECOGNITION | ArcFaceFunction.AGE | ArcFaceFunction.FACE_3DANGLE | ArcFaceFunction.GENDER,
                                                      DetectionOrientPriority.ASF_OP_0_ONLY, 50, 32);

            while (true)
            {
                Mat image = new Mat();
                capture.Read(image);
                if (image.Empty())
                {
                    // BUG FIX: the original `continue`d here forever — a finished
                    // video file keeps returning empty frames, so the loop never
                    // terminated. Stop when the stream is exhausted.
                    break;
                }

                Bitmap    bitmap    = image.ToBitmap();
                ImageData imageData = ImageDataConverter.ConvertToImageData(bitmap);

                // Face detection (deepCopy=false: the result buffer is reused
                // and overwritten by the next call).
                MultiFaceModel multiFaceModel = arcFaceCore.FaceDetection(imageData, false);

                // FaceProcess must run before the three queries below.
                arcFaceCore.FaceProcess(imageData, multiFaceModel);
                List <int> ageList = arcFaceCore.GetAge();
                foreach (var item in ageList)
                {
                    Console.WriteLine("Age:" + item);
                }
                List <int> genderList = arcFaceCore.GetGender();
                foreach (var item in genderList)
                {
                    Console.WriteLine("Sex:" + item);
                }
                List <Face3DAngleModel> face3DAngleList = arcFaceCore.GetFace3DAngle();

                // Feature extraction for a single face from the frame.
                AsfSingleFaceInfo asfSingleFaceInfo = new AsfSingleFaceInfo();
                try
                {
                    AsfFaceFeature asfFaceFeature = arcFaceCore.FaceFeatureExtract(imageData, ref asfSingleFaceInfo);
                }
                catch (ResultCodeException e)
                {
                    Console.WriteLine(e.ResultCode);
                }

                win.Image = bitmap.ToMat();

                // Release per-frame native resources promptly.
                imageData.Dispose();
                bitmap.Dispose();
            }

            // BUG FIX: the engine was previously disposed INSIDE the loop, so
            // the second iteration used a destroyed engine. Dispose it once,
            // after the loop, and release the capture as well.
            arcFaceCore.Dispose();
            capture.Release();
        }
예제 #22
0
        /// <summary>
        /// Compares the face captured from the camera (E:\CAM.JPG) against the
        /// ID-card photo (E:\IDCARDIMG.JPG) and reports the similarity on
        /// lbl_msg; on a match the camera preview is stopped.
        /// </summary>
        private void btn_compare_Click(object sender, EventArgs e)
        {
            ArcFaceCore arcFaceImg = new ArcFaceCore(APPID, FT_SDKKEY, ArcFaceDetectMode.IMAGE,
             ArcFaceFunction.FACE_DETECT | ArcFaceFunction.FACE_RECOGNITION | ArcFaceFunction.AGE | ArcFaceFunction.FACE_3DANGLE | ArcFaceFunction.GENDER, DetectionOrientPriority.ASF_OP_0_ONLY, 1, 16);

            Bitmap camImg = new Bitmap(@"E:\\CAM.JPG");
            Bitmap idCardImg = new Bitmap(@"E:\\IDCARDIMG.JPG");
            ImageData camImgData = ImageDataConverter.ConvertToImageData(camImg);
            ImageData idCardImgData = ImageDataConverter.ConvertToImageData(idCardImg);
            try
            {
                // First face from the camera capture.
                MultiFaceModel camImgMultiFace = arcFaceImg.FaceDetection(camImgData);
                AsfSingleFaceInfo camImgfaceInfo = camImgMultiFace.FaceInfoList.First();
                AsfFaceFeature asfFaceFeatureCam = arcFaceImg.FaceFeatureExtract(camImgData, ref camImgfaceInfo);

                // First face from the ID-card photo.
                MultiFaceModel idCardImgMultiFace = arcFaceImg.FaceDetection(idCardImgData);
                AsfSingleFaceInfo idCardImgfaceInfo = idCardImgMultiFace.FaceInfoList.First();
                AsfFaceFeature asfFaceFeatureIdCard = arcFaceImg.FaceFeatureExtract(idCardImgData, ref idCardImgfaceInfo);

                // Similarity in [0, 1]; 0.6 is the acceptance threshold here.
                float ret = arcFaceImg.FaceCompare(asfFaceFeatureCam, asfFaceFeatureIdCard);

                if (ret > 0.6)
                {
                    lbl_msg.ForeColor = Color.Green;
                    lbl_msg.Text = "人脸匹配成功--相似度:" + ret;

                    // Stop the camera preview on a successful match.
                    // NOTE(review): Thread.Abort is unsafe and deprecated —
                    // replace with cooperative cancellation when the camera
                    // loop supports it.
                    bPlayflag = false;
                    ThreadCam.Abort();
                    m_vCapture.Release();
                    btn_play.Text = "打开摄像头";
                }
                else
                {
                    lbl_msg.ForeColor = Color.Red;
                    lbl_msg.Text = "人脸匹配失败--相似度:" + ret;
                }
            }
            catch (Exception ex)
            {
                lbl_msg.ForeColor = Color.Red;
                lbl_msg.Text = "人脸匹配失败Ex:" + ex.Message;
            }
            finally
            {
                // Release the engine and all native image buffers.
                // FIX: dropped the forced GC.Collect() — deterministic Dispose
                // already frees the native memory, and forcing a collection on
                // the UI thread only causes stalls.
                arcFaceImg.Dispose();

                camImgData.Dispose();
                idCardImgData.Dispose();
                camImg.Dispose();
                idCardImg.Dispose();
            }
        }