public void Deserialize2()
{
    var path = Path.Combine(this.ModelDirectory, "dlib_face_recognition_resnet_model_v1.dat");
    using (var loss = LossMetric.Deserialize(File.ReadAllBytes(path)))
        Assert.Equal(132, loss.NumLayers);
}
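LossMetric.Deserialize also accepts a file path directly (the Operator test below uses both overloads). A minimal companion sketch of the same layer-count check against the path-based overload; the test name Deserialize1 is assumed, not taken from the project.

// Companion sketch: same 132-layer check, but deserializing from a file path
// instead of a byte array. Assumes the same test class context as above.
public void Deserialize1()
{
    var path = Path.Combine(this.ModelDirectory, "dlib_face_recognition_resnet_model_v1.dat");
    using (var loss = LossMetric.Deserialize(path))
        Assert.Equal(132, loss.NumLayers);
}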
/// <summary>
/// Initializes a new instance of the <see cref="FaceRecognition"/> class with the directory path that stores model files.
/// </summary>
/// <param name="directory">The directory path that stores model files.</param>
/// <exception cref="FileNotFoundException">The model file is not found.</exception>
/// <exception cref="DirectoryNotFoundException">The specified directory path is not found.</exception>
private FaceRecognition(string directory)
{
    if (!Directory.Exists(directory))
    {
        throw new DirectoryNotFoundException(directory);
    }

    var predictor68PointModel = Path.Combine(directory, FaceRecognitionModels.GetPosePredictorModelLocation());
    if (!File.Exists(predictor68PointModel))
    {
        throw new FileNotFoundException(predictor68PointModel);
    }

    var predictor5PointModel = Path.Combine(directory, FaceRecognitionModels.GetPosePredictorFivePointModelLocation());
    if (!File.Exists(predictor5PointModel))
    {
        throw new FileNotFoundException(predictor5PointModel);
    }

    var cnnFaceDetectionModel = Path.Combine(directory, FaceRecognitionModels.GetCnnFaceDetectorModelLocation());
    if (!File.Exists(cnnFaceDetectionModel))
    {
        throw new FileNotFoundException(cnnFaceDetectionModel);
    }

    var faceRecognitionModel = Path.Combine(directory, FaceRecognitionModels.GetFaceRecognitionModelLocation());
    if (!File.Exists(faceRecognitionModel))
    {
        throw new FileNotFoundException(faceRecognitionModel);
    }

    this._FaceDetector?.Dispose();
    this._FaceDetector = DlibDotNet.Dlib.GetFrontalFaceDetector();

    this._PosePredictor68Point?.Dispose();
    this._PosePredictor68Point = ShapePredictor.Deserialize(predictor68PointModel);

    this._PosePredictor5Point?.Dispose();
    this._PosePredictor5Point = ShapePredictor.Deserialize(predictor5PointModel);

    this._CnnFaceDetector?.Dispose();
    this._CnnFaceDetector = LossMmod.Deserialize(cnnFaceDetectionModel);

    this._FaceEncoder?.Dispose();
    this._FaceEncoder = LossMetric.Deserialize(faceRecognitionModel);

    var predictor194PointModel = Path.Combine(directory, FaceRecognitionModels.GetPosePredictor194PointModelLocation());
    if (File.Exists(predictor194PointModel))
    {
        this._PosePredictor194Point?.Dispose();
        this._PosePredictor194Point = ShapePredictor.Deserialize(predictor194PointModel);
    }
}
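Because this constructor is private, it is normally reached through a public factory. A minimal, hedged usage sketch follows, assuming a static FaceRecognition.Create(directory) entry point and a local "Models" directory containing the dlib model files; the catch clauses mirror the exceptions documented above.

// Hedged usage sketch: "Models" is a placeholder path, and Create(directory) is
// assumed to be the public factory that forwards to the private constructor above.
try
{
    using (var faceRecognition = FaceRecognition.Create("Models"))
    {
        // The frontal/CNN detectors, shape predictors and the face encoder
        // are now deserialized and ready to use.
    }
}
catch (DirectoryNotFoundException ex)
{
    Console.WriteLine("Model directory not found: " + ex.Message);
}
catch (FileNotFoundException ex)
{
    Console.WriteLine("Model file not found: " + ex.Message);
}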
public void Operator()
{
    var image = this.GetDataFile("Lenna.jpg");

    var path1 = Path.Combine(this.ModelDirectory, "dlib_face_recognition_resnet_model_v1.dat");
    var path2 = Path.Combine(this.ModelDirectory, "shape_predictor_5_face_landmarks.dat");

    using (var net1 = LossMetric.Deserialize(path1))
    using (var net2 = LossMetric.Deserialize(File.ReadAllBytes(path1)))
    using (var sp = ShapePredictor.Deserialize(path2))
    using (var matrix = Dlib.LoadImageAsMatrix<RgbPixel>(image.FullName))
    using (var detector = Dlib.GetFrontalFaceDetector())
    {
        var faces = new List<Matrix<RgbPixel>>();
        foreach (var face in detector.Operator(matrix))
        {
            var shape = sp.Detect(matrix, face);
            var faceChipDetail = Dlib.GetFaceChipDetails(shape, 150, 0.25);
            var faceChip = Dlib.ExtractImageChip<RgbPixel>(matrix, faceChipDetail);
            faces.Add(faceChip);
        }

        foreach (var face in faces)
        {
            using (var ret1 = net1.Operator(face))
            using (var ret2 = net2.Operator(face))
            {
                Assert.Equal(1, ret1.Count);
                Assert.Equal(1, ret2.Count);

                var r1 = ret1[0];
                var r2 = ret2[0];
                Assert.Equal(r1.Columns, r2.Columns);
                Assert.Equal(r1.Rows, r2.Rows);

                for (var c = 0; c < r1.Columns; c++)
                {
                    for (var r = 0; r < r1.Rows; r++)
                    {
                        Assert.Equal(r1[r, c], r2[r, c]);
                    }
                }
            }

            face.Dispose();
        }
    }
}
public FaceContrast(string path)
{
    var files = Directory.GetFiles(path, "*.jpg");
    if (files.Length > 0)
    {
        using (var net = LossMetric.Deserialize(Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "ShapeModel", "dlib_face_recognition_resnet_model_v1.dat")))
        {
            List<Matrix<RgbPixel>> facesList = new List<Matrix<RgbPixel>>();
            foreach (var item in files)
            {
                facesList.Add(Dlib.LoadImageAsMatrix<RgbPixel>(item));
            }

            _faceDescriptors = net.Operator(facesList);
        }
    }
}
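The Contrast method called from the Login action below is not part of this listing. A plausible sketch is shown here, assuming dlib's usual convention that two 128-dimensional descriptors belong to the same person when their Euclidean distance is below 0.6; the signature and the field type are inferred from how the class is used elsewhere in this section. Note also that this constructor encodes whole images, whereas the other examples first extract 150x150 aligned face chips before calling the network.

// Hypothetical sketch of FaceContrast.Contrast: returns true if any probe
// descriptor lies within the conventional 0.6 distance of a stored gallery
// descriptor. The method shape is an assumption based on the call site in the
// Login action; _faceDescriptors is the OutputLabels<Matrix<float>> produced
// by net.Operator in the constructor above.
public bool Contrast(OutputLabels<Matrix<float>> probeDescriptors)
{
    if (this._faceDescriptors == null || probeDescriptors == null)
        return false;

    for (var i = 0; i < probeDescriptors.Count; ++i)
        for (var j = 0; j < this._faceDescriptors.Count; ++j)
            if (Dlib.Length(probeDescriptors[i] - this._faceDescriptors[j]) < 0.6)
                return true;

    return false;
}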
public async Task<ActionResult> Login([FromBody] InputFaceModel model)
{
    RequestFaceModel request = new RequestFaceModel()
    {
        Status = 500,
        Message = null
    };

    var filePath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "FaceImages", model.user_name);
    if (!Directory.Exists(filePath))
    {
        request.Enum = RequestEnum.Failed;
        Console.WriteLine(request.Message);
        Thread.Sleep(5000);
        return Ok(request);
    }

    FaceContrast faceContrast = new FaceContrast(filePath);
    VideoCapture cap = null;
    try
    {
        if (model.rmtp_url == "0")
        {
            cap = new VideoCapture(0);
        }
        else
        {
            cap = new VideoCapture(model.rmtp_url);
        }

        var flag = false;
        var faceFlag = false;
        var bioFlag = false;

        QueueFixedLength<double> leftEarQueue = new QueueFixedLength<double>(10);
        QueueFixedLength<double> rightEarQueue = new QueueFixedLength<double>(10);
        QueueFixedLength<double> mouthQueue = new QueueFixedLength<double>(20);
        bool leftEarFlag = false;
        bool rightEarFlag = false;
        bool mouthFlag = false;

        using (var sp = ShapePredictor.Deserialize(Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "ShapeModel", "shape_predictor_5_face_landmarks.dat")))
        using (var win = new ImageWindow())
        {
            // Load face detection and pose estimation models.
            using (var detector = Dlib.GetFrontalFaceDetector())
            using (var net = LossMetric.Deserialize(Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "ShapeModel", "dlib_face_recognition_resnet_model_v1.dat")))
            using (var poseModel = ShapePredictor.Deserialize(Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "ShapeModel", "shape_predictor_68_face_landmarks.dat")))
            {
                var ti = true;
                System.Timers.Timer t = new System.Timers.Timer(30000);
                t.Elapsed += new System.Timers.ElapsedEventHandler((object source, System.Timers.ElapsedEventArgs e) =>
                {
                    ti = false;
                });
                t.AutoReset = false;
                t.Enabled = true;

                // Grab and process frames until the user closes the main window.
                while (/*!win.IsClosed() &&*/ ti)
                {
                    try
                    {
                        // Grab a frame.
                        var temp = new Mat();
                        if (!cap.Read(temp))
                        {
                            break;
                        }

                        // Turn OpenCV's Mat into something dlib can deal with. Note that this just
                        // wraps the Mat object, it does not copy anything. So cimg is only valid as
                        // long as temp is valid. Also, don't do anything to temp that would cause it
                        // to reallocate the memory that stores the image, as that would leave cimg
                        // with dangling pointers. In short, do not modify temp while cimg is in use.
                        var array = new byte[temp.Width * temp.Height * temp.ElemSize()];
                        Marshal.Copy(temp.Data, array, 0, array.Length);
                        using (var cimg = Dlib.LoadImageData<RgbPixel>(array, (uint)temp.Height, (uint)temp.Width, (uint)(temp.Width * temp.ElemSize())))
                        {
                            // Detect faces.
                            var faces = detector.Operator(cimg);

                            // Find the pose of each face.
                            var shapes = new List<FullObjectDetection>();
                            for (var i = 0; i < faces.Length; ++i)
                            {
                                var det = poseModel.Detect(cimg, faces[i]);
                                shapes.Add(det);
                            }

                            if (shapes.Count > 0)
                            {
                                // Liveness detection.
                                if (!bioFlag)
                                {
                                    bioFlag = BioAssay(shapes[0], ref leftEarQueue, ref rightEarQueue, ref mouthQueue, ref leftEarFlag, ref rightEarFlag, ref mouthFlag);
                                }
                            }

                            if (!faceFlag)
                            {
                                foreach (var face in faces)
                                {
                                    var shape = sp.Detect(cimg, face);
                                    var faceChipDetail = Dlib.GetFaceChipDetails(shape, 150, 0.25);
                                    Matrix<RgbPixel> rgbPixels = new Matrix<RgbPixel>(cimg);
                                    var faceChip = Dlib.ExtractImageChip<RgbPixel>(rgbPixels, faceChipDetail);
                                    var faceDescriptors = net.Operator(faceChip);
                                    faceFlag = faceContrast.Contrast(faceDescriptors);
                                }
                            }

                            Console.WriteLine(model.user_name + ":" + faceFlag);

                            if (bioFlag && faceFlag)
                            {
                                flag = bioFlag && faceFlag;
                                if (flag)
                                {
                                    break;
                                }
                            }

                            // Display it all on the screen.
                            win.ClearOverlay();
                            win.SetImage(cimg);
                            var lines = Dlib.RenderFaceDetections(shapes);
                            win.AddOverlay(faces, new RgbPixel { Red = 72, Green = 118, Blue = 255 });
                            win.AddOverlay(lines);

                            foreach (var line in lines)
                            {
                                line.Dispose();
                            }
                        }
                    }
                    catch (Exception ex)
                    {
                        request.Message = ex.ToString();
                        break;
                    }
                }
            }
        }

        if (flag)
        {
            request.Enum = RequestEnum.Succeed;
        }
        else
        {
            request.Enum = RequestEnum.Failed;
        }
    }
    catch (Exception ex)
    {
        request.Message = ex.ToString();
    }
    finally
    {
        if (cap != null)
        {
            cap.Dispose();
        }
    }

    Console.WriteLine(request.Message);
    return Ok(request);
}
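BioAssay itself is not included in this listing; judging from the leftEarQueue/rightEarQueue/mouthQueue parameters, the liveness check is presumably built on eye-aspect-ratio (EAR) and mouth-opening measurements collected over recent frames. A minimal sketch of the EAR computation from the 68-point landmarks follows; the helper name and the landmark index convention (left eye 36-41, right eye 42-47) are assumptions, not taken from the project. In such schemes a blink is typically registered when the EAR dips below a small threshold for a few consecutive frames, which is presumably what the fixed-length queues track.

// Hedged sketch of an eye-aspect-ratio helper that a BioAssay-style liveness
// check could be built on. Class and method names are hypothetical.
using System;
using DlibDotNet;

public static class LandmarkMetrics
{
    // EAR over the six landmarks of one eye (68-point layout: the left eye
    // starts at index 36, the right eye at 42). The ratio drops sharply while
    // the eye is closed, which makes it a simple blink signal.
    public static double EyeAspectRatio(FullObjectDetection shape, uint start)
    {
        double Dist(uint a, uint b)
        {
            var pa = shape.GetPart(a);
            var pb = shape.GetPart(b);
            double dx = pa.X - pb.X;
            double dy = pa.Y - pb.Y;
            return Math.Sqrt(dx * dx + dy * dy);
        }

        // EAR = (|p2-p6| + |p3-p5|) / (2 * |p1-p4|)
        return (Dist(start + 1, start + 5) + Dist(start + 2, start + 4))
               / (2.0 * Dist(start, start + 3));
    }
}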