public static IEnumerable <Matrix <double> > ComputeFaceDescriptors(LossMetric net, Image img, IEnumerable <FullObjectDetection> faces, int numJitters)
        {
            // Delegate to the batch API with a single-image "batch" and unwrap
            // the first (and only) per-image result.
            var singleImageBatch = new[] { img };
            var singleFacesBatch = new[] { faces };

            return BatchComputeFaceDescriptors(net, singleImageBatch, singleFacesBatch, numJitters).First();
        }
Exemplo n.º 2
0
        public DLibFaceIdentification(IImageRotationService imageRotationService)
        {
            // Store the required rotation service; a null argument is rejected up front.
            this.imageRotationService = imageRotationService ?? throw new ArgumentNullException(nameof(imageRotationService));

            // HOG-based frontal face detector used to locate faces in an image.
            detector = Dlib.GetFrontalFaceDetector();

            // set up a 5-point landmark detector
            predictor = ShapePredictor.Deserialize("model/shape_predictor_5_face_landmarks.dat");

            // set up a neural network for face recognition
            dnn = DlibDotNet.Dnn.LossMetric.Deserialize("model/dlib_face_recognition_resnet_model_v1.dat");

            // create a color palette for plotting
            // (10 distinct RGB colors; presumably cycled per detected face — confirm at the plotting site)
            palette = new RgbPixel[]
            {
                new RgbPixel(0xe6, 0x19, 0x4b),
                new RgbPixel(0xf5, 0x82, 0x31),
                new RgbPixel(0xff, 0xe1, 0x19),
                new RgbPixel(0xbc, 0xf6, 0x0c),
                new RgbPixel(0x3c, 0xb4, 0x4b),
                new RgbPixel(0x46, 0xf0, 0xf0),
                new RgbPixel(0x43, 0x63, 0xd8),
                new RgbPixel(0x91, 0x1e, 0xb4),
                new RgbPixel(0xf0, 0x32, 0xe6),
                new RgbPixel(0x80, 0x80, 0x80)
            };
        }
Exemplo n.º 3
0
        public void Deserialize2()
        {
            // Deserialize the ResNet face-recognition model from a raw byte array
            // (rather than a path) and verify the expected layer count.
            var modelPath = Path.Combine(this.ModelDirectory, "dlib_face_recognition_resnet_model_v1.dat");
            var modelBytes = File.ReadAllBytes(modelPath);

            using (var network = LossMetric.Deserialize(modelBytes))
            {
                Assert.Equal(132, network.NumLayers);
            }
        }
        /// <summary>
        /// Initializes a new instance of the <see cref="FaceRecognition"/> class with the directory path that stores model files.
        /// </summary>
        /// <param name="directory">The directory path that stores model files.</param>
        /// <exception cref="FileNotFoundException">The model file is not found.</exception>
        /// <exception cref="DirectoryNotFoundException">The specified directory path is not found.</exception>
        private FaceRecognition(string directory)
        {
            if (!Directory.Exists(directory))
            {
                throw new DirectoryNotFoundException(directory);
            }

            // Resolve a model file under the directory, throwing the same
            // FileNotFoundException(path) the four duplicated checks used to throw.
            string RequireModel(string fileName)
            {
                var path = Path.Combine(directory, fileName);
                if (!File.Exists(path))
                {
                    throw new FileNotFoundException(path);
                }

                return path;
            }

            // Validate all mandatory models before constructing anything, so a
            // missing file cannot leave the instance half-initialized.
            var predictor68PointModel = RequireModel(FaceRecognitionModels.GetPosePredictorModelLocation());
            var predictor5PointModel  = RequireModel(FaceRecognitionModels.GetPosePredictorFivePointModelLocation());
            var cnnFaceDetectionModel = RequireModel(FaceRecognitionModels.GetCnnFaceDetectorModelLocation());
            var faceRecognitionModel  = RequireModel(FaceRecognitionModels.GetFaceRecognitionModelLocation());

            // Dispose any previously loaded native objects before replacing them
            // so repeated initialization does not leak native memory.
            this._FaceDetector?.Dispose();
            this._FaceDetector = DlibDotNet.Dlib.GetFrontalFaceDetector();

            this._PosePredictor68Point?.Dispose();
            this._PosePredictor68Point = ShapePredictor.Deserialize(predictor68PointModel);

            this._PosePredictor5Point?.Dispose();
            this._PosePredictor5Point = ShapePredictor.Deserialize(predictor5PointModel);

            this._CnnFaceDetector?.Dispose();
            this._CnnFaceDetector = LossMmod.Deserialize(cnnFaceDetectionModel);

            this._FaceEncoder?.Dispose();
            this._FaceEncoder = LossMetric.Deserialize(faceRecognitionModel);

            // The 194-point predictor is optional: load it only when present.
            var predictor194PointModel = Path.Combine(directory, FaceRecognitionModels.GetPosePredictor194PointModelLocation());

            if (File.Exists(predictor194PointModel))
            {
                this._PosePredictor194Point?.Dispose();
                this._PosePredictor194Point = ShapePredictor.Deserialize(predictor194PointModel);
            }
        }
        public DLibFaceIdentificationService(IImageRotationService imageRotationService)
        {
            // Store the required rotation service; a null argument is rejected up front.
            this.imageRotationService = imageRotationService ?? throw new ArgumentNullException(nameof(imageRotationService));

            // set up a 5-point landmark detector
            predictor = ShapePredictor.Deserialize("model/shape_predictor_5_face_landmarks.dat");

            // set up a neural network for face recognition
            dnn = DlibDotNet.Dnn.LossMetric.Deserialize("model/dlib_face_recognition_resnet_model_v1.dat");
        }
Exemplo n.º 6
0
        public void Create()
        {
            // Construct a LossMetric for each supported network id (0 and 1) and
            // verify a freshly created instance reports itself as not disposed.
            for (var networkId = 0; networkId < 2; networkId++)
            {
                using (var network = new LossMetric(networkId))
                {
                    Assert.True(!network.IsDisposed);
                }
            }
        }
Exemplo n.º 7
0
        public void Operator()
        {
            // Verify that a network deserialized from a file path and one deserialized
            // from raw bytes produce identical descriptors for the same face chips.
            var image = this.GetDataFile("Lenna.jpg");
            var recognitionModelPath = Path.Combine(this.ModelDirectory, "dlib_face_recognition_resnet_model_v1.dat");
            var landmarkModelPath = Path.Combine(this.ModelDirectory, "shape_predictor_5_face_landmarks.dat");

            using (var netFromPath = LossMetric.Deserialize(recognitionModelPath))
            using (var netFromBytes = LossMetric.Deserialize(File.ReadAllBytes(recognitionModelPath)))
            using (var landmarkDetector = ShapePredictor.Deserialize(landmarkModelPath))
            using (var matrix = Dlib.LoadImageAsMatrix <RgbPixel>(image.FullName))
            using (var faceDetector = Dlib.GetFrontalFaceDetector())
            {
                // Align and crop every detected face into a 150x150 chip.
                var chips = new List <Matrix <RgbPixel> >();
                foreach (var rect in faceDetector.Operator(matrix))
                {
                    var landmarks = landmarkDetector.Detect(matrix, rect);
                    var chipDetail = Dlib.GetFaceChipDetails(landmarks, 150, 0.25);
                    chips.Add(Dlib.ExtractImageChip <RgbPixel>(matrix, chipDetail));
                }

                foreach (var chip in chips)
                {
                    using (var fromPath = netFromPath.Operator(chip))
                    using (var fromBytes = netFromBytes.Operator(chip))
                    {
                        // One descriptor per chip from each network.
                        Assert.Equal(1, fromPath.Count);
                        Assert.Equal(1, fromBytes.Count);

                        var left = fromPath[0];
                        var right = fromBytes[0];

                        Assert.Equal(left.Columns, right.Columns);
                        Assert.Equal(left.Rows, right.Rows);

                        // Element-wise equality: both deserialization routes must
                        // yield bit-identical embeddings.
                        for (var c = 0; c < left.Columns; c++)
                        {
                            for (var r = 0; r < left.Rows; r++)
                            {
                                Assert.Equal(left[r, c], right[r, c]);
                            }
                        }
                    }

                    chip.Dispose();
                }
            }
        }
Exemplo n.º 8
0
        public static void SetAllBnRunningStatsWindowSizes(LossMetric net, uint newWindowSize)
        {
            // Apply a new running-stats window size to every batch-normalization
            // layer of the given network via the native dlib binding.
            if (net == null)
                throw new ArgumentNullException(nameof(net));

            net.ThrowIfDisposed();

            var error = NativeMethods.set_all_bn_running_stats_window_sizes_loss_metric(net.NativePtr,
                                                                                        net.NetworkType,
                                                                                        newWindowSize);

            // The native call reports an error code when this network type has no
            // batch-norm support; surface that as a typed exception.
            if (error == NativeMethods.ErrorType.DnnNotSupportNetworkType)
                throw new NotSupportNetworkTypeException(net.NetworkType);
        }
Exemplo n.º 9
0
            public FaceContrast(string path)
            {
                // Load every .jpg in the given directory and precompute a face
                // descriptor for each using the dlib ResNet recognition model.
                var files = Directory.GetFiles(path, "*.jpg");

                if (files.Length == 0)
                {
                    // No reference images: leave _faceDescriptors unset, as before.
                    return;
                }

                var images = new List <Matrix <RgbPixel> >();
                try
                {
                    foreach (var file in files)
                    {
                        images.Add(Dlib.LoadImageAsMatrix <RgbPixel>(file));
                    }

                    using (var net = LossMetric.Deserialize(Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "ShapeModel", "dlib_face_recognition_resnet_model_v1.dat")))
                    {
                        _faceDescriptors = net.Operator(images);
                    }
                }
                finally
                {
                    // Fix: the loaded image matrices were previously leaked (native
                    // memory). The descriptors returned by Operator are separate
                    // objects, so the inputs can be released here.
                    foreach (var image in images)
                    {
                        image.Dispose();
                    }
                }
            }
        public static IEnumerable <IEnumerable <Matrix <double> > > BatchComputeFaceDescriptors(LossMetric net,
                                                                                                IList <Image> batchImages,
                                                                                                IList <IEnumerable <FullObjectDetection> > batchFaces,
                                                                                                int numJitters)
        {
            // Computes one descriptor per detected face for a batch of images.
            // Detections must use the 68-point iBUG 300W style or dlib's 5-point
            // style. When numJitters > 1 the mean descriptor over jittered copies
            // of each face chip is returned.
            if (batchImages.Count != batchFaces.Count)
            {
                throw new ArgumentException("The array of images and the array of array of locations must be of the same size");
            }

            foreach (var faces in batchFaces)
            {
                foreach (var f in faces)
                {
                    if (f.Parts != 68 && f.Parts != 5)
                    {
                        throw new ArgumentException("The full_object_detection must use the iBUG 300W 68 point face landmark style or dlib's 5 point style.");
                    }
                }
            }

            // Align and crop every face of every image into a 150x150 chip,
            // flattened in batch order so descriptors can be matched back by index.
            var faceChips = new List <Matrix <RgbPixel> >();

            for (var i = 0; i < batchImages.Count; ++i)
            {
                var faces = batchFaces[i];
                var img   = batchImages[i];

                var dets = new List <ChipDetails>();
                foreach (var f in faces)
                {
                    dets.Add(DlibDotNet.Dlib.GetFaceChipDetails(f, 150, 0.25));
                }

                faceChips.AddRange(DlibDotNet.Dlib.ExtractImageChips <RgbPixel>(img.Matrix, dets));
            }

            var faceDescriptors = new List <List <Matrix <double> > >();

            if (numJitters <= 1)
            {
                // extract descriptors and convert from float vectors to double vectors
                var descriptors = net.Operator(faceChips, 16);
                var index       = 0;
                var list        = descriptors.ToArray();
                for (var i = 0; i < batchFaces.Count; ++i)
                {
                    faceDescriptors.Add(new List <Matrix <double> >());
                    for (var j = 0; j < batchFaces[i].Count(); ++j)
                    {
                        faceDescriptors[i].Add(DlibDotNet.Dlib.MatrixCast <double>(list[index++]));
                    }
                }

                // Sanity check: every chip must map to exactly one descriptor.
                if (index != list.Length)
                {
                    throw new ApplicationException();
                }
            }
            else
            {
                // extract descriptors and convert from float vectors to double vectors
                var index = 0;
                for (var i = 0; i < batchFaces.Count; ++i)
                {
                    // Fix: the per-image list was never created in this branch, so
                    // faceDescriptors[i] below threw ArgumentOutOfRangeException for
                    // every call with numJitters > 1.
                    faceDescriptors.Add(new List <Matrix <double> >());
                    for (var j = 0; j < batchFaces[i].Count(); ++j)
                    {
                        // Average the network output over numJitters jittered copies.
                        var tmp  = JitterImage(faceChips[index++], numJitters);
                        var tmp2 = net.Operator(tmp, 16);
                        var mat  = DlibDotNet.Dlib.Mat(tmp2);
                        var r    = DlibDotNet.Dlib.Mean <double>(mat);
                        faceDescriptors[i].Add(r);
                    }
                }

                if (index != faceChips.Count)
                {
                    throw new ApplicationException();
                }
            }

            return faceDescriptors;
        }
        public static Matrix <double> ComputeFaceDescriptor(LossMetric net, Image img, FullObjectDetection face, int numJitters)
        {
            // Wrap the single detection in an array, reuse the multi-face overload,
            // and return its first (and only) descriptor.
            var detections = new[] { face };

            return ComputeFaceDescriptors(net, img, detections, numJitters).First();
        }
Exemplo n.º 12
0
        /// <summary>
        /// Performs a face-based login: captures video from a camera or RTMP stream,
        /// requires a liveness check (blinks/mouth movement via BioAssay) AND a face
        /// match against the user's stored reference images, within a 30-second window.
        /// </summary>
        /// <param name="model">User name and capture source ("0" = default local camera, otherwise a stream URL).</param>
        /// <returns>200 OK with a <c>RequestFaceModel</c> whose Enum is Succeed or Failed.</returns>
        public async Task <ActionResult> Login([FromBody] InputFaceModel model)
        {
            RequestFaceModel request = new RequestFaceModel()
            {
                Status  = 500,
                Message = null
            };
            // Reference images are stored per user under FaceImages/<user_name>.
            var filePath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "FaceImages", model.user_name);

            if (!Directory.Exists(filePath))
            {
                request.Enum = RequestEnum.Failed;
                Console.WriteLine(request.Message);
                // NOTE(review): fixed 5s delay before failing when the user has no
                // enrolled images — presumably to slow down probing; confirm intent.
                Thread.Sleep(5000);
                return(Ok(request));
            }
            // Precomputes descriptors for all of the user's reference images.
            FaceContrast faceContrast = new FaceContrast(filePath);

            VideoCapture cap = null;

            try
            {
                // "0" selects the default local camera; anything else is treated as a stream URL.
                if (model.rmtp_url == "0")
                {
                    cap = new VideoCapture(0);
                }
                else
                {
                    cap = new VideoCapture(model.rmtp_url);
                }


                var flag     = false;  // overall success: liveness AND face match
                var faceFlag = false;  // face matched one of the reference descriptors

                var bioFlag = false;   // liveness confirmed

                // Rolling windows of eye/mouth aspect ratios used by BioAssay.
                QueueFixedLength <double> leftEarQueue  = new QueueFixedLength <double>(10);
                QueueFixedLength <double> rightEarQueue = new QueueFixedLength <double>(10);
                QueueFixedLength <double> mouthQueue    = new QueueFixedLength <double>(20);
                bool leftEarFlag  = false;
                bool rightEarFlag = false;
                bool mouthFlag    = false;
                using (var sp = ShapePredictor.Deserialize(Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "ShapeModel", "shape_predictor_5_face_landmarks.dat")))
                    using (var win = new ImageWindow())
                    {
                        // Load face detection and pose estimation models.
                        using (var detector = Dlib.GetFrontalFaceDetector())
                            using (var net = LossMetric.Deserialize(Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "ShapeModel", "dlib_face_recognition_resnet_model_v1.dat")))
                                using (var poseModel = ShapePredictor.Deserialize(Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "ShapeModel", "shape_predictor_68_face_landmarks.dat")))
                                {
                                    var ti = true;

                                    // One-shot 30-second timeout: when it fires, ti goes
                                    // false and the capture loop below exits.
                                    System.Timers.Timer t = new System.Timers.Timer(30000);
                                    t.Elapsed += new System.Timers.ElapsedEventHandler((object source, System.Timers.ElapsedEventArgs e) =>
                                    {
                                        ti = false;
                                    });

                                    t.AutoReset = false;
                                    t.Enabled   = true;

                                    // Grab and process frames until the timeout elapses (or success breaks out).
                                    while (/*!win.IsClosed() &&*/ ti)
                                    {
                                        try
                                        {
                                            // Grab a frame
                                            var temp = new Mat();
                                            if (!cap.Read(temp))
                                            {
                                                break;
                                            }

                                            // Turn OpenCV's Mat into something dlib can handle. Note that
                                            // this wraps the Mat object without copying anything, so cimg
                                            // is only valid as long as temp is valid. Also, don't do
                                            // anything to temp that would cause it to reallocate the image
                                            // memory, since that would leave cimg with dangling pointers.
                                            // In short: don't modify temp while cimg is in use.
                                            var array = new byte[temp.Width * temp.Height * temp.ElemSize()];
                                            Marshal.Copy(temp.Data, array, 0, array.Length);
                                            using (var cimg = Dlib.LoadImageData <RgbPixel>(array, (uint)temp.Height, (uint)temp.Width, (uint)(temp.Width * temp.ElemSize())))
                                            {
                                                // Detect faces
                                                var faces = detector.Operator(cimg);
                                                // Find the pose of each face.
                                                var shapes = new List <FullObjectDetection>();
                                                for (var i = 0; i < faces.Length; ++i)
                                                {
                                                    var det = poseModel.Detect(cimg, faces[i]);
                                                    shapes.Add(det);
                                                }

                                                if (shapes.Count > 0)
                                                {
                                                    // Liveness detection: only the first detected face is checked.

                                                    if (!bioFlag)
                                                    {
                                                        bioFlag = BioAssay(shapes[0], ref leftEarQueue, ref rightEarQueue, ref mouthQueue, ref leftEarFlag, ref rightEarFlag, ref mouthFlag);
                                                    }
                                                }


                                                if (!faceFlag)
                                                {
                                                    // Compare each detected face's descriptor against the
                                                    // user's enrolled descriptors until one matches.
                                                    foreach (var face in faces)
                                                    {
                                                        var shape                   = sp.Detect(cimg, face);
                                                        var faceChipDetail          = Dlib.GetFaceChipDetails(shape, 150, 0.25);
                                                        Matrix <RgbPixel> rgbPixels = new Matrix <RgbPixel>(cimg);
                                                        var faceChip                = Dlib.ExtractImageChip <RgbPixel>(rgbPixels, faceChipDetail);
                                                        var faceDescriptors         = net.Operator(faceChip);
                                                        faceFlag = faceContrast.Contrast(faceDescriptors);
                                                    }
                                                }
                                                Console.WriteLine(model.user_name + ":" + faceFlag);
                                                if (bioFlag && faceFlag)
                                                {
                                                    flag = bioFlag && faceFlag;
                                                    if (flag)
                                                    {
                                                        break;
                                                    }
                                                }

                                                // Display on the screen
                                                win.ClearOverlay();
                                                win.SetImage(cimg);
                                                var lines = Dlib.RenderFaceDetections(shapes);
                                                win.AddOverlay(faces, new RgbPixel {
                                                    Red = 72, Green = 118, Blue = 255
                                                });
                                                win.AddOverlay(lines);
                                                foreach (var line in lines)
                                                {
                                                    line.Dispose();
                                                }
                                            }
                                        }
                                        catch (Exception ex)
                                        {
                                            request.Message = ex.ToString();
                                            break;
                                        }
                                    }
                                }
                    }

                if (flag)
                {
                    request.Enum = RequestEnum.Succeed;
                }
                else
                {
                    request.Enum = RequestEnum.Failed;
                }
            }
            catch (Exception ex)
            {
                request.Message = ex.ToString();
            }
            finally
            {
                // Always release the capture device/stream.
                if (cap != null)
                {
                    cap.Dispose();
                }
            }
            Console.WriteLine(request.Message);
            return(Ok(request));
        }
Exemplo n.º 13
0
        // Metric-learning example: trains a LossMetric network to embed 8D one-hot
        // vectors into 2D so that same-label samples end up close together and
        // different-label samples end up far apart, then evaluates the embedding.
        private static void Main()
        {
            try
            {
                // The API for doing metric learning is very similar to the API for
                // multi-class classification.  In fact, the inputs are the same, a bunch of
                // labeled objects.  So here we create our dataset.  We make up some simple
                // vectors and label them with the integers 1,2,3,4.  The specific values of
                // the integer labels don't matter.
                var samples = new List <Matrix <double> >();
                var labels  = new List <uint>();

                // class 1 training vectors
                samples.Add(new Matrix <double>(new MatrixTemplateSizeParameter(0, 1), new double[] { 1, 0, 0, 0, 0, 0, 0, 0 })); labels.Add(1);
                samples.Add(new Matrix <double>(new MatrixTemplateSizeParameter(0, 1), new double[] { 0, 1, 0, 0, 0, 0, 0, 0 })); labels.Add(1);

                // class 2 training vectors
                samples.Add(new Matrix <double>(new MatrixTemplateSizeParameter(0, 1), new double[] { 0, 0, 1, 0, 0, 0, 0, 0 })); labels.Add(2);
                samples.Add(new Matrix <double>(new MatrixTemplateSizeParameter(0, 1), new double[] { 0, 0, 0, 1, 0, 0, 0, 0 })); labels.Add(2);

                // class 3 training vectors
                samples.Add(new Matrix <double>(new MatrixTemplateSizeParameter(0, 1), new double[] { 0, 0, 0, 0, 1, 0, 0, 0 })); labels.Add(3);
                samples.Add(new Matrix <double>(new MatrixTemplateSizeParameter(0, 1), new double[] { 0, 0, 0, 0, 0, 1, 0, 0 })); labels.Add(3);

                // class 4 training vectors
                samples.Add(new Matrix <double>(new MatrixTemplateSizeParameter(0, 1), new double[] { 0, 0, 0, 0, 0, 0, 1, 0 })); labels.Add(4);
                samples.Add(new Matrix <double>(new MatrixTemplateSizeParameter(0, 1), new double[] { 0, 0, 0, 0, 0, 0, 0, 1 })); labels.Add(4);


                // Make a network that simply learns a linear mapping from 8D vectors to 2D
                // vectors.
                using (var net = new LossMetric(1))
                    using (var trainer = new DnnTrainer <LossMetric>(net))
                    {
                        trainer.SetLearningRate(0.1);

                        // It should be emphasized out that it's really important that each mini-batch contain
                        // multiple instances of each class of object.  This is because the metric learning
                        // algorithm needs to consider pairs of objects that should be close as well as pairs
                        // of objects that should be far apart during each training step.  Here we just keep
                        // training on the same small batch so this constraint is trivially satisfied.
                        // Stop once the learning rate has decayed below 1e-4.
                        while (trainer.GetLearningRate() >= 1e-4)
                        {
                            LossMetric.TrainOneStep(trainer, samples, labels);
                        }

                        // Wait for training threads to stop
                        trainer.GetNet().Dispose();
                        Console.WriteLine("done training");


                        // Run all the samples through the network to get their 2D vector embeddings.
                        var embedded = net.Operator(samples);

                        // Print the embedding for each sample to the screen.  If you look at the
                        // outputs carefully you should notice that they are grouped together in 2D
                        // space according to their label.
                        for (var i = 0; i < embedded.Count(); ++i)
                        {
                            using (var trans = Dlib.Trans(embedded[i]))
                                Console.Write($"label: {labels[i]}\t{trans}");
                        }

                        // Now, check if the embedding puts things with the same labels near each other and
                        // things with different labels far apart.
                        // Every unordered pair of samples is scored once.
                        var numRight = 0;
                        var numWrong = 0;
                        for (var i = 0; i < embedded.Count(); ++i)
                        {
                            for (var j = i + 1; j < embedded.Count(); ++j)
                            {
                                if (labels[i] == labels[j])
                                {
                                    // The loss_metric layer will cause things with the same label to be less
                                    // than net.loss_details().get_distance_threshold() distance from each
                                    // other.  So we can use that distance value as our testing threshold for
                                    // "being near to each other".
                                    if (Dlib.Length(embedded[i] - embedded[j]) < net.GetLossDetails().GetDistanceThreshold())
                                    {
                                        ++numRight;
                                    }
                                    else
                                    {
                                        ++numWrong;
                                    }
                                }
                                else
                                {
                                    // Different labels should be at or beyond the threshold distance.
                                    if (Dlib.Length(embedded[i] - embedded[j]) >= net.GetLossDetails().GetDistanceThreshold())
                                    {
                                        ++numRight;
                                    }
                                    else
                                    {
                                        ++numWrong;
                                    }
                                }
                            }
                        }

                        Console.WriteLine($"num_right: {numRight}");
                        Console.WriteLine($"num_wrong: {numWrong}");
                    }
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
            }
        }