Example #1
        /// <summary>
        /// Download and initialize the facial landmark detector
        /// </summary>
        /// <param name="onDownloadProgressChanged">Callback invoked when the download progress changes</param>
        /// <returns>Async task</returns>
        public async Task Init(System.Net.DownloadProgressChangedEventHandler onDownloadProgressChanged = null)
        {
            if (_facemark == null)
            {
                FileDownloadManager manager = new FileDownloadManager();
                manager.AddFile(
                    "https://raw.githubusercontent.com/kurnianggoro/GSOC2017/master/data/lbfmodel.yaml",
                    "facemark",
                    "70DD8B1657C42D1595D6BD13D97D932877B3BED54A95D3C4733A0F740D1FD66B");
                if (onDownloadProgressChanged != null)
                {
                    manager.OnDownloadProgressChanged += onDownloadProgressChanged;
                }
                await manager.Download();

                if (manager.AllFilesDownloaded)
                {
                    using (FacemarkLBFParams facemarkParam = new CV.Face.FacemarkLBFParams())
                    {
                        _facemark = new FacemarkLBF(facemarkParam);
                        _facemark.LoadModel(manager.Files[0].LocalFile);
                    }
                }
            }
        }
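A minimal call-site sketch for the Init method above. The enclosing detector type is not shown in this example, so the FacemarkDetector name below is an assumption:

        // Hypothetical caller; FacemarkDetector stands in for the class
        // that contains the Init() method above.
        FacemarkDetector detector = new FacemarkDetector();
        await detector.Init((sender, e) =>
        {
            // e is a System.Net.DownloadProgressChangedEventArgs
            Console.WriteLine($"lbfmodel.yaml: {e.ProgressPercentage}% downloaded");
        });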
Example #2
    void Start()
    {
        //cascadePath = Path.Combine (Directory.GetCurrentDirectory (), AssetDatabase.GetAssetPath (cascadeFile));

        // Initialize the webcam texture and start capturing
        webcamTexture = new WebCamTexture();
        webcamTexture.Play();

        width  = webcamTexture.width;
        height = webcamTexture.height;

        // Build the file paths where the OpenCV model files will be stored
        filePath   = Path.Combine(Application.persistentDataPath, cascadeModel.name + ".xml");
        fmFilePath = Path.Combine(Application.persistentDataPath, "lbfmodel.yaml");

        // Initialize the facemark system used to locate facial landmarks
        fParams           = new FacemarkLBFParams();
        fParams.ModelFile = fmFilePath;
        facemark          = new FacemarkLBF(fParams);
        facemark.LoadModel(fParams.ModelFile);

        File.WriteAllBytes(filePath, cascadeModel.bytes);

        convertedTexture = new Texture2D(width, height);

        Debug.Log("Tracking Started! Recording with " + webcamTexture.deviceName + " at " + webcamTexture.width + "x" + webcamTexture.height);

        InvokeRepeating("Track", trackingInterval, trackingInterval);
    }
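InvokeRepeating schedules a Track method that is not part of this snippet. A rough sketch of the detection step such a method would have to perform, assuming a faceCascade field built from filePath and a frame field holding the current webcam image as an Emgu CV Image<Gray, byte> (both are assumptions):

    // Hypothetical Track() body; converting the WebCamTexture pixels into
    // the Emgu CV 'frame' image is assumed to happen elsewhere.
    void Track()
    {
        Rectangle[] faces = faceCascade.DetectMultiScale(frame);
        if (faces.Length == 0) return;

        using (VectorOfRect faceVec = new VectorOfRect(faces))
        using (VectorOfVectorOfPointF landmarks = new VectorOfVectorOfPointF())
        {
            if (facemark.Fit(frame, faceVec, landmarks))
            {
                // landmarks[0] holds the LBF points of the first detected face
            }
        }
    }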
Example #3
 public void CreateAndDispose()
 {
     using (var facemark = FacemarkLBF.Create())
     {
         GC.KeepAlive(facemark);
     }
 }
Example #4
        static void Main(string[] args)
        {
            // Configuration for the images-folder mode
            string imagesFolder   = "D:\\My Work\\VR\\images";
            string outputFilepath = "D:\\My Work\\VR\\test-data-norm.txt";

            // Configuration for the CSV-dataset mode
            Size   imageSize = new Size(350, 350);
            string csvInput  = "D:\\My Work\\VR\\dataset\\facial_emotions_2_ready.csv";
            string csvOutput = "D:\\My Work\\VR\\dataset\\faceexpress_dataset_v2_2.csv";

            // Models
            string faceDetectorModel = "D:\\My Work\\VR\\resources\\haarcascade_frontalface_alt2.xml";
            string facemarkModel     = "D:\\My Work\\VR\\resources\\lbfmodel.yaml";

            CascadeClassifier faceDetector   = new CascadeClassifier(faceDetectorModel);
            FacemarkLBFParams facemarkParams = new FacemarkLBFParams();
            FacemarkLBF       facemark       = new FacemarkLBF(facemarkParams);

            facemark.LoadModel(facemarkModel);

            if (runWithCsvDataset)
            {
                RunWithCsv(faceDetector, facemark, csvInput, csvOutput, imageSize);
            }
            else
            {
                RunWithImagesFolder(imagesFolder, outputFilepath, faceDetector, facemark);
            }

            Console.WriteLine("Program finished successfully!");
        }
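This Main also relies on several static flags (runWithCsvDataset, localiseFace, normalise, verbose) that are referenced here and in the two Run* methods further down but never declared in these snippets. A plausible set of declarations, assumed rather than taken from the original project:

            // Assumed configuration flags; the values are illustrative only.
            static bool runWithCsvDataset = true; // CSV dataset vs. images folder
            static bool localiseFace      = true; // run the face detector first
            static bool normalise         = true; // normalize facepoints before saving
            static bool verbose           = true; // print progress to the console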
Example #5
 public void CreateAndDisposeWithParameter()
 {
     using (var parameter = new FacemarkLBF.Params())
         using (var facemark = FacemarkLBF.Create(parameter))
         {
             GC.KeepAlive(facemark);
         }
 }
Example #6
 /// <summary>
 /// Release the memory associated with this facemark detector.
 /// </summary>
 protected override void DisposeObject()
 {
     if (_facemark != null)
     {
         _facemark.Dispose();
         _facemark = null;
     }
 }
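This override follows Emgu CV's Emgu.Util.DisposableObject pattern, in which DisposeObject is the hook invoked from Dispose(). A caller therefore only needs a using block; FacemarkDetector is an assumed name for the enclosing class:

 // Hypothetical usage; DisposeObject() above runs when the using block ends.
 using (var detector = new FacemarkDetector())
 {
     // ... detect landmarks ...
 }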
Example #7
        static void Main(string[] args)
        {
            using (var sourceMat = new Mat("source.jpg"))
                using (var destinationMat = new Mat("destination.jpg"))
                    using (var hc = new CascadeClassifier("HaarCascade.xml"))
                        using (var facemark = FacemarkLBF.Create())
                        {
                            Console.WriteLine("Face detection starting..");
                            var sourceFaceRects = hc.DetectMultiScale(sourceMat);
                            if (sourceFaceRects == null || sourceFaceRects.Length == 0)
                            {
                                Console.WriteLine($"Source image: No faces detected.");
                                return;
                            }
                            Console.WriteLine($"Source image: detected {sourceFaceRects.Length} faces.");

                            var destFaceRects = hc.DetectMultiScale(destinationMat);
                            if (destFaceRects == null || destFaceRects.Length == 0)
                            {
                                Console.WriteLine($"Destination image: No faces detected.");
                                return;
                            }
                            Console.WriteLine($"Destination image: detected {destFaceRects.Length} faces.");

                            facemark.LoadModel("lbfmodel.yaml");
                            using (var sourceInput = InputArray.Create(sourceFaceRects))
                                using (var destInput = InputArray.Create(destFaceRects))
                                {
                                    facemark.Fit(sourceMat, sourceInput, out Point2f[][] sourceLandmarks);
                                    var sourcePoints = sourceLandmarks[0];

                                    facemark.Fit(destinationMat, destInput, out Point2f[][] destLandmarks);
                                    var destPoints = destLandmarks[0];

                                    var triangles = destPoints.Take(60).GetDelaunayTriangles();
                                    var warps     = triangles.GetWarps(sourcePoints.Take(60), destPoints.Take(60));

                                    using (var warpedMat = sourceMat.ApplyWarps(destinationMat.Width, destinationMat.Height, warps))
                                        using (var mask = new Mat(destinationMat.Size(), MatType.CV_8UC3))
                                            using (var result = new Mat(destinationMat.Size(), MatType.CV_8UC3))
                                            {
                                                mask.SetTo(0);

                                                var convexHull = Cv2.ConvexHull(destPoints).Select(s => new Point(s.X, s.Y));
                                                Cv2.FillConvexPoly(mask, convexHull, Scalar.White);

                                                var rect   = Cv2.BoundingRect(convexHull);
                                                var center = new Point(rect.X + rect.Width / 2, rect.Y + rect.Height / 2);

                                                Cv2.SeamlessClone(warpedMat, destinationMat, mask, center, result, SeamlessCloneMethods.NormalClone);
                                                var blured = result.MedianBlur(5);
                                                blured.SaveImage("result.png");
                                            }
                                }
                        }
            Console.WriteLine("Done");
        }
Example #8
    public FaceLandmarksDetector(string faceDetectorModel, string faceLandmarkerModel)
    {
        // Load face detector model
        faceDetector = new CascadeClassifier(faceDetectorModel);

        // Load facemark model (face landmarker)
        FacemarkLBFParams facemarkParams = new FacemarkLBFParams();

        facemark = new FacemarkLBF(facemarkParams);
        facemark.LoadModel(faceLandmarkerModel);
    }
Example #9
        static private VectorOfPointF MarkFacialPoints(FacemarkLBF facemark, Image<Gray, byte> image, Rectangle faceRect, out bool isSuccess)
        {
            VectorOfVectorOfPointF landmarks = new VectorOfVectorOfPointF();
            VectorOfRect           faces     = new VectorOfRect(new Rectangle[] { faceRect });

            isSuccess = facemark.Fit(image, faces, landmarks);
            if (isSuccess)
            {
                return landmarks[0];     // landmarks for the first (and only) face rectangle
            }
            return new VectorOfPointF(); // empty vector on failure
        }
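A sketch of how MarkFacialPoints might be called for a single detected face; the faceDetector classifier and the grayImage input are assumed to exist in the calling scope:

        // Hypothetical call site for MarkFacialPoints above.
        Rectangle[] faces = faceDetector.DetectMultiScale(grayImage);
        if (faces.Length > 0)
        {
            VectorOfPointF points = MarkFacialPoints(facemark, grayImage, faces[0], out bool ok);
            if (ok)
            {
                FaceInvoke.DrawFacemarks(grayImage, points, new MCvScalar(0, 255, 0));
            }
        }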
Example #10
        public SettingsFm()
        {
            InitializeComponent();
            fParams.ModelFile  = @"lbfmodel.yaml";
            fParams.NLandmarks = 32; // number of landmark points
            fParams.InitShapeN = 10; // multiplier used for data augmentation
            fParams.StagesN    = 5;  // number of refinement stages
            fParams.TreeN      = 6;  // number of trees per landmark point
            fParams.TreeDepth  = 5;  // depth of each decision tree

            // Set all parameters before constructing the detector; changes made
            // after construction do not affect the native object.
            facemark = new FacemarkLBF(fParams);
            facemark.LoadModel(fParams.ModelFile);
        }
Example #11
        private void FindFacialFeaturePoints()
        {
            string facePath;

            try
            {
                // get face detect dataset
                facePath = Path.GetFileName(@"data/haarcascade_frontalface_default.xml");

                // get FFP dataset
                facemarkParam = new FacemarkLBFParams();
                facemark      = new FacemarkLBF(facemarkParam);
                facemark.LoadModel(@"data/lbfmodel.yaml");
            }
            catch (Exception ex)
            {
                throw new Exception(ex.Message);
            }

            // initialize imageMat
            currImageMat = CurrImageI.Mat;
            nextImageMat = NextImageI.Mat;

            // Current Face
            FacesListCurr = facesArrCurr.OfType<Rectangle>().ToList();

            // Find facial feature points
            VectorOfRect vrLeft = new VectorOfRect(facesArrCurr);

            landmarksCurr = new VectorOfVectorOfPointF();

            facemark.Fit(currImageMat, vrLeft, landmarksCurr);
            ffpCurr = landmarksCurr[curr.SelectedFace];


            // Next Face
            FacesListNext = facesArrNext.OfType<Rectangle>().ToList();

            // Find facial feature points
            VectorOfRect vrRight = new VectorOfRect(facesArrNext);

            landmarksNext = new VectorOfVectorOfPointF();

            facemark.Fit(nextImageMat, vrRight, landmarksNext);
            ffpNext = landmarksNext[next.SelectedFace];

            // Add Corner points
            ffpCurr = AddCornerPoints(ffpCurr, this.curr.ResizedImage.Mat);
            ffpNext = AddCornerPoints(ffpNext, this.next.ResizedImage.Mat);
        }
Example #12
        private void InitModel()
        {
            faceDetector = new CascadeClassifier(Constants.FACE_DETECTOR_PATH);
            FacemarkLBFParams fParams = new FacemarkLBFParams();

            fParams.ModelFile  = Constants.LANDMARK_DETECTOR_PATH;
            fParams.NLandmarks = 68; // number of landmark points
            fParams.InitShapeN = 10; // multiplier used for data augmentation
            fParams.StagesN    = 5;  // number of refinement stages
            fParams.TreeN      = 6;  // number of trees per landmark point
            fParams.TreeDepth  = 5;  // depth of each decision tree
            facemark           = new FacemarkLBF(fParams);
            facemark.LoadModel(fParams.ModelFile);
        }
Example #13
        private void InitFacemark()
        {
            if (_facemark == null)
            {
                InitPath();
                String facemarkFileName  = "lbfmodel.yaml";
                String facemarkFileUrl   = "https://raw.githubusercontent.com/kurnianggoro/GSOC2017/master/data/";
                String facemarkFileLocal = DnnPage.DnnDownloadFile(
                    facemarkFileUrl,
                    facemarkFileName,
                    _path);

                using (FacemarkLBFParams facemarkParam = new CV.Face.FacemarkLBFParams())
                {
                    _facemark = new CV.Face.FacemarkLBF(facemarkParam);
                    _facemark.LoadModel(facemarkFileLocal);
                }
            }
        }
Example #14
        public async Task Init(System.Net.DownloadProgressChangedEventHandler onDownloadProgressChanged = null)
        {
            if (_facemark == null)
            {
                FileDownloadManager manager = new FileDownloadManager();
                manager.AddFile("https://raw.githubusercontent.com/kurnianggoro/GSOC2017/master/data/lbfmodel.yaml", "facemark");
                if (onDownloadProgressChanged != null)
                {
                    manager.OnDownloadProgressChanged += onDownloadProgressChanged;
                }
                await manager.Download();

                using (FacemarkLBFParams facemarkParam = new CV.Face.FacemarkLBFParams())
                {
                    _facemark = new FacemarkLBF(facemarkParam);
                    _facemark.LoadModel(manager.Files[0].LocalFile);
                }
            }
        }
Example #15
        public Image<Bgr, Byte> GetFacePoints()
        {
            String facePath = Path.GetFullPath(@"../../data/haarcascade_frontalface_default.xml");

            //CascadeClassifier faceDetector = new CascadeClassifier(@"..\..\Resource\EMGUCV\haarcascade_frontalface_default.xml");
            CascadeClassifier faceDetector = new CascadeClassifier(facePath);
            FacemarkLBFParams fParams      = new FacemarkLBFParams();

            //fParams.ModelFile = @"..\..\Resource\EMGUCV\lbfmodel.yaml";
            fParams.ModelFile  = @"lbfmodel.yaml";
            fParams.NLandmarks = 68; // number of landmark points
            fParams.InitShapeN = 10; // multiplier used for data augmentation
            fParams.StagesN    = 5;  // number of refinement stages
            fParams.TreeN      = 6;  // number of trees per landmark point
            fParams.TreeDepth  = 5;  // depth of each decision tree
            FacemarkLBF facemark = new FacemarkLBF(fParams);
            //facemark.SetFaceDetector(MyDetector);

            Image<Bgr, Byte>  image     = new Image<Bgr, byte>("test.png");
            Image<Gray, byte> grayImage = image.Convert<Gray, byte>();

            grayImage._EqualizeHist();

            VectorOfRect           faces     = new VectorOfRect(faceDetector.DetectMultiScale(grayImage));
            VectorOfVectorOfPointF landmarks = new VectorOfVectorOfPointF();

            facemark.LoadModel(fParams.ModelFile);

            bool success = facemark.Fit(grayImage, faces, landmarks);

            if (success)
            {
                Rectangle[] facesRect = faces.ToArray();
                for (int i = 0; i < facesRect.Length; i++)
                {
                    image.Draw(facesRect[i], new Bgr(Color.Blue), 2);
                    FaceInvoke.DrawFacemarks(image, landmarks[i], new Bgr(Color.Blue).MCvScalar);
                }
                return image;
            }
            return null;
        }
Example #16
        public bool FacemarkSettings()
        {
            try
            {
                fParams.ModelFile  = @"lbfmodel.yaml";
                fParams.NLandmarks = 32; // number of landmark points
                fParams.InitShapeN = 10; // multiplier used for data augmentation
                fParams.StagesN    = 5;  // number of refinement stages
                fParams.TreeN      = 6;  // number of trees per landmark point
                fParams.TreeDepth  = 5;  // depth of each decision tree

                // Set all parameters before constructing the detector; changes made
                // after construction do not affect the native object.
                facemark = new FacemarkLBF(fParams);
                facemark.LoadModel(fParams.ModelFile);

                return true;
            }
            catch (Exception ex)
            {
                MessageBox.Show("An error occurred while loading the settings: " + ex.Message);
                return false;
            }
        }
Example #17
        public void GetFaces()
        {
            using (var parameter = new FacemarkLBF.Params())
            {
                parameter.CascadeFace = CascadeFile;

                using (var facemark = FacemarkLBF.Create(parameter))
                    using (var img = Image("lenna.png"))
                    {
                        bool ret = facemark.GetFaces(img, out var faces);
                        Assert.True(ret);
                        Assert.NotEmpty(faces);

                        if (Debugger.IsAttached)
                        {
                            foreach (var face in faces)
                            {
                                img.Rectangle(face, Scalar.Red, 2);
                            }
                            Window.ShowImages(img);
                        }
                    }
            }
        }
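GetFaces above only runs the face detector configured through Params.CascadeFace; fitting landmarks is a separate step. A sketch of that follow-up step, reusing the variable names from the test above and assuming an lbfmodel.yaml file is available next to the executable:

                        // Hypothetical continuation: fit landmarks on the faces found above.
                        facemark.LoadModel("lbfmodel.yaml");
                        using (var facesInput = InputArray.Create(faces))
                        {
                            facemark.Fit(img, facesInput, out Point2f[][] landmarks);
                            // landmarks[i] holds the fitted points for faces[i]
                        }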
Example #18
        static void Main(string[] args)
        {
            //prepare images for averaging
            PrepareImages();

            //load facemark model
            _facemark = FacemarkLBF.Create();
            _facemark.LoadModel("lbfmodel.yaml");

            //collection for all found facemarks
            var allFaceMarks = new List<List<Point2f>>();

            //facemark search and save
            foreach (var image in Directory.GetFiles(TempDirName))
            {
                using (var mat = new Mat(image))
                {
                    var facesRects = _cascade.DetectMultiScale(mat);
                    using (var facesRectsArray = InputArray.Create(facesRects))
                    {
                        _facemark.Fit(mat, facesRectsArray, out Point2f[][] landmarks);
                        // each image is expected to contain exactly one face
                        allFaceMarks.Add(landmarks[0].ToList());
                    }
                }
            }

            //add static points
            foreach (var facemarks in allFaceMarks)
            {
                facemarks.Add(new Point2f(1, 1));
                facemarks.Add(new Point2f(1, _outputSize.Height / 2));
                facemarks.Add(new Point2f(1, _outputSize.Height - 1));
                facemarks.Add(new Point2f(_outputSize.Width - 1, 1));
                facemarks.Add(new Point2f(_outputSize.Width / 2, _outputSize.Height - 1));
                facemarks.Add(new Point2f(_outputSize.Width - 1, _outputSize.Height / 2));
                facemarks.Add(new Point2f(_outputSize.Width - 1, _outputSize.Height - 1));
            }

            //average the facemarks across all images
            var averagePoints = new List<Point2f>();

            for (int i = 0; i < 75; i++) // 68 landmarks + 7 static points
            {
                float xSum = 0;
                float ySum = 0;
                for (int j = 0; j < allFaceMarks.Count; j++)
                {
                    var point = allFaceMarks[j][i];
                    xSum += point.X;
                    ySum += point.Y;
                }
                averagePoints.Add(new Point2f(xSum / allFaceMarks.Count, ySum / allFaceMarks.Count));
            }

            //calculate delaunay triangles
            var destinationTriangles = averagePoints.GetDelaunayTriangles();

            //create result mat
            var outputMat = new Mat(_outputSize, _matTypeDefault);

            outputMat.SetTo(0);

            // blending coeff
            var delta = 1.0 / allFaceMarks.Count;

            // warping and blending
            var files = Directory.GetFiles(TempDirName);

            for (int i = 0; i < files.Length; i++)
            {
                using (var mat = new Mat(files[i]))
                {
                    var landmarks = allFaceMarks[i];
                    var warps     = destinationTriangles.GetWarps(landmarks, averagePoints);
                    var warpedImg = mat.ApplyWarps(mat.Width, mat.Height, warps);
                    Cv2.AddWeighted(outputMat, 1, warpedImg, delta, 0, outputMat);
                }
            }

            //save
            outputMat.SaveImage("result.png");
            Console.WriteLine("Done.");
        }
Example #19
 public FaceLandmarkDetectorLBF(FaceDetector faceDetector, string landmarkModel)
 {
     this.faceDetector = faceDetector;
     this.facemark     = new FacemarkLBF(new FacemarkLBFParams());
     this.facemark.LoadModel(landmarkModel);
 }
Example #20
        static private void RunWithCsv(CascadeClassifier faceDetector, FacemarkLBF facemark, string inputFilepath, string outputFilepath, Size imageSize)
        {
            using (var csvreader = new CsvReader(new StreamReader(inputFilepath)))
                using (var csvwriter = new CsvWriter(new StreamWriter(outputFilepath, false)))
                {
                    csvwriter.WriteHeader<CsvFer2013ModRow>();
                    csvwriter.NextRecord();

                    var record   = new CsvFer2013Row();
                    var records  = csvreader.EnumerateRecords(record);
                    int recordId = 0;
                    foreach (var r in records)
                    {
                        recordId++;

                        Image<Gray, byte> image = StringToImage(r.pixels, imageSize);

                        Rectangle face = image.ROI;
                        if (localiseFace)
                        {
                            Rectangle? detectionResult = DetectFace(faceDetector, image);
                            if (!detectionResult.HasValue)
                            {
                                continue;
                            }
                            face = detectionResult.Value;
                        }

                        //Image<Bgr, byte> colorImage = image.Convert<Bgr, byte>();
                        //CvInvoke.Imshow("image", colorImage);
                        //CvInvoke.WaitKey();

                        VectorOfPointF landmarks = MarkFacialPoints(facemark, image, face, out bool isSuccess);
                        if (!isSuccess)
                        {
                            continue;
                        }

                        //FaceInvoke.DrawFacemarks(colorImage, landmarks, new Bgr(0, 0, 255).MCvScalar);
                        //CvInvoke.Imshow("landmarked image", colorImage);
                        //CvInvoke.WaitKey();
                        //CvInvoke.DestroyAllWindows();

                        PointF[] facepoints = landmarks.ToArray();
                        if (normalise)
                        {
                            NormalizeFacepoints(facepoints);
                        }

                        SerializeFacepointsWithCsv(csvwriter, r, recordId, ref facepoints);

                        if (verbose)
                        {
                            Console.Write("\rRecord No: {0}", recordId);
                        }
                    }
                    if (verbose)
                    {
                        Console.WriteLine();
                    }
                }
        }
Example #21
        static private void RunWithImagesFolder(string imagesFolder, string outputFilepath, CascadeClassifier faceDetector, FacemarkLBF facemark)
        {
            using (StreamWriter writer = new StreamWriter(outputFilepath, false))
                foreach (string filename in Directory.EnumerateFiles(imagesFolder))
                {
                    // read directly as grayscale; ToImage avoids the Bitmap round-trip
                    Image<Gray, byte> image = CvInvoke.Imread(filename, ImreadModes.Grayscale).ToImage<Gray, byte>();

                    Rectangle face = image.ROI;
                    if (localiseFace)
                    {
                        Rectangle? detectionResult = DetectFace(faceDetector, image);
                        if (!detectionResult.HasValue)
                        {
                            continue;
                        }
                        face = detectionResult.Value;
                    }

                    VectorOfPointF landmarks = MarkFacialPoints(facemark, image, face, out bool isSuccess);
                    if (!isSuccess)
                    {
                        continue;
                    }

                    PointF[] facepoints = landmarks.ToArray();
                    if (normalise)
                    {
                        NormalizeFacepoints(facepoints);
                    }

                    SerializeFacepoints(writer, filename, ref facepoints);
                }
        }
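The DetectFace helper called by both runners is not among these examples. A minimal sketch consistent with its call sites (returns null when no face is found, otherwise the largest detection; requires System.Linq):

        // Hypothetical helper matching the call sites above.
        static private Rectangle? DetectFace(CascadeClassifier faceDetector, Image<Gray, byte> image)
        {
            Rectangle[] faces = faceDetector.DetectMultiScale(image);
            if (faces.Length == 0)
            {
                return null;
            }
            // keep the largest detection
            return faces.OrderByDescending(f => f.Width * f.Height).First();
        }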