static void Main(string[] args)
{
    // Load the input image.
    var img = Dlib.LoadImage<RgbPixel>(imgFilePath);

    // Detect faces and outline each one in blue.
    FaceDetector faceDetector = new FaceDetector(facialLandmarksSerializedPredictor);
    Rectangle[] facesBoundingBoxes = faceDetector.DetectFacesBoundingBoxes(img);
    foreach (var bb in facesBoundingBoxes)
    {
        Dlib.DrawRectangle(img, bb, color: new RgbPixel(0, 0, 255), thickness: 3);
    }

    // Draw landmark points (red) and an eyes bounding box (green) for every face.
    // (A largest-face-only variant previously existed as commented-out code.)
    if (facesBoundingBoxes.Length > 0)
    {
        foreach (var subjectFaceBoundingBox in facesBoundingBoxes)
        {
            // Obtain facial landmarks for this face and draw them.
            var landmarks = faceDetector.DetectFacialLandmarks(img, subjectFaceBoundingBox);
            foreach (Point p in landmarks)
            {
                Dlib.DrawRectangle(img, new Rectangle(p), color: new RgbPixel(255, 0, 0), thickness: 3);
            }

            // Eyes bounding box: eyebrows define the top corners, upper nose the bottom.
            var topLeft = new Point(landmarks[FacialLandmarks.RIGHT_EYEBROW].X, landmarks[FacialLandmarks.RIGHT_EYEBROW].Y);
            var bottomRight = new Point(landmarks[FacialLandmarks.LEFT_EYEBROW].X, landmarks[FacialLandmarks.UPPER_NOSE].Y);
            var eyesBoundingBox = new Rectangle(topLeft, bottomRight);
            Dlib.DrawRectangle(img, eyesBoundingBox, color: new RgbPixel(0, 255, 0), thickness: 3);
        }
    }

    // Build the output path "<dir>/<name>_out.<ext>".
    // BUG FIX: the old code took Split('.')[1] as the extension, which is wrong for
    // file names containing more than one dot (e.g. "photo.v2.jpg" -> "v2"), and its
    // Replace("."+ext, ...) could also match an earlier occurrence in the name.
    // Use the LAST dot to split name from extension instead.
    var tmpStrArray = imgFilePath.Split('/');
    string fileName = tmpStrArray[tmpStrArray.Length - 1];
    int dotIndex = fileName.LastIndexOf('.');
    string baseName = dotIndex >= 0 ? fileName.Substring(0, dotIndex) : fileName;
    string outFileName = dotIndex >= 0
        ? baseName + "_out." + fileName.Substring(dotIndex + 1)
        : baseName + "_out";
    string outFilePath = String.Join('/', tmpStrArray.SkipLast(1).ToArray()) + '/' + outFileName;
    Console.WriteLine(outFilePath);

    // Write the annotated image.
    faceDetector.WriteImageToFilePath(img, outFilePath);
}
// NOTE(review): despite the "Async" suffix this method is fully synchronous — it
// blocks on MakeAnalysisRequestAsync(...).Wait(); confirm callers expect blocking.
// NOTE(review): the method's closing brace appears to be missing in this chunk;
// the definition looks truncated here.
public static void DetectFacesAsync(string inputFilePath, string subscriptionKey, string uriBase, IFaceClient client, string vocabularyPath)
{
    // set up Dlib facedetector
    DirectoryInfo dir = new DirectoryInfo(inputFilePath);
    using (var fd = Dlib.GetFrontalFaceDetector())
    {
        // Process every JPEG in the input directory.
        foreach (FileInfo files in dir.GetFiles("*.jpg"))
        {
            // assumes inputFilePath ends with a directory separator — TODO confirm
            string _inputFilePath = inputFilePath + files.Name;
            // load input image
            Array2D<RgbPixel> img = Dlib.LoadImage<RgbPixel>(_inputFilePath);
            // find all faces in the image
            Rectangle[] faces = fd.Operator(img);
            if (faces.Length != 0)
            {
                // At least one face: forward the picture to the Azure Face API (blocking).
                Console.WriteLine("Picture " + files.Name + " have faces, sending data to Azure");
                MakeAnalysisRequestAsync(_inputFilePath, subscriptionKey, uriBase, files.Name, client, vocabularyPath).Wait();
            }
            foreach (var face in faces)
            {
                // draw a rectangle for each face
                Dlib.DrawRectangle(img, face, color: new RgbPixel(0, 255, 255), thickness: 4);
            }
            // export the modified image — assumes ./Results/ exists; TODO confirm
            Dlib.SaveJpeg(img, "./Results/" + files.Name);
        }
    }
/// <summary>
/// The main program entry point
/// </summary>
/// <param name="args">The command line arguments</param>
static void Main(string[] args)
{
    // Create the face detector and the 68-point landmark predictor together
    // so both are released when we are done.
    using (var detector = Dlib.GetFrontalFaceDetector())
    using (var predictor = ShapePredictor.Deserialize("shape_predictor_68_face_landmarks.dat"))
    {
        // Read the picture we want to annotate.
        var img = Dlib.LoadImage<RgbPixel>(inputFilePath);

        // Locate every face, then mark each of its landmark points in yellow.
        foreach (var face in detector.Operator(img))
        {
            var shape = predictor.Detect(img, face);
            for (var idx = 0u; idx < shape.Parts; idx++)
            {
                var landmark = shape.GetPart(idx);
                Dlib.DrawRectangle(img, new Rectangle(landmark), color: new RgbPixel(255, 255, 0), thickness: 4);
            }
        }

        // Persist the annotated picture.
        Dlib.SaveJpeg(img, "output.jpg");
    }
}
/// <summary>
/// Detects faces in the given image, draws every 68-model landmark point as a
/// colored rectangle (color depends on which facial feature the point belongs
/// to), and saves the annotated image as a PNG.
/// </summary>
/// <param name="image">The image file to annotate.</param>
public static void DrawPointsOfLandmarks(FileInfo image)
{
    using (var fd = Dlib.GetFrontalFaceDetector())
    using (var sp = ShapePredictor.Deserialize(GetFile(ShapePredictorFileName).FullName))
    {
        using (var img = Dlib.LoadImage<RgbPixel>(image.FullName))
        {
            var faces = fd.Operator(img);
            // for each face draw over the facial landmarks
            foreach (var face in faces)
            {
                var shape = sp.Detect(img, face);
                for (var i = 0; i < shape.Parts; i++)
                {
                    var point = shape.GetPart((uint)i);
                    var rect = new Rectangle(point);
                    if (i == 0)
                    {
                        // first jaw point — drawn extra large in white
                        Dlib.DrawRectangle(img, rect, color: new RgbPixel(255, 255, 255), thickness: 8);
                    }
                    else if (i == 21 || i == 22 || i == 39 || i == 42 || i == 33 || i == 51 || i == 57 || i == 48 || i == 54)
                    {
                        // key anchor points (inner eyebrows, inner eye corners,
                        // nose tip, mouth corners, lip centers) in magenta
                        Dlib.DrawRectangle(img, rect, color: new RgbPixel(255, 0, 255), thickness: 4);
                    }
                    // BUG FIX: points 21, 22, 48 and 54 were repeated in the branches
                    // below, but those conditions were unreachable — the magenta branch
                    // above always matched first. The dead conditions are removed and the
                    // comments corrected (18-21 / 22-25 are eyebrow ranges in the 68-point
                    // model, not eyes).
                    else if (i == 18 || i == 19 || i == 20) // eyebrow (21 handled above)
                    {
                        Dlib.DrawRectangle(img, rect, color: new RgbPixel(255, 0, 0), thickness: 6);
                    }
                    else if (i == 23 || i == 24 || i == 25) // other eyebrow (22 handled above)
                    {
                        Dlib.DrawRectangle(img, rect, color: new RgbPixel(255, 128, 0), thickness: 6);
                    }
                    else if (i == 49 || i == 50) // upper lip, left side (48 handled above)
                    {
                        Dlib.DrawRectangle(img, rect, color: new RgbPixel(255, 255, 0), thickness: 2);
                    }
                    else if (i == 52 || i == 53) // upper lip, right side (54 handled above)
                    {
                        Dlib.DrawRectangle(img, rect, color: new RgbPixel(255, 0, 128), thickness: 2);
                    }
                    else
                    {
                        // every remaining landmark in black
                        Dlib.DrawRectangle(img, rect, color: new RgbPixel(0, 0, 0), thickness: 4);
                    }
                }
                // BUG FIX: PNG data was previously written to "output.jpg";
                // the extension now matches the encoder.
                Dlib.SavePng(img, "output.png");
            }
        }
    }
}
/// <summary>
/// Runs the frontal face detector on the image at the given path
/// (resolved against the application base directory) and returns
/// the detected face rectangles.
/// </summary>
/// <param name="url">Image path relative to the application base directory.</param>
/// <returns>Bounding rectangles of all detected faces.</returns>
public static Rectangle[] GetResult(string url)
{
    var fullPath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, url);
    Rectangle[] detections = new Rectangle[0];
    using (var detector = Dlib.GetFrontalFaceDetector())
    using (var img = Dlib.LoadImage<RgbPixel>(fullPath))
    {
        detections = detector.Operator(img);
    }
    return detections;
}
/// <summary>
/// Loads an image, annotates the faces on it via the
/// <c>DetectFacesOnImage(image)</c> overload, and saves the result as JPEG.
/// </summary>
/// <param name="sourceImagePath">Path of the image to read.</param>
/// <param name="destImagePath">Path the annotated JPEG is written to.</param>
public static void DetectFacesOnImage(string sourceImagePath, string destImagePath)
{
    // FIX: the previous version created a FrontalFaceDetector here but never
    // used it (the overload performs its own detection), so that allocation is
    // removed; the loaded image is now disposed as well.
    using (var image = Dlib.LoadImage<RgbPixel>(sourceImagePath))
    {
        DetectFacesOnImage(image);
        // export the modified image
        Dlib.SaveJpeg(image, destImagePath);
    }
}
/// <summary>
/// Detects all faces in the image at <c>path</c>, outlines each one in cyan,
/// and saves the annotated image to disk.
/// </summary>
public void FindFaces()
{
    using (var fd = Dlib.GetFrontalFaceDetector())
    using (var img = Dlib.LoadImage<RgbPixel>(path))
    {
        // find all faces in the image
        var faces = fd.Operator(img);
        foreach (var face in faces)
        {
            // draw a rectangle for each face
            Dlib.DrawRectangle(img, face, color: new RgbPixel(0, 255, 255), thickness: 4);
        }
        // BUG FIX: JPEG data was previously written to a ".png" path; the
        // extension now matches the JPEG encoder.
        Dlib.SaveJpeg(img, @"D:\output.jpg");
    }
}
/// <summary>
/// Returns the number of faces the frontal face detector finds in the image.
/// </summary>
/// <param name="file">Path of the image to analyze.</param>
/// <returns>The count of detected faces (0 when none are found).</returns>
public static int Number(string file)
{
    // The detector already returns an array, so its length is the answer —
    // the previous manual counting loop was unnecessary. The image is also
    // disposed now.
    using (var fd = Dlib.GetFrontalFaceDetector())
    using (var img = Dlib.LoadImage<RgbPixel>(file))
    {
        return fd.Operator(img).Length;
    }
}
/// <summary>
/// Detects every face in the given image, outlines each one in cyan,
/// and overwrites the original file with the annotated result.
/// </summary>
/// <param name="file">Path of the image to annotate in place.</param>
public static void Recognize(string file)
{
    using (var detector = Dlib.GetFrontalFaceDetector())
    {
        var image = Dlib.LoadImage<RgbPixel>(file);
        var detections = detector.Operator(image);
        foreach (var detection in detections)
        {
            Dlib.DrawRectangle(image, detection, color: new RgbPixel(0, 255, 255), thickness: 4);
        }
        // Note: the input file is overwritten with the annotated image.
        Dlib.SaveJpeg(image, file);
    }
}
/// <summary>
/// Extract features from an image and store it in <see cref="FaceData1"/>.
/// Only the first detected face is used.
/// </summary>
/// <param name="imageFileInfo">File info of the image.</param>
/// <param name="sp">Shape predictor used for landmark detection.</param>
/// <param name="fd">Frontal face detector.</param>
/// <param name="getLabel">Whether to get the label or not. False if not using for prediction.</param>
/// <returns>The extracted face data, or null when no face is found.</returns>
/// <seealso cref="GetFaceDataPoints1"/>
static FaceData1 GetFaceData1FromImage(FileInfo imageFileInfo, ShapePredictor sp, FrontalFaceDetector fd, bool getLabel = true)
{
    using (var img = Dlib.LoadImage<RgbPixel>(imageFileInfo.FullName))
    {
        foreach (var face in fd.Operator(img))
        {
            // Landmark the first face and convert it to a feature record.
            var shape = sp.Detect(img, face);
            var label = getLabel
                ? GetLabel(imageFileInfo)
                : "Not getting label, see argument this function was called with.";
            return GetFaceDataPoints1(ref shape, label);
        }
    }
    Debug.WriteLine($"Unable to get facial feature from {imageFileInfo.Name} as no faces were found!");
    return null;
}
/// <summary>
/// The main program entry point
/// </summary>
/// <param name="args">The command line arguments</param>
static void Main(string[] args)
{
    // Detect every face in the input image, outline it in cyan, and save the result.
    using (var detector = Dlib.GetFrontalFaceDetector())
    {
        var image = Dlib.LoadImage<RgbPixel>(inputFilePath);
        foreach (var face in detector.Operator(image))
        {
            Dlib.DrawRectangle(image, face, color: new RgbPixel(0, 255, 255), thickness: 4);
        }
        // Persist the annotated image.
        Dlib.SaveJpeg(image, "output.jpg");
    }
}
/// <summary>
/// Loads an image, publishes a 150x150 aligned chip of each detected face to
/// the UI (<c>MainWindow.main.Statusa1</c>), outlines the faces, and finally
/// publishes the annotated frame (<c>MainWindow.main.Statusa</c>).
/// </summary>
/// <param name="imagePath">Path of the image to process.</param>
public void GetImage(string imagePath)
{
    Array2D<RgbPixel> image = Dlib.LoadImage<RgbPixel>(imagePath);
    using (FrontalFaceDetector fd = Dlib.GetFrontalFaceDetector())
    {
        var faces = fd.Operator(image);
        foreach (DlibDotNet.Rectangle face in faces)
        {
            // NOTE(review): the FullObjectDetection is still not disposed here —
            // confirm whether its lifetime is managed elsewhere.
            FullObjectDetection shape = _ShapePredictor.Detect(image, face);
            // FIX: the chip details and the extracted chip were leaked before;
            // both are now disposed once the bitmap has been created.
            using (ChipDetails faceChipDetail = Dlib.GetFaceChipDetails(shape, 150, 0.25))
            using (Array2D<RgbPixel> faceChip = Dlib.ExtractImageChip<RgbPixel>(image, faceChipDetail))
            {
                Bitmap bitmap1 = faceChip.ToBitmap<RgbPixel>();
                // The UI takes ownership of the bitmap — do not dispose it here.
                MainWindow.main.Statusa1 = bitmap1;
            }
            Dlib.DrawRectangle(image, face, color: new RgbPixel(0, 255, 255), thickness: 4);
        }
    }
    Bitmap frame = image.ToBitmap<RgbPixel>();
    MainWindow.main.Statusa = frame;
}
/// <summary>
/// Loads the trained ML.NET expression model, extracts geometric facial
/// features from the first face found in the given image, and returns the
/// predicted expression name.
/// </summary>
/// <param name="dir">Path of the image to classify.</param>
/// <returns>The predicted expression, or "N/A" when no face is found.</returns>
public static string TestCustomImage(string dir)
{
    // Load the trained model and build a prediction engine.
    DataViewSchema predictionPipelineSchema;
    ITransformer predictionPipeline = mlContext.Model.Load("model.zip", out predictionPipelineSchema);
    PredictionEngine<FeatureInputData, ExpressionPrediction> predictionEngine = mlContext.Model.CreatePredictionEngine<FeatureInputData, ExpressionPrediction>(predictionPipeline);

    // FIX: the image is IDisposable and was never released — it is now part of
    // the using chain together with the detector and shape predictor.
    using (var img = Dlib.LoadImage<RgbPixel>(dir))
    using (var fd = Dlib.GetFrontalFaceDetector())
    using (var sp = ShapePredictor.Deserialize("shape_predictor_68_face_landmarks.dat"))
    {
        // find all faces in the image
        var faces = fd.Operator(img);
        foreach (var face in faces)
        {
            // Landmark the face and derive the features the model expects.
            var shape = sp.Detect(img, face);
            FeatureInputData inputData = new FeatureInputData
            {
                leftEyebrow = CalculateLeftEyebrow(shape),
                rightEyebrow = CalculateRightEyebrow(shape),
                leftLip = CalculateLeftLip(shape),
                rightLip = CalculateRightLip(shape),
                lipWidth = CalculateLipWidth(shape),
                lipHeight = CalculateLipHeight(shape)
            };
            // Only the first detected face is classified.
            ExpressionPrediction prediction = predictionEngine.Predict(inputData);
            return prediction.expression.ToString();
        }
    }
    return "N/A";
}
// The main program entry point.
// Walks every image under ./images, detects faces and their 68 landmarks,
// draws the landmarks, saves annotated copies, and finally writes one CSV row
// of derived features (plus an optional mirrored row) per detected face.
static void Main(string[] args)
{
    // When true, a horizontally-mirrored feature row is also written for each face.
    bool use_mirror = false;
    // file paths
    string[] files = Directory.GetFiles("images", "*.*", SearchOption.AllDirectories);
    // Landmarks and emotion labels collected across all images, written out at the end.
    List<FullObjectDetection> shapes = new List<FullObjectDetection>();
    List<string> emotions = new List<string>();
    // Set up Dlib Face Detector
    using (var fd = Dlib.GetFrontalFaceDetector())
    // ... and Dlib Shape Detector
    using (var sp = ShapePredictor.Deserialize("shape_predictor_68_face_landmarks.dat"))
    {
        // load input image
        for (int i = 0; i < files.Length; i++)
        {
            // Emotion label is derived from the file path — see GetEmotion.
            var emotion = GetEmotion(files[i]);
            var img = Dlib.LoadImage<RgbPixel>(files[i]);
            // find all faces in the image
            var faces = fd.Operator(img);
            // for each face draw over the facial landmarks
            foreach (var face in faces)
            {
                // find the landmark points for this face
                var shape = sp.Detect(img, face);
                shapes.Add(shape);
                emotions.Add(emotion);
                // draw the landmark points on the image
                for (var i2 = 0; i2 < shape.Parts; i2++)
                {
                    var point = shape.GetPart((uint)i2);
                    var rect = new Rectangle(point);
                    // Points 22 and 40 are highlighted in green; all others yellow.
                    if (point == GetPoint(shape, 40) || point == GetPoint(shape, 22))
                    {
                        Dlib.DrawRectangle(img, rect, color: new RgbPixel(0, 255, 0), thickness: 4);
                    }
                    else
                    {
                        Dlib.DrawRectangle(img, rect, color: new RgbPixel(255, 255, 0), thickness: 4);
                    }
                }
            }
            // export the modified image
            // NOTE(review): "output_" + files[i] keeps the "images\..." directory prefix,
            // so this writes into "output_images\..." — confirm that directory exists.
            Console.WriteLine(files[i]);
            Dlib.SaveJpeg(img, "output_" + files[i]);
        }
        // Write the CSV: header first, then one row per collected face.
        // NOTE(review): the header names "lipHeight,lipWidth" but the rows write
        // GetLipWidth before GetLipHeight — column order looks swapped; verify
        // against the consumer of feature_vectors.csv.
        string header = "leftEyebrow,rightEyebrow,leftLip,rightLip,lipHeight,lipWidth,emotion\n";
        System.IO.File.WriteAllText(@"feature_vectors.csv", header);
        for (var i = 0; i < shapes.Count; i++)
        {
            var shape = shapes[i];
            var emotion = emotions[i];
            using (System.IO.StreamWriter file = new System.IO.StreamWriter(@"feature_vectors.csv", true))
            {
                file.WriteLine(GetLeftEyebrow(shape) + "," + GetRightEyebrow(shape) + "," + GetLeftLip(shape) + "," + GetRightLip(shape) + "," + GetLipWidth(shape) + "," + GetLipHeight(shape) + "," + emotion);
                if (use_mirror)
                {
                    // Mirrored sample: left/right eyebrow and lip features are swapped.
                    file.WriteLine(GetRightEyebrow(shape) + "," + GetLeftEyebrow(shape) + "," + GetRightLip(shape) + "," + GetLeftLip(shape) + "," + GetLipWidth(shape) + "," + GetLipHeight(shape) + "," + emotion);
                }
            }
        }
    }
}
/// <summary>
/// The main program entry point.
/// Detects faces and landmarks, solves the head pose (PnP) for each face,
/// draws the key landmarks and a nose-direction line, and boxes faces that
/// are judged to be facing forward.
/// </summary>
/// <param name="args">The command line arguments</param>
static void Main(string[] args)
{
    // set up Dlib facedetectors and shapedetectors
    using (var fd = Dlib.GetFrontalFaceDetector())
    using (var sp = ShapePredictor.Deserialize("shape_predictor_68_face_landmarks.dat"))
    {
        // load input image
        var img = Dlib.LoadImage<RgbPixel>(inputFilePath);
        // find all faces in the image
        var faces = fd.Operator(img);
        foreach (var face in faces)
        {
            // find the landmark points for this face
            var shape = sp.Detect(img, face);
            // build the 3d face model
            var model = Utility.GetFaceModel();
            // get the landmark point we need: nose tip (30), chin (8), eye corners
            // (36, 45) and mouth corners (48, 54) — must match the 3D model's point order.
            var landmarks = new MatOfPoint2d(1, 6, (from i in new int[] { 30, 8, 36, 45, 48, 54 } let pt = shape.GetPart((uint)i) select new OpenCvSharp.Point2d(pt.X, pt.Y)).ToArray());
            // build the camera matrix
            var cameraMatrix = Utility.GetCameraMatrix((int)img.Rect.Width, (int)img.Rect.Height);
            // build the coefficient matrix (zero distortion assumed)
            var coeffs = new MatOfDouble(4, 1);
            coeffs.SetTo(0);
            // find head rotation and translation
            Mat rotation = new MatOfDouble();
            Mat translation = new MatOfDouble();
            Cv2.SolvePnP(model, landmarks, cameraMatrix, coeffs, rotation, translation);
            // find euler angles
            var euler = Utility.GetEulerMatrix(rotation);
            // calculate head rotation in degrees
            var yaw = 180 * euler.At<double>(0, 2) / Math.PI;
            var pitch = 180 * euler.At<double>(0, 1) / Math.PI;
            var roll = 180 * euler.At<double>(0, 0) / Math.PI;
            // looking straight ahead wraps at -180/180, so make the range smooth
            pitch = Math.Sign(pitch) * 180 - pitch;
            // calculate if the driver is facing forward
            // the left/right angle must be in the -25..25 range
            // the up/down angle must be in the -10..10 range
            var facingForward = yaw >= -25 && yaw <= 25 && pitch >= -10 && pitch <= 10;
            // create a new model point in front of the nose, and project it into 2d
            var poseModel = new MatOfPoint3d(1, 1, new Point3d(0, 0, 1000));
            var poseProjection = new MatOfPoint2d();
            Cv2.ProjectPoints(poseModel, rotation, translation, cameraMatrix, coeffs, poseProjection);
            // draw the key landmark points in yellow on the image
            foreach (var i in new int[] { 30, 8, 36, 45, 48, 54 })
            {
                var point = shape.GetPart((uint)i);
                var rect = new Rectangle(point);
                Dlib.DrawRectangle(img, rect, color: new RgbPixel(255, 255, 0), thickness: 4);
            }
            // draw a line from the tip of the nose pointing in the direction of head pose
            var landmark = landmarks.At<Point2d>(0);
            var p = poseProjection.At<Point2d>(0);
            Dlib.DrawLine(img, new DlibDotNet.Point((int)landmark.X, (int)landmark.Y), new DlibDotNet.Point((int)p.X, (int)p.Y), color: new RgbPixel(0, 255, 255));
            // draw a box around the face if it's facing forward
            if (facingForward)
            {
                Dlib.DrawRectangle(img, face, color: new RgbPixel(0, 255, 255), thickness: 4);
            }
        }
        // export the modified image
        Dlib.SaveJpeg(img, "output.jpg");
    }
}
/// <summary>
/// Detects faces in the given image, computes normalized eyebrow/lip distance
/// features from the 68-point landmark model, draws the landmarks, saves the
/// annotated image as "output.jpg", appends one CSV row, and returns the
/// feature values. Features reflect the LAST detected face.
/// </summary>
/// <param name="str">Path of the image to analyze.</param>
/// <returns>The computed feature values (defaults when no face is found).</returns>
private static InputDataImages GetFeaturesValuesFromImage(string str)
{
    var returnClass = new InputDataImages();
    using (var fd = Dlib.GetFrontalFaceDetector())
    // ... and Dlib Shape Detector
    using (var sp = ShapePredictor.Deserialize("shape_predictor_68_face_landmarks.dat"))
    {
        // load input image
        var img = Dlib.LoadImage<RgbPixel>(str);
        // find all faces in the image
        var faces = fd.Operator(img);
        foreach (var face in faces)
        {
            // find the landmark points for this face
            var shape = sp.Detect(img, face);
            // draw the landmark points on the image
            for (var i = 0; i < shape.Parts; i++)
            {
                var point = shape.GetPart((uint)i);
                var rect = new Rectangle(point);
                Dlib.DrawRectangle(img, rect, color: new RgbPixel(255, 255, 0), thickness: 4);
            }

            // Distances from eyebrow points to an eye-corner reference (39 / 42)
            // and from lip points to the nose tip (33).
            double[] LeftEyebrowDistances = new double[4];
            double[] RightEyebrowDistances = new double[4];
            float LeftEyebrowSum = 0;
            float RightEyebrowSum = 0;
            double[] LeftLipDistances = new double[4];
            double[] RightLipDistances = new double[4];
            float LeftLipSum = 0;
            float RightLipSum = 0;

            LeftEyebrowDistances[0] = (shape.GetPart(21) - shape.GetPart(39)).Length;
            LeftEyebrowDistances[1] = (shape.GetPart(20) - shape.GetPart(39)).Length;
            LeftEyebrowDistances[2] = (shape.GetPart(19) - shape.GetPart(39)).Length;
            LeftEyebrowDistances[3] = (shape.GetPart(18) - shape.GetPart(39)).Length;

            RightEyebrowDistances[0] = (shape.GetPart(22) - shape.GetPart(42)).Length;
            RightEyebrowDistances[1] = (shape.GetPart(23) - shape.GetPart(42)).Length;
            RightEyebrowDistances[2] = (shape.GetPart(24) - shape.GetPart(42)).Length;
            RightEyebrowDistances[3] = (shape.GetPart(25) - shape.GetPart(42)).Length;

            // Lip distances; index 0 (point 51) serves as the normalization base.
            LeftLipDistances[0] = (shape.GetPart(51) - shape.GetPart(33)).Length;
            LeftLipDistances[1] = (shape.GetPart(50) - shape.GetPart(33)).Length;
            LeftLipDistances[2] = (shape.GetPart(49) - shape.GetPart(33)).Length;
            LeftLipDistances[3] = (shape.GetPart(48) - shape.GetPart(33)).Length;

            RightLipDistances[0] = (shape.GetPart(51) - shape.GetPart(33)).Length;
            RightLipDistances[1] = (shape.GetPart(52) - shape.GetPart(33)).Length;
            RightLipDistances[2] = (shape.GetPart(53) - shape.GetPart(33)).Length;
            RightLipDistances[3] = (shape.GetPart(54) - shape.GetPart(33)).Length;

            // Normalized sums (the i == 0 term is always 1 — kept for parity
            // with the training data).
            for (int i = 0; i < 4; i++)
            {
                LeftEyebrowSum += (float)(LeftEyebrowDistances[i] / LeftEyebrowDistances[0]);
                RightEyebrowSum += (float)(RightEyebrowDistances[i] / RightEyebrowDistances[0]);
            }
            LeftLipSum += (float)(LeftLipDistances[1] / LeftLipDistances[0]);
            LeftLipSum += (float)(LeftLipDistances[2] / LeftLipDistances[0]);
            LeftLipSum += (float)(LeftLipDistances[3] / LeftLipDistances[0]);
            RightLipSum += (float)(RightLipDistances[1] / RightLipDistances[0]);
            RightLipSum += (float)(RightLipDistances[2] / RightLipDistances[0]);
            RightLipSum += (float)(RightLipDistances[3] / RightLipDistances[0]);

            // Lip width/height normalized by the nose-to-upper-lip distance.
            double LipWidth = (float)((shape.GetPart(48) - shape.GetPart(54)).Length / (shape.GetPart(33) - shape.GetPart(51)).Length);
            double LipHeight = (float)((shape.GetPart(51) - shape.GetPart(57)).Length / (shape.GetPart(33) - shape.GetPart(51)).Length);

            returnClass.LeftEyebrow = LeftEyebrowSum;
            // BUG FIX: this previously assigned RightLipSum (copy-paste error)
            // while RightEyebrowSum was computed but never used.
            returnClass.RightEyebrow = RightEyebrowSum;
            returnClass.LeftLip = LeftLipSum;
            returnClass.RightLip = RightLipSum;
            returnClass.LipWidth = (float)LipWidth;
            returnClass.LipHeight = (float)LipHeight;

            // export the modified image
            string filePath = "output" + ".jpg";
            Dlib.SaveJpeg(img, filePath);
        }
    }
    // Append one CSV row labelled with the parent folder name.
    // NOTE(review): str is a file path but is passed to DirectoryInfo — the
    // "parent" is therefore the file's containing directory; confirm intent.
    using (System.IO.StreamWriter file = new System.IO.StreamWriter(@"TestingFeatureVectorValues.csv", true))
    {
        DirectoryInfo dr = new DirectoryInfo(str);
        string ParentFolderName = dr.Parent.Name.ToString();
        file.WriteLine(ParentFolderName + "," + returnClass.LeftEyebrow.ToString() + "," + returnClass.RightEyebrow.ToString() + "," + returnClass.LeftLip.ToString() + "," + returnClass.RightLip.ToString() + "," + returnClass.LipWidth.ToString() + "," + returnClass.LipHeight.ToString());
    }
    return returnClass;
}
// Synchronizes worker/person-group data between the web application, the local
// DB, and the Azure Face API, then enters an infinite loop that watches
// inputFilePath for new *.jpeg files, runs local Dlib face detection on them,
// and forwards pictures with faces to the Azure Face API for identification.
// NOTE(review): despite the "Async" suffix this method is synchronous and
// blocks on every .Wait() call; it also never returns (enabledInfiniteLoop is
// never set to false).
public static void DetectFacesAsync(string inputFilePath, string subscriptionKey, string uriBase, IFaceClient client, string databaseConnString)
{
    //GetTheListOfPersonGroupsFromAPIAndDeleteThem(client); //temporarily
    // Getting workers from web and from local db
    List<List<WorkersForProcessing>> listOfWorkerLists = GetWorkersFromWebApplicationInDifferentListsDividedByClientID(databaseConnString);
    List<Client> clientsInDB = GetClientListFromLocalDB();
    List<DatabaseInfoAboutWorker> workersFromLocalDB = GetWorkersFromLocalDB();
    // Change workersFromLocalDB format from List<DatabaseInfoAboutWorker> to List<List<WorkersForProcessing>>
    List<List<WorkersForProcessing>> listOfWorkerListsFromLocalDB = GroupUpWorkersFromLocalDBInDifferentListsDividedByClientID(workersFromLocalDB);
    // Prepare List, which will help update list or lists in API
    ListForUpdatingWithGroupId listForUpdatingWithGroupId = new ListForUpdatingWithGroupId();
    listForUpdatingWithGroupId.ListOfGroupId = new List<string>();
    listForUpdatingWithGroupId.ListOfWorkerGroupsForUpdating = new List<List<WorkersForProcessing>>();
    listForUpdatingWithGroupId.UpdateSource = new List<List<WorkersForProcessing>>();
    bool listsAreEqual = true;
    Client clientForUpdate = new Client();
    List<Client> listWithClientsFromWEB = new List<Client>();
    // Build one Client (with a fresh person-group GUID) per worker list from the web app.
    foreach (var workerList in listOfWorkerLists)
    {
        Client newClient = new Client();
        newClient.PersonGroupID = Guid.NewGuid().ToString();
        newClient.ClientID = workerList[0].ClientID;
        listWithClientsFromWEB.Add(newClient);
    }
    // Check if local DB have old Clients, and if it does,
    // then thansfer PersonGroupID(API) to the Clients in the new list
    if ((listWithClientsFromWEB.Count() != 0) && (clientsInDB.Count() != 0))
    {
        if (listWithClientsFromWEB.Count() >= clientsInDB.Count())
        {
            for (int i = 0; i < clientsInDB.Count(); i++)
            {
                if (clientsInDB[i].ClientID == listWithClientsFromWEB[i].ClientID)
                {
                    listWithClientsFromWEB[i].PersonGroupID = clientsInDB[i].PersonGroupID;
                }
            }
        }
        else
        {
            for (int i = 0; i < listWithClientsFromWEB.Count(); i++)
            {
                if (listWithClientsFromWEB[i].ClientID == clientsInDB[i].ClientID)
                {
                    listWithClientsFromWEB[i].PersonGroupID = clientsInDB[i].PersonGroupID;
                }
            }
        }
    }
    // Check if worker lists from WEB(aka Clients) and from local DB(aka API) are equal
    if (listOfWorkerLists.Count() == listOfWorkerListsFromLocalDB.Count())
    {
        for (int j = 0; j < listOfWorkerLists.Count(); j++)
        {
            if (listOfWorkerLists[j][0].ClientID != listOfWorkerListsFromLocalDB[j][0].ClientID)
            {
                // NOTE(review): ListOfGroupId is cleared twice here and
                // ListOfWorkerGroupsForUpdating is never cleared — looks like a
                // copy-paste slip; verify intended reset set.
                listsAreEqual = false;
                listForUpdatingWithGroupId.ListOfGroupId.Clear();
                listForUpdatingWithGroupId.UpdateSource.Clear();
                listForUpdatingWithGroupId.ListOfGroupId.Clear();
                break;
            }
            if (listOfWorkerLists[j].Count() == listOfWorkerListsFromLocalDB[j].Count())
            {
                // Same worker count: compare per-worker name, avatar, and client id.
                for (int i = 0; i < listOfWorkerLists[j].Count(); i++)
                {
                    if (listOfWorkerLists[j][i].FullName != listOfWorkerListsFromLocalDB[j][i].FullName)
                    {
                        if (!listForUpdatingWithGroupId.ListOfWorkerGroupsForUpdating.Contains(listOfWorkerListsFromLocalDB[j]))
                        {
                            listForUpdatingWithGroupId.ListOfWorkerGroupsForUpdating.Add(listOfWorkerListsFromLocalDB[j]);
                            listForUpdatingWithGroupId.UpdateSource.Add(listOfWorkerLists[j]);
                            clientForUpdate = listWithClientsFromWEB.FirstOrDefault(x => x.ClientID == listOfWorkerListsFromLocalDB[j][0].ClientID);
                            listForUpdatingWithGroupId.ListOfGroupId.Add(clientForUpdate.PersonGroupID);
                            listsAreEqual = false;
                        }
                    }
                    if (!listOfWorkerLists[j][i].Avatar.SequenceEqual(listOfWorkerListsFromLocalDB[j][i].Avatar))
                    {
                        if (!listForUpdatingWithGroupId.ListOfWorkerGroupsForUpdating.Contains(listOfWorkerListsFromLocalDB[j]))
                        {
                            listForUpdatingWithGroupId.ListOfWorkerGroupsForUpdating.Add(listOfWorkerListsFromLocalDB[j]);
                            listForUpdatingWithGroupId.UpdateSource.Add(listOfWorkerLists[j]);
                            clientForUpdate = listWithClientsFromWEB.FirstOrDefault(x => x.ClientID == listOfWorkerListsFromLocalDB[j][0].ClientID);
                            listForUpdatingWithGroupId.ListOfGroupId.Add(clientForUpdate.PersonGroupID);
                            listsAreEqual = false;
                        }
                    }
                    if (listOfWorkerLists[j][i].ClientID != listOfWorkerListsFromLocalDB[j][i].ClientID)
                    {
                        if (!listForUpdatingWithGroupId.ListOfWorkerGroupsForUpdating.Contains(listOfWorkerListsFromLocalDB[j]))
                        {
                            listForUpdatingWithGroupId.ListOfWorkerGroupsForUpdating.Add(listOfWorkerListsFromLocalDB[j]);
                            listForUpdatingWithGroupId.UpdateSource.Add(listOfWorkerLists[j]);
                            clientForUpdate = listWithClientsFromWEB.FirstOrDefault(x => x.ClientID == listOfWorkerListsFromLocalDB[j][0].ClientID);
                            listForUpdatingWithGroupId.ListOfGroupId.Add(clientForUpdate.PersonGroupID);
                            listsAreEqual = false;
                        }
                    }
                }
            }
            else
            {
                // Worker count differs: mark the whole group for update.
                listForUpdatingWithGroupId.ListOfWorkerGroupsForUpdating.Add(listOfWorkerListsFromLocalDB[j]);
                listForUpdatingWithGroupId.UpdateSource.Add(listOfWorkerLists[j]);
                clientForUpdate = listWithClientsFromWEB.FirstOrDefault(x => x.ClientID == listOfWorkerListsFromLocalDB[j][0].ClientID);
                listForUpdatingWithGroupId.ListOfGroupId.Add(clientForUpdate.PersonGroupID);
                listsAreEqual = false;
            }
        }
    }
    else
    {
        listsAreEqual = false;
    }
    if (listsAreEqual)
    {
        //case when groups on the API are equal to the lists from WEB
        listWithClientsFromWEB.Clear();
        goto BeginningOfInfiniteLoop;
    }
    else
    {
        //case when client count was equal but one or many clients have been changed in
        //WEB and needs to change them in the local DB(aka API)
        if (listForUpdatingWithGroupId.ListOfWorkerGroupsForUpdating.Count() != 0)
        {
            // Re-create and re-train only the changed person groups (blocking calls).
            for (int i = 0; i < listForUpdatingWithGroupId.ListOfWorkerGroupsForUpdating.Count(); i++)
            {
                DeletePersonGroup(client, listForUpdatingWithGroupId.ListOfGroupId[i]).Wait();
                CreateAndTrainWorkersPersonGroup(client, listForUpdatingWithGroupId.UpdateSource[i], listForUpdatingWithGroupId.ListOfGroupId[i]).Wait();
            }
            listForUpdatingWithGroupId.UpdateSource.Clear();
            listForUpdatingWithGroupId.ListOfGroupId.Clear();
            listForUpdatingWithGroupId.ListOfWorkerGroupsForUpdating.Clear();
            listOfWorkerListsFromLocalDB = listOfWorkerLists;
            clientsInDB.Clear();
            for (int i = 0; i < listWithClientsFromWEB.Count(); i++)
            {
                clientsInDB.Add(listWithClientsFromWEB[i]);
            }
            // Rebuild the local DB from the web data.
            DeleteClientsAndWorkersFromLocalDB();
            AddClientsToLocalDB(listWithClientsFromWEB).Wait();
            AddWorkersToLocalDB(listOfWorkerLists).Wait();
            listWithClientsFromWEB.Clear();
            listsAreEqual = true;
        }
        else
        {
            //case when client count wasn't equal
            DeletePersonGroups(client, clientsInDB).Wait();
            CreateAndTrainWorkersPersonGroups(client, listOfWorkerLists, listWithClientsFromWEB).Wait();
            listOfWorkerListsFromLocalDB = listOfWorkerLists;
            clientsInDB.Clear();
            for (int i = 0; i < listWithClientsFromWEB.Count(); i++)
            {
                clientsInDB.Add(listWithClientsFromWEB[i]);
            }
            DeleteClientsAndWorkersFromLocalDB();
            AddClientsToLocalDB(listWithClientsFromWEB).Wait();
            AddWorkersToLocalDB(listOfWorkerLists).Wait();
            listWithClientsFromWEB.Clear();
            listsAreEqual = true;
        }
    }
    //Here ends part, which check Local DB and WEB DB and update Local DB And API if needed
BeginningOfInfiniteLoop:
    List<List<WorkersForProcessing>> templistOfWorkerLists = new List<List<WorkersForProcessing>>();
    // Variable is needed for infinite loop
    bool enabledInfiniteLoop = true;
    // Infinite loop, which search for new images in the target folder and if search is succesfull,
    // then use the local library to find the faces in the image. If faces are founded, then starting work with
    // Microsot Azure API to identify faces and save results to the database.
    while (enabledInfiniteLoop)
    {
        Thread.Sleep((int)(3000));
        DirectoryInfo dir = new DirectoryInfo(inputFilePath);
        // Dlib library using for local face search
        using (var fd = Dlib.GetFrontalFaceDetector())
        {
            foreach (FileInfo file in dir.GetFiles("*.jpeg"))
            {
                // NOTE(review): a fixed 60 s sleep per file — confirm this
                // throttle is intentional.
                Thread.Sleep((int)(60000));
                string _inputFilePath = inputFilePath + file.Name;
                // load input image
                Array2D<RgbPixel> img = Dlib.LoadImage<RgbPixel>(_inputFilePath);
                // find all faces in the image
                Rectangle[] faces = fd.Operator(img);
                // if search was succesfull then starting work with Microsoft Azure API
                if (faces.Length != 0)
                {
                    Console.WriteLine();
                    Console.WriteLine("Picture " + file.Name + " have faces(according to the DlibDotNetNative library), sending data to Microsoft Azure API");
                    // Re-fetch the worker lists and rebuild the client list, same as at startup.
                    templistOfWorkerLists = GetWorkersFromWebApplicationInDifferentListsDividedByClientID(databaseConnString);
                    foreach (var workerList in templistOfWorkerLists)
                    {
                        Client newClient = new Client();
                        newClient.PersonGroupID = Guid.NewGuid().ToString();
                        newClient.ClientID = workerList[0].ClientID;
                        listWithClientsFromWEB.Add(newClient);
                    }
                    // Check if local DB have old Clients, and if it does,
                    // then thansfer PersonGroupID(API) to the Clients in the new list
                    if ((listWithClientsFromWEB.Count() != 0) && (clientsInDB.Count() != 0))
                    {
                        if (listWithClientsFromWEB.Count() >= clientsInDB.Count())
                        {
                            for (int i = 0; i < clientsInDB.Count(); i++)
                            {
                                if (clientsInDB[i].ClientID == listWithClientsFromWEB[i].ClientID)
                                {
                                    listWithClientsFromWEB[i].PersonGroupID = clientsInDB[i].PersonGroupID;
                                }
                            }
                        }
                        else
                        {
                            for (int i = 0; i < listWithClientsFromWEB.Count(); i++)
                            {
                                if (listWithClientsFromWEB[i].ClientID == clientsInDB[i].ClientID)
                                {
                                    listWithClientsFromWEB[i].PersonGroupID = clientsInDB[i].PersonGroupID;
                                }
                            }
                        }
                    }
                    listsAreEqual = true;
                    // Same equality check as at startup, but against the freshly fetched lists.
                    if (listOfWorkerLists.Count() == templistOfWorkerLists.Count())
                    {
                        for (int j = 0; j < listOfWorkerLists.Count(); j++)
                        {
                            if (listOfWorkerLists[j][0].ClientID != templistOfWorkerLists[j][0].ClientID)
                            {
                                listsAreEqual = false;
                                listForUpdatingWithGroupId.ListOfGroupId.Clear();
                                listForUpdatingWithGroupId.UpdateSource.Clear();
                                listForUpdatingWithGroupId.ListOfGroupId.Clear();
                                break;
                            }
                            if (listOfWorkerLists[j].Count() == templistOfWorkerLists[j].Count())
                            {
                                for (int i = 0; i < listOfWorkerLists[j].Count(); i++)
                                {
                                    if (listOfWorkerLists[j][i].FullName != templistOfWorkerLists[j][i].FullName)
                                    {
                                        if (!listForUpdatingWithGroupId.ListOfWorkerGroupsForUpdating.Contains(listOfWorkerLists[j]))
                                        {
                                            listForUpdatingWithGroupId.ListOfWorkerGroupsForUpdating.Add(listOfWorkerLists[j]);
                                            listForUpdatingWithGroupId.UpdateSource.Add(templistOfWorkerLists[j]);
                                            clientForUpdate = listWithClientsFromWEB.FirstOrDefault(x => x.ClientID == listOfWorkerListsFromLocalDB[j][0].ClientID);
                                            listForUpdatingWithGroupId.ListOfGroupId.Add(clientForUpdate.PersonGroupID);
                                            listsAreEqual = false;
                                        }
                                    }
                                    if (!listOfWorkerLists[j][i].Avatar.SequenceEqual(templistOfWorkerLists[j][i].Avatar))
                                    {
                                        if (!listForUpdatingWithGroupId.ListOfWorkerGroupsForUpdating.Contains(listOfWorkerLists[j]))
                                        {
                                            listForUpdatingWithGroupId.ListOfWorkerGroupsForUpdating.Add(listOfWorkerLists[j]);
                                            listForUpdatingWithGroupId.UpdateSource.Add(templistOfWorkerLists[j]);
                                            clientForUpdate = listWithClientsFromWEB.FirstOrDefault(x => x.ClientID == listOfWorkerListsFromLocalDB[j][0].ClientID);
                                            listForUpdatingWithGroupId.ListOfGroupId.Add(clientForUpdate.PersonGroupID);
                                            listsAreEqual = false;
                                        }
                                    }
                                    if (listOfWorkerLists[j][i].ClientID != templistOfWorkerLists[j][i].ClientID)
                                    {
                                        if (!listForUpdatingWithGroupId.ListOfWorkerGroupsForUpdating.Contains(listOfWorkerLists[j]))
                                        {
                                            listForUpdatingWithGroupId.ListOfWorkerGroupsForUpdating.Add(listOfWorkerLists[j]);
                                            listForUpdatingWithGroupId.UpdateSource.Add(templistOfWorkerLists[j]);
                                            clientForUpdate = listWithClientsFromWEB.FirstOrDefault(x => x.ClientID == listOfWorkerListsFromLocalDB[j][0].ClientID);
                                            listForUpdatingWithGroupId.ListOfGroupId.Add(clientForUpdate.PersonGroupID);
                                            listsAreEqual = false;
                                        }
                                    }
                                }
                            }
                            else
                            {
                                listForUpdatingWithGroupId.ListOfWorkerGroupsForUpdating.Add(listOfWorkerLists[j]);
                                listForUpdatingWithGroupId.UpdateSource.Add(templistOfWorkerLists[j]);
                                clientForUpdate = listWithClientsFromWEB.FirstOrDefault(x => x.ClientID == listOfWorkerListsFromLocalDB[j][0].ClientID);
                                listForUpdatingWithGroupId.ListOfGroupId.Add(clientForUpdate.PersonGroupID);
                                listsAreEqual = false;
                            }
                        }
                    }
                    else
                    {
                        listsAreEqual = false;
                    }
                CheckIfPersonGroupsIsOutdated:
                    if (listsAreEqual)
                    {
                        //case when groups on the API are equal to the lists from local and WEB DB
                        // Get ClientID and CameraID from XML
                        (string, string)infoFromXML = GetInfoFromXML(file.Name, inputFilePath);
                        for (int i = 0; i < listOfWorkerLists.Count(); i++)
                        {
                            if (listOfWorkerLists[i][0].ClientID == infoFromXML.Item1)
                            {
                                // Identify the faces via the Azure API, store results, then remove the image.
                                FindFacesWithAPIIdentifyThemAndAddInDB(_inputFilePath, subscriptionKey, uriBase, file.Name, client, inputFilePath, clientsInDB[i].PersonGroupID).Wait();
                                file.Delete();
                            }
                        }
                        listWithClientsFromWEB.Clear();
                    }
                    else
                    {
                        //case when client count was equal but one or many clients have been changed
                        if (listForUpdatingWithGroupId.ListOfWorkerGroupsForUpdating.Count() != 0)
                        {
                            for (int i = 0; i < listForUpdatingWithGroupId.ListOfWorkerGroupsForUpdating.Count(); i++)
                            {
                                DeletePersonGroup(client, listForUpdatingWithGroupId.ListOfGroupId[i]).Wait();
                                CreateAndTrainWorkersPersonGroup(client, listForUpdatingWithGroupId.UpdateSource[i], listForUpdatingWithGroupId.ListOfGroupId[i]).Wait();
                            }
                            listForUpdatingWithGroupId.UpdateSource.Clear();
                            listForUpdatingWithGroupId.ListOfGroupId.Clear();
                            listForUpdatingWithGroupId.ListOfWorkerGroupsForUpdating.Clear();
                            listOfWorkerListsFromLocalDB = templistOfWorkerLists;
                            listOfWorkerLists = templistOfWorkerLists;
                            clientsInDB.Clear();
                            for (int i = 0; i < listWithClientsFromWEB.Count(); i++)
                            {
                                clientsInDB.Add(listWithClientsFromWEB[i]);
                            }
                            DeleteClientsAndWorkersFromLocalDB();
                            AddClientsToLocalDB(clientsInDB).Wait();
                            AddWorkersToLocalDB(listOfWorkerListsFromLocalDB).Wait();
                            listWithClientsFromWEB.Clear();
                            listsAreEqual = true;
                            // Retry the identification with the refreshed groups.
                            goto CheckIfPersonGroupsIsOutdated;
                        }
                        else
                        {
                            //case when client count wasn't equal
                            DeletePersonGroups(client, clientsInDB).Wait();
                            CreateAndTrainWorkersPersonGroups(client, templistOfWorkerLists, listWithClientsFromWEB).Wait();
                            listOfWorkerListsFromLocalDB = templistOfWorkerLists;
                            listOfWorkerLists = templistOfWorkerLists;
                            clientsInDB.Clear();
                            for (int i = 0; i < listWithClientsFromWEB.Count(); i++)
                            {
                                clientsInDB.Add(listWithClientsFromWEB[i]);
                            }
                            DeleteClientsAndWorkersFromLocalDB();
                            AddClientsToLocalDB(clientsInDB).Wait();
                            AddWorkersToLocalDB(listOfWorkerListsFromLocalDB).Wait();
                            listWithClientsFromWEB.Clear();
                            listsAreEqual = true;
                            goto CheckIfPersonGroupsIsOutdated;
                        }
                    }
                }
                else
                {
                    // No faces found locally — discard the picture.
                    file.Delete();
                }
                // After all work is done delete XML file with info about image set, received from client
                string xmlName = file.Name.Substring(0, file.Name.LastIndexOf('_')) + ".xml";
                if (dir.GetFiles(file.Name.Substring(0, file.Name.LastIndexOf('_')) + "*.jpeg").Length <= 0)
                {
                    foreach (FileInfo xmlFile in dir.GetFiles(xmlName))
                    {
                        xmlFile.Delete();
                    }
                }
            }
        }
    }
}
/// <summary>
/// BackgroundWorker handler: loads the image whose path arrives in
/// <paramref name="doWorkEventArgs"/>.Argument, detects faces and their landmarks,
/// draws the landmark lines onto the image shown in <c>pictureBoxImage</c>, and
/// shows a tiled montage of the aligned face chips in <c>pictureBoxTileImage</c>.
/// Silently returns when the argument is not an existing file path.
/// </summary>
/// <param name="sender">The BackgroundWorker raising the event (unused).</param>
/// <param name="doWorkEventArgs">Carries the image file path in <c>Argument</c>.</param>
private void BackgroundWorkerOnDoWork(object sender, DoWorkEventArgs doWorkEventArgs)
{
    var path = doWorkEventArgs.Argument as string;
    if (string.IsNullOrWhiteSpace(path) || !File.Exists(path))
    {
        return;
    }

    using (var faceDetector = Dlib.GetFrontalFaceDetector())
    using (var img = Dlib.LoadImage<RgbPixel>(path))
    {
        // Upsample once so smaller faces become detectable by the HOG detector.
        Dlib.PyramidUp(img);

        var dets = faceDetector.Operator(img);

        // Run the shape predictor on every detection; keep only detections that
        // produced more than 2 landmark parts (degenerate shapes are skipped).
        var shapes = new List<FullObjectDetection>();
        foreach (var rect in dets)
        {
            var shape = this._ShapePredictor.Detect(img, rect);
            if (shape.Parts <= 2)
            {
                continue;
            }
            shapes.Add(shape);
        }

        if (shapes.Any())
        {
            // Draw the rendered landmark lines (green) directly onto the image.
            var lines = Dlib.RenderFaceDetections(shapes);
            foreach (var line in lines)
            {
                Dlib.DrawLine(img, line.Point1, line.Point2, new RgbPixel { Green = 255 });
            }

            // Hand the annotated bitmap to the PictureBox; dispose the previous
            // image first so GDI handles are not leaked. The PictureBox now owns `wb`.
            var wb = img.ToBitmap();
            this.pictureBoxImage.Image?.Dispose();
            this.pictureBoxImage.Image = wb;

            // The rendered line objects wrap native resources — release them now
            // that they have been drawn.
            foreach (var l in lines)
            {
                l.Dispose();
            }

            // Extract an aligned chip per face and tile them into one montage image.
            var chipLocations = Dlib.GetFaceChipDetails(shapes);
            using (var faceChips = Dlib.ExtractImageChips<RgbPixel>(img, chipLocations))
            using (var tileImage = Dlib.TileImages(faceChips))
            {
                // It is NOT necessary to re-convert WriteableBitmap to Matrix.
                // This sample demonstrates converting a managed image class to a
                // dlib class and vice versa.
                using (var tile = tileImage.ToBitmap())
                using (var mat = tile.ToMatrix<RgbPixel>())
                {
                    var tile2 = mat.ToBitmap();
                    this.pictureBoxTileImage.Image?.Dispose();
                    this.pictureBoxTileImage.Image = tile2;
                }
            }

            foreach (var c in chipLocations)
            {
                c.Dispose();
            }
        }

        // Shapes wrap native dlib objects; dispose them whether or not any were drawn.
        foreach (var s in shapes)
        {
            s.Dispose();
        }
    }
}
/// <summary>
/// Entry point. Demonstrates three approaches on the image at <c>fileName</c>:
/// (1) face detection through the FaceDetectionWith_API helper,
/// (2) plain dlib HOG face detection with rectangles saved to <c>outputName</c>,
/// (3) the dlib DNN face-recognition pipeline: landmark alignment, 128D face
/// descriptors, Chinese-whispers clustering into per-person windows, and a
/// jittered-descriptor demonstration.
/// </summary>
static void Main(string[] args)
{
    /// FaceDetectionWith_API
    Location[] coord = TestImage(fileName, Model.Hog);

    /// Face DetectionWith_DLIB
    using (var fd = Dlib.GetFrontalFaceDetector())
    {
        var img = Dlib.LoadImage<RgbPixel>(fileName);

        // find all faces in the image
        var faces = fd.Operator(img);
        foreach (var face in faces)
        {
            // draw a rectangle for each face
            Dlib.DrawRectangle(img, face, color: new RgbPixel(0, 255, 255), thickness: 4);
        }

        Dlib.SaveJpeg(img, outputName);
    }

    // The first thing we are going to do is load all our models. First, since we need to
    // find faces in the image we will need a face detector:
    using (var detector = Dlib.GetFrontalFaceDetector())
    // We will also use a face landmarking model to align faces to a standard pose:
    // (see face_landmark_detection_ex.cpp for an introduction)
    using (var sp = ShapePredictor.Deserialize("shape_predictor_68_face_landmarks.dat"))
    // And finally we load the DNN responsible for face recognition.
    using (var net = DlibDotNet.Dnn.LossMetric.Deserialize("dlib_face_recognition_resnet_model_v1.dat"))
    using (var img = Dlib.LoadImageAsMatrix<RgbPixel>(fileName))
    using (var win = new ImageWindow(img))
    {
        // Align every detected face to a 150x150 chip (25% padding) and collect
        // the chips; also overlay the detection rectangle on the preview window.
        var faces = new List<Matrix<RgbPixel>>();
        foreach (var face in detector.Operator(img))
        {
            var shape = sp.Detect(img, face);
            var faceChipDetail = Dlib.GetFaceChipDetails(shape, 150, 0.25);
            var faceChip = Dlib.ExtractImageChip<RgbPixel>(img, faceChipDetail);

            //faces.Add(move(face_chip));
            faces.Add(faceChip);
            win.AddOverlay(face);
        }

        if (!faces.Any())
        {
            Console.WriteLine("No faces found in image!");
            return;
        }

        // This call asks the DNN to convert each face image in faces into a 128D vector.
        // In this 128D vector space, images from the same person will be close to each other
        // but vectors from different people will be far apart. So we can use these vectors to
        // identify if a pair of images are from the same person or from different people.
        var faceDescriptors = net.Operator(faces);

        // In particular, one simple thing we can do is face clustering. This next bit of code
        // creates a graph of connected faces and then uses the Chinese whispers graph clustering
        // algorithm to identify how many people there are and which faces belong to whom.
        var edges = new List<SamplePair>();
        for (uint i = 0; i < faceDescriptors.Count; ++i)
        {
            // NOTE: j starts at i on purpose — the self-edge (i,i) tells Chinese
            // whispers that node i exists even if it matches no other face.
            for (var j = i; j < faceDescriptors.Count; ++j)
            {
                // Faces are connected in the graph if they are close enough. Here we check if
                // the distance between two face descriptors is less than 0.6, which is the
                // decision threshold the network was trained to use. Although you can
                // certainly use any other threshold you find useful.
                var diff = faceDescriptors[i] - faceDescriptors[j];
                if (Dlib.Length(diff) < 0.6)
                {
                    edges.Add(new SamplePair(i, j));
                }
            }
        }

        Dlib.ChineseWhispers(edges, 100, out var numClusters, out var labels);

        // This will correctly indicate that there are 4 people in the image.
        Console.WriteLine($"number of people found in the image: {numClusters}");

        // Display the result: one ImageWindow per cluster, each showing a tiled
        // montage of that person's face chips.
        var winClusters = new List<ImageWindow>();
        for (var i = 0; i < numClusters; i++)
        {
            winClusters.Add(new ImageWindow());
        }

        var tileImages = new List<Matrix<RgbPixel>>();
        for (var clusterId = 0ul; clusterId < numClusters; ++clusterId)
        {
            var temp = new List<Matrix<RgbPixel>>();
            for (var j = 0; j < labels.Length; ++j)
            {
                if (clusterId == labels[j])
                {
                    temp.Add(faces[j]);
                }
            }

            winClusters[(int)clusterId].Title = $"face cluster {clusterId}";
            var tileImage = Dlib.TileImages(temp);
            tileImages.Add(tileImage);
            winClusters[(int)clusterId].SetImage(tileImage);
        }

        // Finally, let's print one of the face descriptors to the screen.
        using (var trans = Dlib.Trans(faceDescriptors[0]))
        {
            Console.WriteLine($"face descriptor for one face: {trans}");

            // It should also be noted that face recognition accuracy can be improved if jittering
            // is used when creating face descriptors. In particular, to get 99.38% on the LFW
            // benchmark you need to use the jitter_image() routine to compute the descriptors,
            // like so:
            var jitterImages = JitterImage(faces[0]).ToArray();
            var ret = net.Operator(jitterImages);
            using (var m = Dlib.Mat(ret))
            using (var faceDescriptor = Dlib.Mean<float>(m))
            using (var t = Dlib.Trans(faceDescriptor))
            {
                Console.WriteLine($"jittered face descriptor for one face: {t}");

                // If you use the model without jittering, as we did when clustering the bald guys, it
                // gets an accuracy of 99.13% on the LFW benchmark. So jittering makes the whole
                // procedure a little more accurate but makes face descriptor calculation slower.
                Console.WriteLine("hit enter to terminate");
                Console.ReadKey();

                // Release every native dlib object created above.
                foreach (var jitterImage in jitterImages)
                {
                    jitterImage.Dispose();
                }

                foreach (var tileImage in tileImages)
                {
                    tileImage.Dispose();
                }

                foreach (var edge in edges)
                {
                    edge.Dispose();
                }

                foreach (var descriptor in faceDescriptors)
                {
                    descriptor.Dispose();
                }

                foreach (var face in faces)
                {
                    face.Dispose();
                }
            }
        }
    }

    System.Console.ReadLine();
}
/// <summary>
/// Walks every image under <c>currentFilePath</c>, detects faces and their 68
/// landmarks, derives six geometric features per face (eyebrow/lip distances),
/// and appends one labelled CSV row per face to the training or testing output
/// file (chosen by <c>currentDataType</c>). For the testing data type, the
/// resulting CSV is immediately loaded and evaluated via GenerateMetrics.
/// Landmarks are also drawn onto each image and shown on the form as progress
/// feedback.
/// </summary>
public static void CreateFeatureVectors()
{
    int faceCount = 0;
    float leftEyebrow, rightEyebrow, leftLip, rightLip, lipHeight, lipWidth;

    // Pick the CSV destination based on whether we are generating test or training data.
    string output = currentDataType == Datatype.Testing ? testingOutput : trainingOutput;

    string[] dirs = Directory.GetFiles(currentFilePath, "*.*", SearchOption.AllDirectories);

    // Set up Dlib Face Detector
    using (var fd = Dlib.GetFrontalFaceDetector())
    // ... and Dlib Shape Detector
    using (var sp = ShapePredictor.Deserialize("shape_predictor_68_face_landmarks.dat"))
    // One writer for the whole run (the original reopened the file per face,
    // which is wasteful and risks interleaved handles). `false` truncates, so
    // the header always starts a fresh file.
    using (var file = new System.IO.StreamWriter(output, false))
    {
        // Header line of the CSV.
        file.WriteLine("leftEyebrow,rightEyebrow,leftLip,rightLip,lipWidth,lipHeight,label");

        foreach (string dir in dirs)
        {
            // Only process image files; everything else in the tree is skipped.
            if (!(dir.EndsWith("png", StringComparison.OrdinalIgnoreCase) ||
                  dir.EndsWith("jpg", StringComparison.OrdinalIgnoreCase)))
            {
                continue;
            }

            // The label is derived from the filename (see DetermineLabel); compute it
            // only for files we actually process.
            string label = DetermineLabel(dir);

            // Array2D wraps native memory — dispose it per image (the original leaked it).
            using (var img = Dlib.LoadImage<RgbPixel>(dir))
            {
                // find all faces in the image
                var faces = fd.Operator(img);

                // for each face draw over the facial landmarks
                foreach (var face in faces)
                {
                    // Update the form, displaying the progress
                    Form1.SetProgress(faceCount, dirs.Length - 1);

                    // find the landmark points for this face
                    var shape = sp.Detect(img, face);
                    for (var i = 0; i < shape.Parts; i++)
                    {
                        RgbPixel colour = new RgbPixel(255, 255, 255);
                        var point = shape.GetPart((uint)i);
                        var rect = new DlibDotNet.Rectangle(point);
                        Dlib.DrawRectangle(img, rect, color: colour, thickness: 2);
                    }

                    SetFormImage(img);

                    leftEyebrow = CalculateLeftEyebrow(shape);
                    rightEyebrow = CalculateRightEyebrow(shape);
                    leftLip = CalculateLeftLip(shape);
                    rightLip = CalculateRightLip(shape);
                    lipWidth = CalculateLipWidth(shape);
                    lipHeight = CalculateLipHeight(shape);

                    // One CSV row per detected face; column order matches the header.
                    file.WriteLine(
                        $"{leftEyebrow},{rightEyebrow},{leftLip},{rightLip},{lipWidth},{lipHeight},{label}");

                    // Increment count used for progress output
                    faceCount++;
                }
            }
        }

        // Flush before the testing branch below re-reads the file.
        file.Flush();

        if (currentDataType == Datatype.Testing)
        {
            var testDataView = mlContext.Data.LoadFromTextFile<FeatureInputData>(
                output, hasHeader: true, separatorChar: ',');
            GenerateMetrics(testDataView);
        }

        Form1.HideImage();
    }
}