// Timer callback: grab a frame, detect faces, draw outlines, predict each face's
// student ID and show the matching record from the Attendance database.
void timer_Tick(object sender, EventArgs e)
{
    // QueryFrame() can return null when the capture ends; check BEFORE converting.
    // (The original called ToImage() first and only then tested for null.)
    Mat rawFrame = capture.QueryFrame();
    if (rawFrame == null)
    {
        return;
    }

    Image<Bgr, Byte> currentFrame = rawFrame.ToImage<Bgr, Byte>();
    Image<Gray, Byte> grayFrame = currentFrame.Convert<Gray, Byte>();
    var detectedFaces = Frontface_Cascade.DetectMultiScale(grayFrame);

    foreach (var face in detectedFaces)
    {
        // Restrict the gray image to this face for prediction.
        grayFrame.ROI = face;
        // 255 saturates the 8-bit green channel (double.MaxValue was clamped anyway).
        currentFrame.Draw(face, new Bgr(0, 255, 0), 3);
        logger.Info("Drawing Rectangle Outline of Face");

        int predictresult = recognizer.Predict(grayFrame.Resize(200, 200, 0)).Label;

        // Reset the ROI so later iterations see the full frame again
        // (the original left it set to the last face).
        grayFrame.ROI = Rectangle.Empty;

        var connectionstring = ConfigurationManager.ConnectionStrings["Test"].ConnectionString;
        using (SqlConnection connection = new SqlConnection(connectionstring))
        {
            connection.Open();
            // Parameterized query: the predicted ID is bound, never concatenated.
            string query = "SELECT * FROM Attendance.dbo.TrainingData where StudentID = @studentid";
            using (SqlCommand command = new SqlCommand(query, connection))
            {
                command.Parameters.Add("@studentid", SqlDbType.Int).Value = predictresult;
                using (SqlDataReader reader = command.ExecuteReader())
                {
                    while (reader.Read())
                    {
                        // Column 1 is presumably the student name — TODO confirm schema.
                        textbox1.Text = reader.GetString(1);
                    }
                }
            }
        }
    }
    image1.Source = ToBitmapSource(currentFrame);
}
// Recognize the face in the user-selected photo against the user-selected model
// and display the matched user's name.
private void btnRecognize_Click(object sender, EventArgs e)
{
    if (string.IsNullOrWhiteSpace(PathPhoto) || string.IsNullOrWhiteSpace(PathModel))
    {
        MessageBox.Show("need to select photo and model");
        return;
    }
    try
    {
        // Dispose the GDI bitmap once it has been wrapped by the Emgu image
        // (the original leaked one Bitmap per click).
        using (var userBmp = new Bitmap(PathPhoto))
        {
            var userImage = new Image<Gray, byte>(userBmp);
            _faceRecognizer.Load(PathModel);
            // 100x100 is presumably the model's training size — TODO confirm.
            var result = _faceRecognizer.Predict(userImage.Resize(100, 100, Inter.Cubic));
            var userId = result.Label;
            var userRecord = new FRService().GetById(userId);
            if (userRecord != null)
            {
                lblResult.Text = userRecord.UserName;
            }
            else
            {
                MessageBox.Show("User not enrolled in db");
            }
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}
/// <summary>
/// Loads a recognizer model and predicts the label for the given face image.
/// </summary>
/// <param name="userImage">Grayscale face image; resized to 100x100 before prediction.</param>
/// <param name="modelPath">Path to the trained model file. Defaults to the original
/// hard-coded location so existing callers are unaffected.</param>
/// <returns>The predicted label as a string.</returns>
public string RecognizeUser(Image<Gray, byte> userImage, string modelPath = @"C:\Users\Dom\Documents\Visual Studio 2015\Projects\Emgu\Emgu\Faces\recognizerFilePath\file.yaml")
{
    _faceRecognizer.Load(modelPath);
    var result = _faceRecognizer.Predict(userImage.Resize(100, 100, Inter.Cubic));
    return result.Label.ToString();
}
/// <summary>
/// Detects all faces in the given encoded photo bytes and returns the recognized
/// names, one per line. The pointless catch-and-rethrow wrapper was removed
/// (it added nothing and `throw;` already preserved the stack trace).
/// </summary>
public string RecognizeFace(byte[] photoByteArray)
{
    // Fail fast when no recognizer has been trained/loaded.
    if (_eigen == null)
    {
        throw new Exception(ConfigurationManager.AppSettings["RecognizerError"]);
    }
    Image<Bgr, byte> photo = photoByteArray.ByteArrayToImage();
    IFaceDetectionService faceDetectionService = new FaceDetectionService();
    Rectangle[] faces = faceDetectionService.DetectFacesAsRect(photo);
    ICollection<string> recognizedNames = new List<string>();
    foreach (Rectangle faceRectangle in faces)
    {
        var face = photo.Copy(faceRectangle).ConvertToRecognition();
        FaceRecognizer.PredictionResult result = _eigen.Predict(face);
        // Labels are 1-based into _FacesNamesArray; 0/negative means no match.
        if (result.Label > 0)
        {
            recognizedNames.Add(_FacesNamesArray[result.Label - 1]);
        }
    }
    return string.Join(Environment.NewLine, recognizedNames);
}
// Smoke test: detect faces in a sample image, then train and query an eigen
// recognizer on the same gray frame.
public static void Test()
{
    using (Mat image = new Mat("testGamePic.jpg"))
    using (Mat uimg = new Mat())
    using (CascadeClassifier face = new CascadeClassifier("haarcascade_frontalface_default.xml"))
    {
        CvInvoke.CvtColor(image, uimg, ColorConversion.Bgr2Gray);
        CvInvoke.EqualizeHist(uimg, uimg);
        // NOTE(review): the detected rectangles are never used — training and
        // prediction run on the whole frame, not on a cropped face. Confirm intent.
        Rectangle[] facesDetected = face.DetectMultiScale(uimg, 1.1, 10, new Size(20, 20));
        // Dispose the recognizer's native handle (the original leaked it).
        using (EigenFaceRecognizer efr = new EigenFaceRecognizer())
        {
            efr.Train(new VectorOfMat(uimg), new VectorOfInt(new int[] { 1 }));
            var res = efr.Predict(uimg);
        }
    }
}
// Reloads the trained model from disk and classifies the given face image,
// normalized to the 100x100 training size. Returns the predicted label.
public int RecognizeUser(Image<Gray, byte> userImage)
{
    _faceRecognizer.Load(_recognizerFilePath);
    Image<Gray, byte> normalizedFace = userImage.Resize(100, 100, Inter.Cubic);
    return _faceRecognizer.Predict(normalizedFace).Label;
}
// Predicts a name for the detected face (or a placeholder when no training data
// exists), annotates the live frame, and routes the crop + name to the UI slot
// for the given face index (only slots 0 and 1 exist).
private void FaceRecognition(Image<Gray, Byte> detectedFaceImage, Rectangle face, int currentFaceIndex)
{
    string displayName;
    if (ImageList.Size != 0)
    {
        FaceRecognizer.PredictionResult prediction = FaceRecognizer.Predict(detectedFaceImage.Resize(148, 148, Inter.Cubic));
        displayName = NamesList[prediction.Label];
    }
    else
    {
        displayName = "Noma'lum shaxs";
    }

    // Label the frame just above the face rectangle.
    CurrentImage.Draw(displayName, new Point(face.X - 2, face.Y - 2), FontFace.HersheyDuplex, 0.5, new Bgr(Color.LightGreen));

    Bitmap faceBitmap = detectedFaceImage.ToBitmap();
    if (currentFaceIndex == 0)
    {
        pbDetectedFace0.Image = faceBitmap;
        txtRecognizedFace0.Text = displayName;
    }
    else if (currentFaceIndex == 1)
    {
        pbDetectedFace1.Image = faceBitmap;
        txtRecognizedFace1.Text = displayName;
    }
}
// Eigen-face recognition with a caller-overridable threshold. Returns the
// matched name, "Unknown" for rejects, or "" when no training data is loaded.
// Side effects: updates the recognitionName field, Config.Eigen_Distance, and
// (when Eigen_Thresh > -1) Config.Threshold.
private string FaceRecognition(Image<Gray, Byte> image, int Eigen_Thresh = -1)
{
    // Nothing to match against yet.
    if (imageList.Size == 0)
    {
        return "";
    }

    FaceRecognizer.PredictionResult result = recognizer.Predict(image);

    if (result.Label == -1)
    {
        recognitionName = "Unknown";
        Config.Eigen_Distance = 0;
        return recognitionName;
    }

    recognitionName = nameList[result.Label];
    Config.Eigen_Distance = (float)result.Distance;

    // A caller-supplied threshold overrides the configured one.
    if (Eigen_Thresh > -1)
    {
        Config.Threshold = Eigen_Thresh;
    }

    // Accept the match only when the eigen distance is inside the threshold.
    return Config.Eigen_Distance < Config.Threshold ? recognitionName : "Unknown";
}
// Per-frame eigen-face recognition: detect faces, classify each, and annotate
// the frame. The recognizer is now built and trained once per frame instead of
// once per detected face (the original re-created and re-trained it inside the
// loop, which is very expensive and was the concrete defect fixed here).
private void EigenFaceRecognition(object sender, EventArgs e)
{
    Frame = _capture.QueryFrame().ToImage<Bgr, byte>();
    var frame = Frame.Resize(frameW, frameH, Inter.Cubic);
    grayFrame = frame.Convert<Gray, Byte>();
    var faces = cascadeClassifier.DetectMultiScale(grayFrame, 1.1, 10, Size.Empty);

    if (faces.Length > 0)
    {
        // Train once, reuse for every face in this frame.
        eigenFaceRecognizer = new EigenFaceRecognizer(Count, double.PositiveInfinity);
        eigenFaceRecognizer.Train(trainingImages.ToArray(), indexLabels.ToArray());
    }

    foreach (var f in faces)
    {
        var result = eigenFaceRecognizer.Predict(frame.Copy(f).Convert<Gray, Byte>().Resize(100, 100, Inter.Cubic));
        if (result.Label == -1)
        {
            frame.Draw(f, new Bgr(Color.Red), 2);
            frame.Draw("Unknown", new Point(f.X, f.Y - 10), font, 0.8, new Bgr(Color.Blue), 2, new LineType(), false);
        }
        else
        {
            frame.Draw(f, new Bgr(Color.Green), 2);
            frame.Draw(nameLabels[result.Label], new Point(f.X, f.Y - 10), font, 0.8, new Bgr(Color.Blue), 2, new LineType(), false);
        }
        // Status text (Thai) kept byte-for-byte.
        alertMessage.Text = (alert + "เริ่มการ Face Recognition ด้วยวิธีการ " + RecognitionType.Text + " แล้ว \r\n" + "Distance " + result.Distance + "\r\n Faces " + faces.Length.ToString());
    }
    imgFrame.Image = frame.Resize(imgBoxW, imgBoxH, Inter.Cubic);
}
// Grabs one frame, detects every face, classifies each one, and shows the
// comma-separated list of recognized usernames ("Unknown" for rejects).
private void BtnPredict_Click(object sender, EventArgs e)
{
    Frame = Capture.QueryFrame();
    var grayFrame = Frame.ToImage<Gray, byte>();
    if (grayFrame == null)
    {
        return;
    }

    var detectedNames = new List<string>();
    foreach (var faceRect in FaceDetection.DetectMultiScale(grayFrame, 1.3, 5))
    {
        var normalizedFace = grayFrame.Copy(faceRect).Resize(ProcessImageWidth, ProcessImageHeight, Inter.Cubic);
        var prediction = EigenFaceRecognizer.Predict(normalizedFace);
        detectedNames.Add(prediction.Label != -1 ? DataStoreAccess.GetUsername(prediction.Label) : "Unknown");
    }
    lbResult.Text = string.Join(",", detectedNames);
}
// View-model constructor: wires the frame timer, primes the recognizer display
// state, loads the known-faces list, and starts video capture.
public ShellViewModel(Config config)
{
    this.config = config;
    Title = "Face Recognition";

    // Re-run frame processing on every timer tick.
    timer.Interval = TimeSpan.FromMilliseconds(config.TimerResponseValue);
    timer.Tick += (sender, args) => { ProcessFrame(); };

    // NOTE(review): this recognition step runs in the constructor, BEFORE
    // GetFacesList() is called and before the capture is started below —
    // detectedFace / imageList must be populated elsewhere for this branch to
    // do anything useful; confirm the intended ordering.
    if (imageList.Size != 0)
    {
        //Eigen Face Algorithm
        FaceRecognizer.PredictionResult result = recognizer.Predict(detectedFace.Resize(100, 100, Inter.Cubic));
        FaceName = nameList[result.Label];
        cameraCaptureFace = detectedFace.ToBitmap();
    }
    else
    {
        FaceName = "Please Add Face";
        detectedFace = null;
    }

    // Load known faces and begin capturing at 30 fps, 370x450.
    GetFacesList(config);
    videoCapture = new VideoCapture(config.ActiveCameraIndex);
    videoCapture.SetCaptureProperty(CapProp.Fps, 30);
    videoCapture.SetCaptureProperty(CapProp.FrameHeight, 450);
    videoCapture.SetCaptureProperty(CapProp.FrameWidth, 370);
    timer.Start();
}
// Prediction button: poll camera frames until a face appears, then classify it
// against the saved model and show the matching stored face image.
private void btnPredict_Click(object sender, EventArgs e)
{
    try
    {
        Mat imageMat = null;
        // Poll until a face shows up, but give up after a bounded number of
        // attempts — the original while(true) could spin forever and freeze
        // the UI thread if no face ever entered the frame. It also leaked
        // every rejected cloned frame.
        const int maxAttempts = 300;
        for (int attempt = 0; attempt < maxAttempts; attempt++)
        {
            imageMat = videoCapture.QueryFrame().Clone();
            Image<Gray, byte> image = imageMat.ToImage<Gray, byte>().Resize(180, 200, Emgu.CV.CvEnum.Inter.Cubic);
            Rectangle[] rectangles = classifier.DetectMultiScale(image, 1.1, 4);
            if (rectangles.Length > 0)
            {
                break;
            }
            imageMat.Dispose(); // don't leak frames with no face in them
            imageMat = null;
        }

        if (imageMat != null)
        {
            imageMat = imageMat.ToImage<Gray, byte>().Resize(180, 200, Emgu.CV.CvEnum.Inter.Cubic).Mat;
            faceRecognizer.Read(Application.StartupPath + @"/../../Images/Newfacerecognizer.yml");
            var res = faceRecognizer.Predict(imageMat);
            // NOTE(review): accepting when Distance > threshold looks inverted for
            // Eigen/Fisher/LBPH (smaller distance = better match) — confirm intent.
            if (res.Distance > threshold)
            {
                txtBox_Label.Text = res.Label.ToString();
                txtBox_distance.Text = res.Distance.ToString();
                string foundImage = Application.StartupPath + @"/../../Images/face" + res.Label.ToString() + ".bmp";
                imgBox1.Image = imageMat.ToImage<Gray, byte>();
                picBox.Image = Image.FromFile(foundImage);
                MessageBox.Show("Successufully found label", "Success", MessageBoxButtons.OK);
            }
            else
            {
                MessageBox.Show("Not Found", "INFO", MessageBoxButtons.OK);
            }
        }
        videoCapture.Stop();
    }
    catch (Exception err)
    {
        MessageBox.Show(err.Message, "INFO", MessageBoxButtons.OK);
    }
}
// Refreshes the recognizer state from disk and classifies the detected face.
// The RecognizedFace field is updated only for non-zero labels (this code
// treats label 0 as "no match").
private void Recognize()
{
    _faceRecognizer.Read(_faceRecognizerPath);
    var prediction = _faceRecognizer.Predict(detectedFaceForRecognizer);
    if (prediction.Label != 0)
    {
        RecognizedFace = _dataStoreAccess.GetUserName(prediction.Label);
    }
}
/// <summary>
/// Reloads the trained model, normalizes the face to 100x100, and returns the
/// predicted label. (Dead commented-out MemoryStream/Bitmap code was removed:
/// it was never executed and would not have produced a valid image anyway.)
/// </summary>
public int RecognizeUser(Image<Gray, byte> userImage)
{
    _faceRecognizer.Read(_recognizerFilePath);
    var result = _faceRecognizer.Predict(userImage.Resize(100, 100, Inter.Cubic));
    // Trace the raw prediction for debugging.
    Debug.Print("Face recognizer result Label:Distance {0}:{1}", result.Label, result.Distance);
    return result.Label;
}
// Loads the trained model and classifies the face. Returns the predicted label
// only when the eigen distance falls strictly inside (0, 3000); otherwise 0.
public int RecognizeUser(Image<Gray, byte> userImage)
{
    _faceRecognizer.Load(_recognizerFilePath);
    var prediction = _faceRecognizer.Predict(userImage.Resize(100, 100, Inter.Cubic));
    Console.WriteLine(prediction.Distance);
    bool withinThreshold = prediction.Distance > 0 && prediction.Distance < 3000;
    return withinThreshold ? prediction.Label : 0;
}
// Per-tick camera handler: grabs a frame, detects faces, shows each crop in the
// "detected" preview, and labels recognized faces on the live feed.
private void ProcessFrame(object sender, EventArgs e)
{
    // Retrieve the current frame and resize it to the video feed dimensions.
    video_capture.Retrieve(frame, 0);
    current_frame = frame.ToImage<Bgr, Byte>().Resize(video_feed.Width, video_feed.Height, Inter.Cubic);

    // Convert BGR to grayscale for detection.
    Mat grayImage = new Mat();
    CvInvoke.CvtColor(current_frame, grayImage, ColorConversion.Bgr2Gray);
    // Equalize the histogram to improve detection robustness.
    CvInvoke.EqualizeHist(grayImage, grayImage);

    // Rectangles bordering each face found by the cascade classifier.
    Rectangle[] faces = faceCasacdeClassifier.DetectMultiScale(grayImage, 1.1, 3, Size.Empty, Size.Empty);
    if (faces.Length > 0)
    {
        foreach (var face in faces)
        {
            // Show the cropped face (via ROI) in the detected-face preview box.
            Image<Bgr, Byte> resultImage = current_frame.Convert<Bgr, Byte>();
            resultImage.ROI = face;
            detected_feed.SizeMode = PictureBoxSizeMode.StretchImage;
            detected_feed.Image = resultImage.Bitmap;

            // Normalize to 200x200 grayscale — assumed to be the recognizer's
            // training size; TODO confirm.
            Image<Gray, Byte> gray_image = resultImage.Convert<Gray, Byte>().Resize(200, 200, Inter.Cubic);
            CvInvoke.EqualizeHist(gray_image, gray_image);

            var result = recognizer.Predict(gray_image);
            //Debug.WriteLine(result.Label + ". " + result.Distance);

            // Accept only matches below the hard-coded distance threshold.
            if (result.Label != -1 && result.Distance < 7500)
            {
                CvInvoke.PutText(current_frame, workers_names[result.Label], new Point(face.X - 2, face.Y - 2), FontFace.HersheyComplex, 1.0, new Bgr(Color.Orange).MCvScalar);
                CvInvoke.Rectangle(current_frame, face, new Bgr(Color.Green).MCvScalar, 2);
            }
            else
            {
                CvInvoke.PutText(current_frame, "Unknown", new Point(face.X - 2, face.Y - 2), FontFace.HersheyComplex, 1.0, new Bgr(Color.Orange).MCvScalar);
                CvInvoke.Rectangle(current_frame, face, new Bgr(Color.Red).MCvScalar, 2);
            }
        }
    }

    video_feed.Image = current_frame.Bitmap;
    // NOTE(review): current_frame is disposed right after its Bitmap is handed
    // to video_feed; Emgu's .Bitmap can share the underlying buffer — confirm
    // the control is not left holding a disposed backing image.
    if (current_frame != null)
    {
        current_frame.Dispose();
    }
}
//----------------------------------------------------------------------------//
//<<<<<<<<------FUNCTION USED TO DETECT AND RECOGNIZE FACES---------->>>>>>>>
//----------------------------------------------------------------------------//
// Detects faces in TestImage using UI-supplied cascade parameters, extracts each
// face, and runs it through the recognizer, reporting label + distance.
private void DetectAndRecognizeFaces()
{
    Image<Gray, byte> grayframe = TestImage.Convert<Gray, byte>();

    // Detection parameters come straight from the UI controls.
    MinNeighbors = int.Parse(comboBoxMinNeigh.Text);         // 3rd DetectMultiScale parameter
    WindowsSize = int.Parse(textBoxWinSiz.Text);             // minimum window size (5th parameter)
    ScaleIncreaseRate = Double.Parse(comboBoxScIncRte.Text); // 2nd DetectMultiScale parameter

    var faces = haar.DetectMultiScale(grayframe, ScaleIncreaseRate, MinNeighbors);
    MessageBox.Show("Total Faces Detected: " + faces.Length.ToString());

    Bitmap BmpInput = grayframe.ToBitmap();

    foreach (Rectangle face in faces)
    {
        // Mark the detected face on the displayed image.
        TestImage.Draw(face, new Bgr(Color.Green), 3);
        CamImageBox.Image = TestImage;

        // Copy the face pixels out of the input bitmap. The temporary bitmap
        // and the Graphics object are now disposed — the original leaked both
        // for every detected face.
        using (Bitmap ExtractedFace = new Bitmap(face.Width, face.Height))
        {
            using (Graphics g = Graphics.FromImage(ExtractedFace))
            {
                g.DrawImage(BmpInput, 0, 0, face, GraphicsUnit.Pixel);
            }

            try
            {
                Image<Bgr, byte> input = new Image<Bgr, byte>(new Bitmap(ExtractedFace));
                Image<Gray, byte> modinput = input.Convert<Gray, byte>().Resize(64, 64, Emgu.CV.CvEnum.Inter.Cubic);
                FaceRecognizer.PredictionResult a = recog.Predict(modinput);
                MessageBox.Show(a.Label.ToString() + " " + a.Distance.ToString());
            }
            catch (Exception e)
            {
                MessageBox.Show(e.Message);
            }
        }
    }

    //Display the detected faces in imagebox
    CamImageBox.Image = TestImage;
}
// Classifies the face with the persisted eigen recognizer. Returns the label
// when it is known (-1 means reject) and the distance beats the threshold;
// otherwise 0.
public int RecognizeFace(Image<Gray, byte> image)
{
    EigenFaceRecognizer eigen = OldEigen();
    FaceRecognizer.PredictionResult prediction = eigen.Predict(image);
    Console.WriteLine("ID: " + prediction.Label + ", " + "Threshold: " + prediction.Distance);
    bool accepted = prediction.Label != -1 && prediction.Distance < treshold;
    return accepted ? prediction.Label : 0;
}
/// <summary>
/// Reloads the trained model and returns the predicted label for the face,
/// normalized to 100x100. The original built a Bitmap from the image's raw
/// pixel bytes via a MemoryStream, but that object was never used — and raw
/// pixel bytes are not a decodable bitmap stream, so it would have thrown if
/// it ever had been. The dead code is removed.
/// </summary>
public int RecognizeUser(Image<Gray, byte> userImage)
{
    _faceRecognizer.Read(_recognizerFilePath);
    var result = _faceRecognizer.Predict(userImage.Resize(100, 100, Inter.Cubic));
    return result.Label;
}
// Button handler: detects faces in the currently loaded photo, classifies each
// one, and re-renders the annotated image into the picture control.
private void btnRecognizeFaces_Click(object sender, RoutedEventArgs e)
{
    // Only run when a photo has been loaded into the picture control.
    if (picCapture.Source != null)
    {
        // BGR -> grayscale, then equalize to improve detection.
        CvInvoke.CvtColor(Image, grayImage, ColorConversion.Bgr2Gray);
        CvInvoke.EqualizeHist(grayImage, grayImage);

        System.Drawing.Rectangle[] faces = faceCascadeClassifier.DetectMultiScale(grayImage, 1.1, 3, System.Drawing.Size.Empty, System.Drawing.Size.Empty);
        if (faces.Length > 0)
        {
            foreach (var face in faces)
            {
                CvInvoke.Rectangle(Image, face, new Bgr(System.Drawing.Color.Red).MCvScalar, 4);

                // Crop the face (via ROI) and normalize to 200x200 grayscale.
                Image<Bgr, Byte> resultImage = Image.Convert<Bgr, Byte>();
                resultImage.ROI = face;
                Image<Gray, Byte> grayFaceResult = resultImage.Convert<Gray, Byte>().Resize(200, 200, Inter.Cubic);
                CvInvoke.EqualizeHist(grayFaceResult, grayFaceResult);

                var result = recognizer.Predict(grayFaceResult);
                //Debug.WriteLine(result.Label + ". " + result.Distance);

                // Known face: accepted within the hard-coded distance threshold.
                if (result.Label != -1 && result.Distance < 2000)
                {
                    CvInvoke.PutText(Image, PersonsNames[result.Label], new System.Drawing.Point(face.X - 2, face.Y - 2), FontFace.HersheyComplex, 2.0, new Bgr(System.Drawing.Color.Green).MCvScalar);
                    CvInvoke.Rectangle(Image, face, new Bgr(System.Drawing.Color.Blue).MCvScalar, 5);
                }
                // Unknown face.
                else
                {
                    CvInvoke.PutText(Image, "Unknown", new System.Drawing.Point(face.X - 2, face.Y - 2), FontFace.HersheyComplex, 4.0, new Bgr(System.Drawing.Color.Orange).MCvScalar);
                    CvInvoke.Rectangle(Image, face, new Bgr(System.Drawing.Color.Red).MCvScalar, 2);
                }

                // Refresh the displayed image (a fresh conversion resets the ROI view).
                resultImage = Image.Convert<Bgr, Byte>();
                picCapture.Source = BitmapHelpers.ToBitmapSource(resultImage);
            }
        }
        else
        {
            // NOTE(review): this message reads like a "no photo selected" prompt,
            // but the branch actually fires when no faces were detected — confirm
            // the intended wording.
            MessageBox.Show("Choose photo");
        }
    }
}
// Predicts a name for the current detected face and captures its bitmap.
// When no training data is loaded, prompts the user instead.
private void FaceRecognition()
{
    if (imageList.Size == 0)
    {
        FaceName = "Please Add Face";
        return;
    }
    // Eigen face prediction on the 100x100-normalized detected face.
    FaceRecognizer.PredictionResult prediction = recognizer.Predict(detectedFace.Resize(100, 100, Inter.Cubic));
    FaceName = nameList[prediction.Label];
    CameraCaptureFace = detectedFace.ToBitmap();
}
/// <summary>
/// Builds an eigen recognizer from the saved training data and classifies the
/// face. The recognizer holds a native handle and is now disposed after use —
/// the original leaked it on every call. Dead commented-out code removed.
/// </summary>
public int RecognizeUser(Image<Gray, byte> userImage)
{
    string recognizerFilePath = Application.StartupPath + "/traineddata";
    using (FaceRecognizer faceRecognizer = new EigenFaceRecognizer(80, 1000))
    {
        faceRecognizer.Load(recognizerFilePath);
        var result = faceRecognizer.Predict(userImage.Resize(100, 100, Inter.Cubic));
        return result.Label;
    }
}
// Takes a snapshot after a configurable delay, detects faces, and lists the
// recognized names. The trained model is now created and read ONCE per snapshot
// instead of once per detected face (the original re-constructed the recognizer
// and re-read the model file inside the loop).
private void BtnSnapshot_Click(object sender, EventArgs e)
{
    using (var capture = new VideoCapture())
    {
        Thread.Sleep(PluginOptions.CameraDelayMs);
        using (Image<Bgr, byte> imageFrame = capture.QueryFrame().ToImage<Bgr, byte>())
        {
            if (imageFrame == null)
            {
                return;
            }
            using (Image<Gray, byte> grayImage = imageFrame.Convert<Gray, byte>())
            {
                if (PluginOptions.UseImageCorrection)
                {
                    grayImage._EqualizeHist();
                }
                using (var classifier = new CascadeClassifier(PluginOptions.CascadesPath + "haarcascade_frontalface_default.xml"))
                {
                    Rectangle[] part1 = classifier.DetectMultiScale(grayImage, 1.1, 10);
                    if (part1.Length == 0)
                    {
                        return;
                    }
                    LabelFacesList.Text = "";
                    // Only load a model when there is training data for it.
                    using (FaceRecognizer recognizer = TrainedImages.Count > 0 ? new EigenFaceRecognizer() : null)
                    {
                        if (recognizer != null)
                        {
                            recognizer.Read(PluginOptions.PluginPath + "SavedCascade.xml");
                        }
                        foreach (Rectangle face in part1)
                        {
                            using (Image<Gray, byte> resultingImage = imageFrame.Copy(face).Convert<Gray, byte>().Resize(100, 100, Inter.Cubic))
                            {
                                if (PluginOptions.UseImageCorrection)
                                {
                                    resultingImage._EqualizeHist();
                                }
                                imageFrame.Draw(face, new Bgr(Color.Blue), 2);
                                TestImage.Image = imageFrame;
                                if (recognizer == null)
                                {
                                    continue; // nothing trained yet — just draw the box
                                }
                                LabelFacesList.Text += $"{PluginOptions.PeopleFaces.ElementAt(recognizer.Predict(resultingImage).Label).Value}\r\n";
                            }
                        }
                    }
                }
            }
        }
    }
}
/// <summary>
/// Normalizes the face to the recognizer's expected size and returns the
/// predicted label. Dead commented-out threshold code was removed.
/// </summary>
public static int RecognizeFace(Image<Gray, byte> image)
{
    image = ImageHandler.ResizeGrayImage(image);
    EigenFaceRecognizer eigen = OldEigen();
    EigenFaceRecognizer.PredictionResult result = eigen.Predict(image);
    // NOTE(review): no distance threshold is applied, so the nearest label is
    // returned even for poor matches — confirm whether that is intended (the
    // removed commented-out code suggests a threshold was once planned).
    return result.Label;
}
// Classifies the detected face and, when the label maps to a known face,
// updates the UI state; otherwise treats it as a new face.
private void RecognizeFace()
{
    try
    {
        var result = _faceRecognizer.Predict(_detectedFaceImage.Resize(100, 100, Inter.Cubic));
        if (result.Label != -1 && result.Label != 0)
        {
            // FirstOrDefault returns null when no known face carries this label;
            // the original dereferenced .PersonName blindly and relied on the
            // catch below to swallow the NullReferenceException.
            var match = _knownFaces.FirstOrDefault(knownFace => knownFace.FaceId == result.Label);
            if (match == null)
            {
                NewFaceFound();
                return;
            }
            FaceName = match.PersonName;
            Distance = "Distance:\n" + result.Distance.ToString("0.##");
            CameraCaptureFace = _detectedFaceImage.ToBitmap();
            ActionButton = "Train";
        }
        else
        {
            NewFaceFound();
        }
    }
    catch (Exception)
    {
        // NOTE(review): swallowing every exception hides real failures; kept to
        // preserve the original best-effort behavior, but consider logging.
    }
}
// Handles a face-found event: loads the source file, crops and normalizes the
// reported face region, classifies it, and raises the recognized event when
// the label is valid.
private void RecognizeFaces(object sender, FaceFoundEventArgs args)
{
    using (Image<Bgr, byte> sourceImage = new Image<Bgr, byte>(args.FileName))
    using (var croppedFace = sourceImage.Copy(args.Face).Convert<Gray, byte>().Resize(100, 100, Inter.Cubic))
    {
        PredictionResult prediction = _currentFaceRecognizer.Predict(croppedFace);
        if (prediction.Label > -1)
        {
            // Announce the match together with its human-readable label.
            DoOnFaceRecognized(this, new FaceRecognizedEventArgs()
            {
                Face = args.Face,
                FileName = args.FileName,
                LabelId = prediction.Label,
                Label = _faceLabels[prediction.Label]
            });
        }
    }
}
/// <summary>
/// Classifies the face against the saved model and, when the match is close
/// enough and maps to a known user, logs the visit and returns the user's full
/// name; otherwise returns an empty string.
/// </summary>
public string Predict(Image<Gray, byte> image)
{
    faceRecognizer.Read(Application.StartupPath + @"/../../Images/faceRecognizer.yml");
    var res = faceRecognizer.Predict(image);
    // Accept only close matches (smaller distance = better).
    if (res.Distance < 3500)
    {
        DAL.Models.Label label = _context.Labels.FirstOrDefault(_ => _.LabelNumber == res.Label);
        // Guard against a label missing from the database — the original
        // dereferenced label.UserId without a null check.
        if (label != null)
        {
            var user = _context.Users.FirstOrDefault(_ => _.Id == label.UserId);
            if (user != null)
            {
                Log.Logger.Information("{@VisitDate} User{@username} with UserId {@UserId} {@VisitType} home and has access to House", DateTime.Now, user.FirstName + " " + user.LastName, user.Id);
                Log.CloseAndFlush();
                return user.FirstName + " " + user.LastName;
            }
        }
    }
    // The original reused the four-placeholder template here with only one
    // argument; log a message that matches its arguments instead.
    Log.Logger.Information("{@VisitDate} Unrecognized visitor was denied access", DateTime.Now);
    return string.Empty;
}
// Runs all three trained recognizers (Eigen, Fisher, LBPH) on the same face and
// combines their answers into a single RecognitionResult.
public Variables.RecognitionResult Recognise(Image<Gray, byte> Image)
{
    if (Loaded)
    {
        FaceRecognizer.PredictionResult EgienRes = eigen.Predict(Image);
        FaceRecognizer.PredictionResult FisherRes = fisher.Predict(Image);
        FaceRecognizer.PredictionResult LbRes = Lp.Predict(Image);
        if (EgienRes.Label == -1)
        {
            // Eigen found nothing — report unknown with zero distance.
            Eigen_label = "Unknown";
            Eigen_Distance = 0;
            return (new RecognitionResult() { Label = Eigen_label, Int = 0 });
        }
        else
        {
            //TODO : Equalize All Labels Problems
            Eigen_label = ListOFNames[EgienRes.Label];
            if (EgienRes.Label != -1 && FisherRes.Label != -1 && LbRes.Label != -1)
            {
                // Strong match: all three recognizers agree on the same label.
                if (EgienRes.Label == LbRes.Label && FisherRes.Label == EgienRes.Label)
                {
                    return (new RecognitionResult() { Label = Eigen_label, Int = (int)EgienRes.Distance });
                }
                // NOTE(review): accepting when distances EXCEED the thresholds looks
                // inverted (smaller distance = better for these recognizers), and the
                // mixed &&/|| has no clarifying parentheses — confirm intent.
                else if (EgienRes.Distance > Eigen_threshold && FisherRes.Distance > 3000 || LbRes.Distance > 100)
                {
                    return (new RecognitionResult() { Label = Eigen_label, Int = (int)EgienRes.Distance });
                }
                else
                {
                    // NOTE(review): "Unkown" is a typo preserved as-is; callers may
                    // compare against this exact string.
                    return (new RecognitionResult() { Label = "Unkown", Int = 0 });
                }
            }
            else if (EgienRes.Label != -1)
            {
                // Eigen alone matched; same inverted-looking threshold test as above.
                if (EgienRes.Distance > Eigen_threshold && (int)FisherRes.Distance > 3000 && (int)LbRes.Distance > 100)
                {
                    return (new RecognitionResult() { Label = Eigen_label, Int = (int)EgienRes.Distance });
                }
            }
            return (new RecognitionResult() { Label = "Unkown", Int = 0 });
        }
    }
    else
    {
        // Models not loaded yet — surface an error result.
        return (new RecognitionResult() { Label = "Unkown", Int = 0, HasError = true, ErrorMessage = "Not Trained" });
    }
}
// Per-tick capture handler: grabs a frame, optionally detects faces, saves
// training snapshots on demand, and (once trained) recognizes the detected
// face and checks the matched student in.
private void ProcessFrame(object sender, EventArgs e)
{
    // 1: Capture the current frame for the picture box.
    if (videoCapture != null && videoCapture.Ptr != IntPtr.Zero)
    {
        videoCapture.Retrieve(frame, 0);
        currentFrame = frame.ToImage<Bgr, Byte>().Resize(picCapture.Width, picCapture.Height, Inter.Cubic);

        // 2: Face detection.
        if (facesDetectionEnabled)
        {
            // Convert BGR (color) to grayscale.
            Mat grayImage = new Mat();
            CvInvoke.CvtColor(currentFrame, grayImage, ColorConversion.Bgr2Gray);
            // Equalize the histogram to improve detection quality.
            CvInvoke.EqualizeHist(grayImage, grayImage);
            Rectangle[] faces = faceCasacdeClassifier.DetectMultiScale(grayImage, 1.1, 3, Size.Empty, Size.Empty);

            // If any face was detected...
            if (faces.Length > 0)
            {
                foreach (var face in faces)
                {
                    // Draw a rectangle around the face.
                    CvInvoke.Rectangle(currentFrame, face, new Bgr(Color.Red).MCvScalar, 2);

                    // 3: Show the cropped face in the comparison picture box.
                    Image<Bgr, Byte> resultImage = currentFrame.Convert<Bgr, Byte>();
                    resultImage.ROI = face;
                    picDetected.SizeMode = PictureBoxSizeMode.StretchImage;
                    picDetected.Image = resultImage.Bitmap;

                    if (EnableSaveImage)
                    {
                        // Create the training-image folder if it does not exist yet.
                        string path = Directory.GetCurrentDirectory() + @"\TrainedImages";
                        if (!Directory.Exists(path))
                        {
                            Directory.CreateDirectory(path);
                        }
                        // Save 10 snapshots for training on a background task so the
                        // GUI does not freeze.
                        // NOTE(review): the `i == 10` check can never be true inside a
                        // loop bounded by i < 10, so the completion message never shows.
                        // Also "dd-mm-yyyy-hh-mm-ss" uses minutes ("mm") where months
                        // ("MM") were likely intended — confirm.
                        Task.Factory.StartNew(() => {
                            for (int i = 0; i < 10; i++)
                            {
                                // Resize before saving.
                                resultImage.Resize(200, 200, Inter.Cubic).Save(path + @"\" + txtPersonName.Text + "_" + DateTime.Now.ToString("dd-mm-yyyy-hh-mm-ss") + ".jpg");
                                Thread.Sleep(1000);
                                if (i == 10)
                                {
                                    MessageBox.Show("Training hoàn tất");
                                }
                            }
                        });
                    }
                    EnableSaveImage = false;
                    if (btnAddPerson.InvokeRequired)
                    {
                        btnAddPerson.Invoke(new ThreadStart(delegate { btnAddPerson.Enabled = true; }));
                    }

                    // 5: Face recognition (only once a model has been trained).
                    if (isTrained)
                    {
                        Image<Gray, Byte> grayFaceResult = resultImage.Convert<Gray, Byte>().Resize(200, 200, Inter.Cubic);
                        CvInvoke.EqualizeHist(grayFaceResult, grayFaceResult);
                        var result = recognizer.Predict(grayFaceResult);
                        var LBPresult = recognizerLBPH.Predict(grayFaceResult);
                        pictureBox1.Image = grayFaceResult.Bitmap;
                        pictureBox2.Image = TrainedFaces[LBPresult.Label].Bitmap;
                        Debug.WriteLine(result.Label + ". " + result.Distance);

                        // Match accepted within the LBPH distance threshold.
                        if (LBPresult.Label != -1 && LBPresult.Distance < 500)
                        {
                            CvInvoke.PutText(currentFrame, PersonsNames[LBPresult.Label], new Point(face.X - 2, face.Y - 2), FontFace.HersheyComplex, 1.0, new Bgr(Color.Orange).MCvScalar);
                            CvInvoke.Rectangle(currentFrame, face, new Bgr(Color.Green).MCvScalar, 2);
                            //MessageBox.Show(PersonsNames[LBPresult.Label]);
                            if (PersonsNames[LBPresult.Label] != null)
                            {
                                // First sighting: record the ID, then null out the name so
                                // the next sighting takes the check-in branch below.
                                txtFaceID.Invoke(new MethodInvoker(delegate() {
                                    txtFaceID.Text = PersonsNames[LBPresult.Label].ToString();
                                    results.Add(txtFaceID.Text);
                                }));
                                PersonsNames[LBPresult.Label] = null;
                            }
                            else
                            {
                                if (results.Any())
                                {
                                    // Second sighting: look up the student, show their info,
                                    // check them in, and stop the capture.
                                    string res = results.FirstOrDefault(x => x != null);
                                    SINHVIEN sv = TimKiem(txtFaceID.Text);
                                    Invoke(new Action(() => { ShowInfo(sv); }));
                                    AddCheckIn(sv);
                                    Flat = false;
                                    videoCapture.Stop();
                                }
                            }
                        }
                        // Face not present in the trained data.
                        else
                        {
                            CvInvoke.PutText(currentFrame, "Unknown", new Point(face.X - 2, face.Y - 2), FontFace.HersheyComplex, 1.0, new Bgr(Color.Orange).MCvScalar);
                            CvInvoke.Rectangle(currentFrame, face, new Bgr(Color.Red).MCvScalar, 2);
                        }
                    }
                }
            }
        }
        // Render the processed frame onto picCapture.
        picCapture.Image = currentFrame.Bitmap;
    }
    // Dispose the frame after processing.
    // NOTE(review): picCapture.Image was just set from currentFrame.Bitmap, which
    // may share the image buffer — confirm the control tolerates this disposal.
    if (currentFrame != null)
    {
        currentFrame.Dispose();
    }
}
// Decodes and resizes the raw image bytes, then returns the recognizer's
// predicted label for the resulting face.
public int RecognizeUser(byte[] userImage)
{
    Image<Gray, byte> face = StreamConverter.ByteToImageResize(userImage);
    var prediction = _faceRecognizer.Predict(face);
    return prediction.Label;
}