// Wires the detector to its owning window and loads the Haar cascade
// classifiers for face and eye detection from the working directory.
// Load failures are logged to the console rather than crashing the UI.
public FaceDetection(MainWindow mainWindow_arg)
{
    mainWindow = mainWindow_arg;
    // (Removed dead commented-out cascade paths and the unused
    //  Directory.GetCurrentDirectory() local.)
    try
    {
        haarFaces = new HaarCascade("haarcascade_frontalface_alt_tree.xml");
        haarEyes = new HaarCascade("haarcascade_eye.xml");
    }
    catch (Exception e)
    {
        Console.WriteLine(e.StackTrace);
    }
}
// Form constructor: opens the default camera, primes one frame so the
// capture reports its real dimensions, and loads the detectors used later.
public Form1()
{
    InitializeComponent();
    grabber = new Emgu.CV.Capture();
    // Grab one frame so Width/Height below reflect the actual stream size.
    grabber.QueryFrame();
    frameWidth = grabber.Width;
    frameHeight = grabber.Height;
    detector = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);
    // HSV and YCrCb threshold ranges used for skin-color segmentation.
    hsv_min = new Hsv(0, 45, 0);
    hsv_max = new Hsv(20, 255, 255);
    YCrCb_min = new Ycc(0, 131, 80);
    YCrCb_max = new Ycc(255, 185, 135);
    box = new MCvBox2D();
    // ellip = new Ellipse();
    _face = new HaarCascade("haarcascade_frontalface_alt_tree.xml");
    eyes = new HaarCascade("haarcascade_mcs_eyepair_big.xml");
    // NOTE(review): reye loads the *left*-eye cascade and leye the *right*-eye
    // cascade — possibly intentional for a mirrored camera image, but confirm.
    reye = new HaarCascade("haarcascade_mcs_lefteye.xml");
    leye = new HaarCascade("haarcascade_mcs_righteye.xml");
    label1.Hide();
}
// Main form constructor: opens camera 0 at 1280x720, loads the face
// cascade, and lists the available serial ports for the user to pick.
public Main()
{
    InitializeComponent(); // Initializes the form UI.
    capture = new Capture(0); // Create a camera capture from device 0.
    capture.SetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FRAME_WIDTH, 1280);
    capture.SetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FRAME_HEIGHT, 720);
    // NOTE(review): absolute path ties this build to one machine; prefer
    // shipping the cascade XML next to the executable.
    haar = new HaarCascade("C:\\Emgu\\emgucv-windows-universal-cuda 2.9.0.1922\\opencv\\data\\haarcascades\\haarcascade_frontalface_default.xml");
    // (Removed an unused local ArrayList that was never read.)
    // Serial settings: list every available COM port.
    foreach (String s in System.IO.Ports.SerialPort.GetPortNames())
    {
        txtPort.Items.Add(s);
    }
    labelHide();
}
// Verifies parsing of the MCS nose cascade: stage/tree counts, the
// stump-only tree structure, and the tilted-features flag.
public void ParseTest6()
{
    string fileName = Path.Combine(TestContext.CurrentContext.TestDirectory, "Resources", "haarcascade_mcs_nose.xml");

    // FIX: dispose the reader so the file handle is released even if
    // parsing or an assertion throws (the original leaked it).
    using (StreamReader stringReader = new StreamReader(fileName))
    {
        HaarCascade cascade = HaarCascade.FromXml(stringReader);

        Assert.AreEqual(20, cascade.Stages.Length);
        Assert.AreEqual(289, cascade.Stages[15].Trees.Length);

        // Every stage's first tree is a stump (a single feature node).
        for (int i = 0; i < cascade.Stages.Length; i++)
        {
            Assert.AreEqual(1, cascade.Stages[i].Trees[0].Length);
        }

        Assert.AreEqual(true, cascade.HasTiltedFeatures);
    }
}
// Hides the secondary controls until they are needed, wires the close
// handler, and loads the three face-detection cascades, reporting any
// load failure to the user.
private void addfaces_Load(object sender, EventArgs e)
{
    this.FormClosed += new FormClosedEventHandler(f_FormClosed);

    // Keep the auxiliary controls hidden on startup.
    button1.Visible = false;
    button2.Visible = false;
    button4.Visible = false;
    label1.Visible = false;
    textBox2.Visible = false;

    try
    {
        Haar = new HaarCascade("haarcascade_frontalface_alt_tree.xml");
        Haar2 = new HaarCascade("haarcascade_frontalface_default.xml");
        // NOTE(review): "haarcascade_profilefaced.xml" differs from the stock
        // OpenCV "haarcascade_profileface.xml" — confirm the file exists.
        Haar3 = new HaarCascade("haarcascade_profilefaced.xml");
    }
    catch (Exception err)
    {
        MessageBox.Show(err.ToString());
    }
}
// Form-load handler: opens the default capture device, loads the Haar
// cascades offered in the detector combo box, and initializes the
// trackbar labels and tooltips.
private void CameraCapture_Load(object sender, EventArgs e)
{
    try
    {
        _capture = new Capture();
    }
    catch (NullReferenceException excpt)
    {
        // Emgu surfaces a missing/busy camera as NullReferenceException.
        MessageBox.Show(excpt.Message);
    }
    //Initialize the HaarCascade
    _haarFace = new HaarCascade("haarcascade_frontalface_default.xml");
    _haarHand = new HaarCascade("haarcascade_hand.xml");
    _haarFist = new HaarCascade("hand_two.xml"); //close hand
    _haarPalm = new HaarCascade("palm.xml");
    //_haarFist = new HaarCascade("hand.xml"); //close hand
    //Set the TrackBar value
    double val = defaultValue + FactorTrackBar.Value * 0.01;
    displayScaleFactor.Text = val.ToString("F2"); //F2 2-digits after decimal point
    displayMinNeighbors.Text = NeighborsTrackBar.Value.ToString();
    comboBox.DisplayMember = "Text";
    comboBox.ValueMember = "Value";
    //Initialize ComboBox items: each anonymous item pairs a caption with its cascade
    var items = new[] {
        new { Text = "Fist", Value = _haarFist },
        new { Text = "Hand", Value = _haarHand },
        new { Text = "Palm", Value = _haarPalm },
        new { Text = "Face", Value = _haarFace }
    };
    //Set ComboBox items
    comboBox.DataSource = items;
    //Show ToolTip
    toolTip.SetToolTip(this.FactorTrackBar, "Increasing search window size by increasing Scale Factor.");
    toolTip.SetToolTip(this.NeighborsTrackBar, "Increasing Min Neighbors get less Detection but high quality.");
}
// Form-load handler: loads the face cascade, enables the Kinect colour,
// depth and skeleton streams, hooks the frame-ready events, and starts
// the idle-loop processor.
// NOTE(review): KinectSensors[0] throws when no sensor is attached —
// consider guarding on KinectSensors.Count before indexing.
private void Form1_Load(object sender, EventArgs e)
{
    haar = new HaarCascade("haarcascade_frontalface_alt2.xml");
    sensor = KinectSensor.KinectSensors[0];
    sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
    sensor.DepthStream.Enable(DepthImageFormat.Resolution320x240Fps30);
    sensor.SkeletonStream.Enable();
    running = false;
    PosX = 320;
    disT = false;
    try
    {
        sensor.Start();
    }
    catch
    {
        // FIX: corrected the garbled user-facing message ("Sensor not star").
        MessageBox.Show("Sensor not started");
    }
    try
    {
        sensor.ColorFrameReady += FrameReady;
    }
    catch
    {
        MessageBox.Show("Colour frame not found");
    }
    try
    {
        sensor.DepthFrameReady += DepthFrameReady;
    }
    catch
    {
        // FIX: corrected the typo "Defth frame not Found".
        MessageBox.Show("Depth frame not found");
    }
    posY = 0;
    sensor.ElevationAngle = 0;
    connectSp = false;
    DisconnectButton.Hide();
    Application.Idle += new EventHandler(take1);
}
// Form-load handler: prepares the MySQL connection, hides the status
// labels, loads the three face-detection cascades, and populates the
// database list.
private void Form1_Load(object sender, EventArgs e)
{
    // SECURITY(review): credentials are hard-coded in source and use the
    // root account — move them to configuration and use a restricted user.
    con = new MySqlConnection("server=localhost;database=test;uid=root;pwd=root");
    this.FormClosed += new FormClosedEventHandler(f_FormClosed);
    label1.Visible = false;
    label2.Visible = false;
    label3.Visible = false;
    label4.Visible = false;
    try
    {
        Haar = new HaarCascade("haarcascade_frontalface_alt_tree.xml");
        Haar2 = new HaarCascade("haarcascade_frontalface_default.xml");
        // NOTE(review): "haarcascade_profilefaced.xml" differs from the stock
        // OpenCV "haarcascade_profileface.xml" — confirm the file exists.
        Haar3 = new HaarCascade("haarcascade_profilefaced.xml");
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.ToString());
    }
    load_databases();
}
// Starts the vision subsystem: opens the capture device, hooks frame
// grabbing onto the application idle loop, loads the face cascade, and
// restores any previously trained faces from the TrainedFaces folder.
internal void Start()
{
    // Open the capture device and prime the first frame.
    grabber = new Capture();
    grabber.QueryFrame();

    // Process frames whenever the UI thread goes idle.
    m_Idle_Handler = new EventHandler(FrameGrabber);
    Application.Idle += m_Idle_Handler;

    // Cascade used for frontal-face detection.
    frontal_face_pattern = new HaarCascade("haarcascade_frontalface_default.xml");
    //eye = new HaarCascade("haarcascade_eye.xml");

    try
    {
        // TrainedLabels.txt stores "<count>%name1%name2%..." for the saved faces.
        string labelData = File.ReadAllText(Application.StartupPath + "/TrainedFaces/TrainedLabels.txt");
        string[] tokens = labelData.Split('%');
        NumLabels = Convert.ToInt16(tokens[0]);
        ContTrain = NumLabels;
        for (int index = 1; index <= NumLabels; index++)
        {
            string bitmapPath = Application.StartupPath + "/TrainedFaces/" + "face" + index + ".bmp";
            training_images.Add(new Image<Gray, byte>(bitmapPath));
            face_labels.Add(tokens[index]);
            recognized_people.Add(tokens[index]);
            recognized_faces.Add(new Image<Gray, byte>(bitmapPath));
            recognized_flags.Add(false);
        }
        Train();
    }
    catch (Exception)
    {
        //MessageBox.Show(e.ToString());
        MessageBox.Show("Nothing in binary database, please add at least a face(Simply train the prototype with the Add Face Button).", "Triained faces load", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
    }
    m_Terminal.Express("Vision started.", Expression_Types.Information);
}
/// <summary>
/// Checks an image for the presence of faces.
/// Detects faces tilted less than roughly 30 degrees — a limitation of the
/// Viola-Jones algorithm used by the Haar cascade.
/// </summary>
/// <param name="image">Input image.</param>
/// <returns>true if the image contains at least one face, false otherwise.</returns>
public static bool ContainsFaces(Bitmap image)
{
    // Full path to the Haar cascade file.
    string cascadeFile = Path.Combine(Environment.CurrentDirectory, @"cascades\haarcascade_frontalface_default.xml");

    // FIX: dispose the cascade and intermediate images — they wrap native
    // OpenCV memory that the original leaked on every call.
    using (var cascade = new HaarCascade(cascadeFile))
    using (var convertedImage = new Image<Bgr, byte>(image))
    using (var grayImage = convertedImage.Convert<Gray, byte>())
    {
        // Detect faces on the image.
        // (For parameter documentation see the Emgu DetectHaarCascade API reference.)
        var faces = grayImage.DetectHaarCascade(cascade, 1.1, 4,
            HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
            new Size(image.Width / 15, image.Height / 15))[0];

        // true when at least one face was found.
        return faces.Length > 0;
    }
}
// Main form constructor: hides the in-progress controls, enumerates the
// attached cameras into the combo box, loads the face cascade, and
// restores previously trained faces and labels.
public FrmPrincipal()
{
    InitializeComponent();
    btn_dung.Visible = false;
    btn_tieptuc.Visible = false;
    btn_tieptuc.Enabled = false;
    button2.Visible = false;

    // Enumerate the attached video devices into the camera combo box.
    dscamera = new FilterInfoCollection(FilterCategory.VideoInputDevice);
    foreach (FilterInfo i in dscamera)
    {
        cbx_mayanh.Items.Add(i.Name);
    }
    // FIX: only preselect the first camera when one exists; the original
    // set SelectedIndex = 0 unconditionally and threw with no camera attached.
    if (cbx_mayanh.Items.Count > 0)
    {
        cbx_mayanh.SelectedIndex = 0;
    }

    // Load haarcascade for face detection.
    face = new HaarCascade("haarcascade_frontalface_default.xml");
    //eye = new HaarCascade("haarcascade_eye.xml");
    try
    {
        // Load previously trained faces and the label for each image.
        string Labelsinfo = File.ReadAllText(Application.StartupPath + "/TrainedFaces/TrainedLabels.txt");
        string[] Labels = Labelsinfo.Split('%');
        NumLabels = Convert.ToInt16(Labels[0]);
        ContTrain = NumLabels;
        string LoadFaces;
        for (int tf = 1; tf < NumLabels + 1; tf++)
        {
            LoadFaces = "face" + tf + ".bmp";
            trainingImages.Add(new Image<Gray, byte>(Application.StartupPath + "/TrainedFaces/" + LoadFaces));
            labels.Add(Labels[tf]);
        }
    }
    catch (Exception e)
    {
        MessageBox.Show("Không có gì trong cơ sở dữ liệu nhị phân, vui lòng thêm ít nhất một khuôn mặt (Đơn giản chỉ cần huấn luyện nguyên mẫu bằng nút Thêm khuôn mặt).", "học gương mặt", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
    }
}
// Loads a Haar cascade from the given path and notifies subscribers of the
// outcome via OnHaarCascadeLoaded; failures are reported with false and
// forwarded to the error logger.
public void LoadHaarCascade(string filepath)
{
    try
    {
        haarCascade = new HaarCascade(filepath);
        haarCascadePath = filepath;
        OnHaarCascadeLoaded?.Invoke(true);
    }
    catch (Exception ex)
    {
        OnHaarCascadeLoaded?.Invoke(false);
        ErrorLogger.ProcessException(ex, true);
    }
}
/// <summary>
/// The only constructor. Reads faces from the database and initializes the
/// internal object responsible for face recognition.
/// </summary>
public Recognizer()
{
    readFiles(); // read the faces from the database

    // set the labels and the accuracy with which recognition is performed
    MCvTermCriteria criteria = new MCvTermCriteria(labels.Length, 0.001);

    // create a new face-recognition object;
    // it computes eigenvectors for every face in the database and for each
    // probe face, then compares the probe's values against those computed
    // for the database faces
    eor = new EigenObjectRecognizer(
        faces,        // array of faces
        labels,       // labels corresponding to the faces
        3000,         // thresholding level between individual eigenvectors
        ref criteria  // criterion
        );

    // create the pattern used for face detection
    haar = new HaarCascade("haarcascade_frontalface_default.xml");
}
// Registration form constructor: records the form size, loads the face
// cascade, and restores every registered user's face image and name from
// the database into the training lists.
public CadastrarPessoa()
{
    InitializeComponent();
    // Store the form's height and width.
    heigth = this.Height;
    width = this.Width;
    // Load the haarcascade face-detection algorithm into the face variable.
    face = new HaarCascade("haarcascade_frontalface_default.xml");
    try
    {
        // Load the stored face images (binary blobs).
        dbc.byteimg();
        // User names from the database.
        Labels = dbc.Name;
        // Total number of registered users (shown in a label elsewhere).
        NumLabels = dbc.usuariototal;
        ContTrain = NumLabels;
        // Walk over every registered name.
        for (int tf = 0; tf < NumLabels; tf++)
        {
            con = tf;
            // Convert the stored blob back into a bitmap.
            Bitmap bmp = new Bitmap(dbc.bypimg(con));
            imatreinada.Add(new Image <Gray, byte>(bmp));
            // Add the name for the current index.
            labels.Add(Labels[tf]);
        }
    }
    catch (Exception e)
    {
        // Shown when nobody is registered in the database yet.
        MessageBox.Show(e + " Não existe ninguem cadastrado com esse rosto na base de dados, por favor, cadastre o mesmo !!!", "Cadastrar rostos em BD", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
    }
}
// There are memory leaks here (known issue).
// Queues a background task that runs face detection (and, when trained,
// recognition) over the currently previewed image, draws the results on a
// copy, and swaps that copy back into the preview on the UI thread.
private void FindFaces_Click(object sender, RoutedEventArgs e)
{
    TaskItem taskItem = null;
    taskItem = new TaskItem("Поиск лиц на фотографии", false, () => {
        // Work on a copy so the preview only changes when processing is done.
        Image <Bgr, Byte> image = selectedImages[currentPreviewImage].Copy();
        using (UMat ugray = new UMat())
        {
            // Grayscale + histogram equalization before cascade detection.
            CvInvoke.CvtColor(image, ugray, ColorConversion.Bgr2Gray);
            CvInvoke.EqualizeHist(ugray, ugray);
            List <System.Drawing.Rectangle> Faces = new List <System.Drawing.Rectangle>();
            Faces.AddRange(HaarCascade.DetectMultiScale(ugray, 1.1, 10, new System.Drawing.Size(20, 20)));
            foreach (var face in Faces)
            {
                image.Draw(face, new Bgr(0, 0, 255), 3);
                if (PeopleData.Face.Count() != 0)
                {
                    // Predict on the cropped, 100x100-resized face; labels are
                    // 1-based and Distance <= 100 is treated as a match here.
                    var result = FaceRecognizer.Predict(ugray.ToImage <Gray, Byte>().Copy(face).Resize(100, 100, Inter.Cubic));
                    if (result.Label > 0 && result.Distance <= 100)
                    {
                        image.Draw(PeopleData.Name[result.Label - 1], new System.Drawing.Point(face.X, face.Y - 10), FontFace.HersheyComplex, 0.8, new Bgr(0, 0, 255));
                    }
                }
            }
        }
        selectedImages[currentPreviewImage] = image.Copy();
        // Marshal the preview update back onto the dispatcher (UI) thread.
        Dispatcher.Invoke(() => {
            SetImagePreview(currentPreviewImage);
            taskItem.Remove();
        });
    });
    TaskList.Items.Add(taskItem);
}
// Form constructor: loads the frontal-face cascade, opens the default
// capture device exactly once, and hooks frame processing onto the
// application idle loop. Aborts setup if no camera can be opened.
public Form1()
{
    InitializeComponent();

    // Read the HaarCascade object used for face detection.
    _face = new HaarCascade("haarcascades/haarcascade_frontalface_alt2.xml");

    if (_capture == null)
    {
        try
        {
            _capture = new Capture();
        }
        catch (NullReferenceException captureError)
        {
            // No usable camera — report and skip hooking the idle handler.
            MessageBox.Show(captureError.Message);
            return;
        }
    }

    Application.Idle += ProcessImage;
}
/// <summary>
/// Constructor of this form: opens the SQLite connection, loads the
/// detection cascades, and restores previously trained faces from disk.
/// </summary>
public Form1()
{
    InitializeComponent();
    DBConnecttion.getInstance().openConnection(); //Establishing connection with the sqlite database
    /*
     * these are haarcascades for detecting face, eyes, nose, mouth.
     * comment out the features you don't want to detect.
     * if you comment from here you also have to comment the related code
     * in the "FrameProcedure" function
     */
    faceDetected = new HaarCascade("haar/haarcascade_frontalface_default.xml"); //HaarCascade to detect the face
    eyesDetected = new HaarCascade("haar/eye.xml");                             //HaarCascade to detect the eyes
    //noseDetected = new HaarCascade("haar/nose.xml");
    // mouthDetected = new HaarCascade("haar/mouth.xml");
    try
    {
        //importing trained data from the db file and adding it to the runtime lists
        string Labelsinf = File.ReadAllText(Application.StartupPath + "/Faces/Faces.txt");
        List <string> Labels = Labelsinf.Split(',').Distinct().ToList <string>();
        Numlabels = Labels.Count();
        Count = Numlabels;
        string FacesLoad; // NOTE(review): declared but never used
        foreach (string i in Labels)
        {
            trainingImages.Add(new Image <Gray, byte>(Application.StartupPath + $"/Faces/{i}.bmp"));
            labels.Add(i);
        }
    }catch (Exception e)
    {
        // NOTE(review): deliberately best-effort — a missing Faces folder is
        // silently ignored; consider at least logging the exception.
    }
}
// Main form constructor: loads the face and mouth cascades, counts and
// loads the stored training bitmaps (faces plus smiling/sad mouth
// samples), and shows the default smiley image.
public Mainform()
{
    InitializeComponent();
    facedetector = new HaarCascade(Application.StartupPath + "/data/xml/haarcascade_frontalface_default.xml");
    mouthdetector = new HaarCascade(Application.StartupPath + "/data/xml/haarcascade_mcs_mouth.xml");
    try
    {
        // Counts of stored samples (faces, and smiles + sads combined).
        countfaces = Directory.GetFiles(Application.StartupPath + "/data/faces/").Length;
        countmouths = Directory.GetFiles(Application.StartupPath + "/data/smiles/").Length + Directory.GetFiles(Application.StartupPath + "/data/sads/").Length;
        // Face samples are labelled by their file name (without extension).
        foreach (string file in Directory.EnumerateFiles(Application.StartupPath + "/data/faces/", "*.bmp"))
        {
            trainedfaces.Add(new Image <Gray, byte>(file));
            labelsface.Add(Path.GetFileNameWithoutExtension(file));
        }
        // Mouth samples are labelled by their expression folder.
        foreach (string file in Directory.EnumerateFiles(Application.StartupPath + "/data/smiles/", "*.bmp"))
        {
            trainedmouths.Add(new Image <Gray, byte>(file));
            labelsmouth.Add("smile");
        }
        foreach (string file in Directory.EnumerateFiles(Application.StartupPath + "/data/sads/", "*.bmp"))
        {
            trainedmouths.Add(new Image <Gray, byte>(file));
            labelsmouth.Add("sad");
        }
    }
    catch
    {
        MessageBox.Show("Looks like smth went wrong trying to read data files");
    }
    smileimage = Image.FromFile(Application.StartupPath + "/data/smile.png");
    sadimage = Image.FromFile(Application.StartupPath + "/data/sad.png");
    emojipicture.Image = smileimage;
}
// Builds the "add employee" form: applies the configuration, wires UI
// defaults, pulls the employee table, loads the face cascade, and reads
// the previously trained face bitmaps (face1.bmp .. faceN.bmp).
public frmAddNewEmployee(Configuration appConfig)
{
    applicationConfiguration = (AppConfig)appConfig;
    InitializeComponent();
    forceCustomReInitialize();

    // Disable actions until a face has been captured.
    btnReset.Enabled = false;
    btnAddNewEmployee.Enabled = false;
    cbGioitinh.SelectedIndex = 0;

    connect();
    dt.Clear();
    getdata(); // fills dt with the employee table

    haar = new HaarCascade("haarcascade_frontalface_default.xml");
    try
    {
        foreach (DataRow row in dt.Rows)
        {
            EmployeeID.Add(row.ItemArray[0].ToString());
        }
        ContTrain = NumLabels = EmployeeID.Count();
        // One trained bitmap per employee, numbered from 1.
        for (int index = 1; index <= NumLabels; index++)
        {
            LoadFaces = "face" + index + ".bmp";
            trainingImages.Add(new Image<Gray, byte>(Application.StartupPath + "/TrainedFaces/" + LoadFaces));
        }
    }
    catch (Exception)
    {
        // Best-effort: missing training files are ignored.
    }
}
// Loads the face cascade and restores previously saved training images
// and their labels from the Faces folder; the first comma-separated entry
// in Faces.txt is the number of saved faces.
public Form1()
{
    InitializeComponent();
    faceDetected = new HaarCascade("haarcascade_frontalface_default.xml");
    try
    {
        string[] entries = File.ReadAllText(Application.StartupPath + "/Faces/Faces.txt").Split(',');
        NumLables = Convert.ToInt16(entries[0]);
        Count = NumLables;
        // Entry 0 is the count, so faces are numbered 1..NumLables.
        for (int index = 1; index <= NumLables; index++)
        {
            string bitmapName = "face" + index + ".bmp";
            trainingImages.Add(new Image<Gray, byte>(Application.StartupPath + "/Faces/" + bitmapName));
            labels.Add(entries[index]);
        }
    }
    catch (Exception)
    {
        MessageBox.Show("Nothing in the Database");
    }
}
// Loads the face cascade and restores previously saved training faces;
// the first comma-separated entry in Face.txt is the number of faces.
public Form1()
{
    InitializeComponent();
    faceDetected = new HaarCascade("haarcascade_frontalface_default.xml");
    try
    {
        string labelA = File.ReadAllText(Application.StartupPath + "/Faces/Face.txt");
        string[] labels = labelA.Split(',');
        //first label will be number of faces saved
        numLabels = Convert.ToInt16(labels[0]);
        count = numLabels;
        string facesLoad;
        for (int i = 1; i < numLabels + 1; i++)
        {
            facesLoad = "face " + i + ".bmp";
            // FIX: load the face bitmap itself; the original mistakenly passed
            // the label file (Face.txt) to the Image constructor, which threw
            // and left the training set empty on every run.
            trainingImages.Add(new Image<Gray, byte>(Application.StartupPath + "/Faces/" + facesLoad));
            Labels.Add(labels[i]);
        }
    }
    catch (Exception)
    {
        MessageBox.Show("Nothing in the database");
    }
}
// Loads the face cascade and restores previously saved training faces;
// faces.txt stores the face count followed by comma-separated labels.
public Form1()
{
    InitializeComponent();
    // HaarCascade is for face detection.
    faceDetected = new HaarCascade("haarcascade_frontalface_default.xml");
    try{
        string Labelsinf = File.ReadAllText(Application.StartupPath + "/Faces/faces.txt");
        string[] Labels = Labelsinf.Split(',');
        // The first label before , will be the number of faces saved.
        numLabels = Convert.ToInt16(Labels[0]);
        count = numLabels;
        string FacesLoad;
        // FIX: start at 1 — index 0 holds the count, not a name; the original
        // loop started at 0 and mislabelled every face by one position.
        for (int i = 1; i < numLabels + 1; i++)
        {
            FacesLoad = "face" + i + ".bmp";
            // FIX: load the face bitmap; the original passed the label text
            // file (Faces.txt) to the Image constructor, which always threw.
            trainingImage.Add(new Image<Gray, byte>(Application.StartupPath + "/Faces/" + FacesLoad));
            labels.Add(Labels[i]);
        }
    }catch (Exception)
    {
        MessageBox.Show("Nothing is in database");
    }
}
// Loads the face cascade and restores previously saved training faces;
// Faces.txt stores the face count followed by comma-separated labels.
public Form1()
{
    InitializeComponent();
    faceDetected = new HaarCascade("haarcascade_frontalface_default.xml");
    try
    {
        // FIX: the original path lacked the leading '/', so it pointed at
        // "<startup>Faces/Faces.txt" instead of the Faces subfolder.
        string labelsInfo = File.ReadAllText(Application.StartupPath + "/Faces/Faces.txt");
        string[] Labels = labelsInfo.Split(',');
        // FIX: parse the stored count — the original converted the string's
        // *Length*, not its value.
        numLabels = Convert.ToInt16(Labels[0]);
        count = numLabels;
        string facesLoad;
        for (int i = 1; i < numLabels + 1; i++)
        {
            facesLoad = "face" + i + ".bmp";
            // FIX: load the bitmap file; the original passed Faces.txt to the
            // Image constructor, which always threw.
            trainingImages.Add(new Image<Gray, byte>(Application.StartupPath + "/Faces/" + facesLoad));
            labels.Add(Labels[i]);
        }
    }
    catch (Exception)
    {
        MessageBox.Show(@"Nothing found.");
    }
}
// Restores the trained face set from TrainedFaces/TrainedLabels.txt, where
// the first '%'-separated token is the number of stored faces and the rest
// are the labels for face1.bmp .. faceN.bmp.
public Form1()
{
    InitializeComponent();
    face = new HaarCascade("haarcascade_frontalface_default.xml");
    try
    {
        string[] tokens = File.ReadAllText(Application.StartupPath + "/TrainedFaces/TrainedLabels.txt").Split('%');
        NumLabels = Convert.ToInt16(tokens[0]);
        ContTrain = NumLabels;
        for (int index = 1; index <= NumLabels; index++)
        {
            string fileName = "face" + index + ".bmp";
            trainingImages.Add(new Image<Gray, byte>(Application.StartupPath + "/TrainedFaces/" + fileName));
            labels.Add(tokens[index]);
        }
    }
    catch (Exception)
    {
        // Deliberate best-effort: having no trained faces yet is not an error.
    }
}
// Loads the face cascade and pulls every stored face (as a binary blob)
// plus its name from the data-access layer into the training lists.
public Form1()
{
    InitializeComponent();
    face = new HaarCascade("haarcascade_frontalface_default.xml");
    try
    {
        // Query the stored faces into the data-access object.
        clsDA.Consultar(d);
        string[] Labels = clsDA.Nombre;  // one name per stored face
        Numlabels = clsDA.TotalRostros;  // number of stored faces
        ContTrain = Numlabels;
        for (int i = 0; i < Numlabels; i++)
        {
            con = i;
            // Convert the stored binary blob back into a bitmap.
            Bitmap bmp = new Bitmap(clsDA.ConvertBinaryToImg(con));
            trainingImages.Add(new Image <Gray, byte>(bmp));
            labels.Add(Labels[i]);
        }
    }
    catch (Exception e)
    {
        // "No faces to load" — shown when the database is empty or unreadable.
        MessageBox.Show("Sin Rostros para cargar");
    }
}
// "Add student" form constructor: loads the face cascade (reporting a
// failure to the user) and fills the camera combo box with every video
// input device found, numbered from 1.
public frmThemSinhVien()
{
    InitializeComponent();
    try
    {
        haar = new HaarCascade("haarcascade_frontalface_default.xml");
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }

    multiCam = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
    int position = 1;
    foreach (DsDevice cam in multiCam)
    {
        cbCamIndex.Items.Add(position + ": " + cam.Name);
        position++;
    }
}
// Detects faces in the given bitmap with the configured Haar cascade and
// returns a copy with the faces outlined in yellow, or null when the input
// is null or no face is found.
public static Bitmap emguHaarDetect(Bitmap bt)
{
    // FIX: guard the input up front — the original null-checked the results
    // of `new`, which can never be null, after already dereferencing bt.
    if (bt == null)
    {
        return null;
    }
    // FIX: dispose the cascade and images — they wrap native OpenCV memory
    // that the original leaked on every call.
    using (Image<Bgr, byte> img = new Image<Bgr, byte>(bt))
    using (HaarCascade haar = new HaarCascade(haarXmlPath))
    using (Image<Gray, byte> gray = img.Convert<Gray, byte>())
    {
        MCvAvgComp[] faces = haar.Detect(gray, 1.4, 1,
            Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(20, 20));
        if (faces.Length > 0)
        {
            foreach (MCvAvgComp face in faces)
            {
                img.Draw(face.rect, new Bgr(Color.Yellow), 2);
            }
            return img.ToBitmap();
        }
        return null;
    }
}
// Form constructor: configures the window chrome, loads the face cascade,
// and restores previously saved training faces (Faces.txt holds the count
// followed by comma-separated labels).
public Form1()
{
    InitializeComponent();
    this.Text = "T.U.R.D.S. (Tiny User Recognition and Designator System)";
    this.Size = new System.Drawing.Size(770, 750);
    this.MaximizeBox = false;
    failedWebcamLabel.Hide();
    //Adding that string to the list on execution to make it simpler for the Excel File creation
    currentUserListBox.Items.Add("User Name, Date Logged on");
    //haarcascade is for face detection
    faceDetected = new HaarCascade("haarcascade_frontalface_default.xml");
    try
    {
        //Labelsinf reads the label file from the startup folder's Faces directory.
        string Labelsinf = File.ReadAllText(Application.StartupPath + "/Faces/Faces.txt");
        string[] Labels = Labelsinf.Split(','); //splits each object in Labels array with ','
        //the first label before ',' will be the number of faces saved.
        NumLables = Convert.ToInt16(Labels[0]);
        Count = NumLables;
        string FacesLoad;
        for (int i = 1; i < NumLables + 1; i++)
        {
            FacesLoad = "face" + i + ".bmp";
            trainingImages.Add(new Image <Gray, byte>(Application.StartupPath + $"/Faces/{FacesLoad}"));
            labels.Add(Labels[i]);
        }
    }
    catch (Exception ex)
    {
        // Shown when the label file or a face bitmap is missing/unreadable.
        MessageBox.Show("No match in the Database");
        //throw;
    }
}
// Runs face detection + recognition over a single image file and returns
// the detected face regions annotated with the predicted student identity.
private static RecognizerResult RecognizeFromImage(FaceRecognizer FaceRec, String ImagePath)
{
    RecognizerResult Result = new RecognizerResult();
    // Keep only the file name, not the whole path.
    Result.ImageLink = System.IO.Path.GetFileName(ImagePath);
    // Lazily create the shared cascade on first use.
    if (Haar == null)
    {
        Haar = new HaarCascade(HAAR_XML_PATH);
    }
    // FIX: dispose the intermediate images — they wrap native OpenCV buffers
    // that the original leaked on every call. The redundant Clone() before
    // Convert (which already produces a new image) was removed as well.
    using (Image<Bgr, byte> RawImage = new Image<Bgr, byte>(ImagePath))
    using (Image<Gray, byte> Image = RawImage.Convert<Gray, byte>())
    {
        var FacesDetected = Image.DetectHaarCascade(Haar, DETECT_SCALE, MIN_NEIGHBOR, 0, new System.Drawing.Size(MIN_SIZE, MIN_SIZE))[0];
        foreach (var Face in FacesDetected)
        {
            FaceRegion FaceReg = new FaceRegion(Face.rect.X, Face.rect.Y, Face.rect.Width, Face.rect.Height);
            // Identify whose face this is.
            using (Image<Gray, byte> FaceImage = Image.Copy(Face.rect).Resize(TRAINING_DATA_SIZE, TRAINING_DATA_SIZE, INTER.CV_INTER_CUBIC))
            {
                FaceImage._EqualizeHist();
                FaceRecognizer.PredictionResult PR = FaceRec.Predict(FaceImage);
                FaceReg.StudentID = PR.Label;
                FaceReg.StudentName = GetUserName(PR);
            }
            Result.FaceList.Add(FaceReg);
        }
    }
    return Result;
}
// Builds the recognition form: opens the DB connection, binds the grid,
// loads the face cascade, and restores trained faces; when no trained
// data exists the form minimizes itself and opens the Authen form so the
// user can enroll a face first.
public FrmP()
{
    InitializeComponent();
    button2.Enabled = false;
    con.Open();
    bind();
    //Load haarcascade for face detection
    face = new HaarCascade("haarcascade_frontalface_default.xml");
    //eye = new HaarCascade("haarcascade_eye.xml");
    try
    {
        //Load previously trained faces and the label for each image.
        //TrainedLabels.txt stores "<count>%name1%name2%...".
        string Labelsinfo = File.ReadAllText(Application.StartupPath + "/TrainedFaces/TrainedLabels.txt");
        string[] Labels = Labelsinfo.Split('%');
        NumLabels = Convert.ToInt16(Labels[0]);
        ContTrain = NumLabels;
        string LoadFaces;
        for (int tf = 1; tf < NumLabels + 1; tf++)
        {
            LoadFaces = "face" + tf + ".bmp";
            trainingImages.Add(new Image <Gray, byte>(Application.StartupPath + "/TrainedFaces/" + LoadFaces));
            labels.Add(Labels[tf]);
        }
    }
    catch (Exception e)
    {
        MessageBox.Show("Nothing in Face database, please add at least a face(Sending To Admin section to Add Face)", "Triained faces load", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
        //Fallback: minimize this form and route the user to the admin/auth
        //form so a face can be enrolled before recognition is attempted.
        this.WindowState = FormWindowState.Minimized;
        Authen frm = new Authen();
        frm.Show();
    }
}
// Attendance-system form: starts the camera and, on every grabbed frame,
// detects faces, optionally recognizes them against the trained set, and
// paints the annotated frame into the preview picture box.
public YokalmaSistemi()
{
    InitializeComponent();
    list_view_var_olanlar.Columns.Add("Adı Soyadı", 100);
    ComboBoxUpdate();
    Capture capture = new Capture();
    capture.Start();
    // NOTE(review): `new` never yields null, so the error branch below is
    // unreachable; a try/catch around `new Capture()` would be needed to
    // actually detect a missing camera.
    if (capture == null)
    {
        MessageBox.Show("Kamera Açılamadı");
    }
    else
    {
        // FIX: create the cascade and font once, outside the frame handler —
        // the original allocated (and never disposed) these native OpenCV
        // objects on *every* grabbed frame, leaking memory while running.
        HaarCascade haaryuz = new HaarCascade("haarcascade_frontalface_default.xml");
        MCvFont font = new MCvFont(FONT.CV_FONT_HERSHEY_COMPLEX, 0.5, 0.5);
        capture.ImageGrabbed += (a, b) =>
        {
            var image = capture.RetrieveBgrFrame();
            var grayimage = image.Convert<Gray, byte>();
            MCvAvgComp[][] Yuzler = grayimage.DetectHaarCascade(haaryuz, 1.2, 5, HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(15, 15));
            foreach (MCvAvgComp yuz in Yuzler[0])
            {
                // Crop and normalize the face to 100x100 for recognition.
                var sadeyuz = grayimage.Copy(yuz.rect).Convert<Gray, byte>().Resize(100, 100, INTER.CV_INTER_CUBIC);
                pic_kucuk_res.Image = sadeyuz.ToBitmap();
                if (train.IsTrained)
                {
                    name = train.Recognise(sadeyuz);
                    int match_value = (int)train.Get_Eigen_Distance;
                    image.Draw(name + " ", ref font, new Point(yuz.rect.X - 2, yuz.rect.Y - 2), new Bgr(Color.SteelBlue));
                }
                image.Draw(yuz.rect, new Bgr(Color.Purple), 2);
            }
            pic_kamera.Image = image.ToBitmap();
        };
    }
}
// Creates a HaarDetector object, parsing an OpenCV XML storage file whose
// full path is given.
public HaarDetector(string OpenCVXmlStorage)
{
    HCascade = new HaarCascade(OpenCVXmlStorage);
}
// Creates a HaarDetector object, parsing the given XML document. This
// constructor can be used for loading embedded (resource) cascades.
public HaarDetector(XmlDocument XmlDoc)
{
    HCascade = new HaarCascade(XmlDoc);
}
// Wraps a detected face: keeps the source image and the face rectangle,
// and loads the eye cascade (eyeglasses-tolerant variant) for eye search.
public Face(Image<Bgr, Byte> img, Rectangle rect)
{
    _image = img;
    _rect = rect;
    _eyeCascade = new HaarCascade("haarcascade_eye_tree_eyeglasses.xml");
}
// Loads the frontal-face cascade (alt2 variant) used for detection.
// NOTE(review): the file is resolved relative to the working directory —
// confirm it is deployed next to the executable.
public FaceDetector()
{
    _faceCascade = new HaarCascade("haarcascade_frontalface_alt2.xml");
}
// Sets up frame processing: loads the face cascade from two directories
// above the build output and starts with a fresh stopwatch and state.
public FrameProcessor()
{
    _haar = new HaarCascade(@"..\..\haarcascade_frontalface_alt2.xml");
    sw = new Stopwatch();
    Reset();
}