/// <summary>
    /// Recognise a Grayscale Image using the trained Eigen Recogniser
    /// </summary>
    /// <param name="Input_image"></param>
    /// <returns></returns>
    public string Recognise(Image <Gray, byte> Input_image, int Eigen_Thresh = -1)
    {
        if (_IsTrained)
        {
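            // Predict returns the closest training label and its distance; the label comes back
            // as -1 when the distance exceeds the threshold configured on the recognizer.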
            FaceRecognizer.PredictionResult ER = recognizer.Predict(Input_image);

            if (ER.Label == -1)
            {
                Eigen_label    = "Unknown";
                Eigen_Distance = 0;
                return(Eigen_label);
            }
            else
            {
                Eigen_label    = Names_List[ER.Label];
                Eigen_Distance = (float)ER.Distance;
                if (Eigen_Thresh > -1)
                {
                    PCAThreshold = Eigen_Thresh;
                }

                return(Eigen_label); //the threshold set in training controls unknowns
            }
        }
        else
        {
            return("");
        }
    }
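
For context, the Recognise() method above relies on a recognizer that was trained earlier and on a Names_List aligned with the training labels. The sketch below is one illustrative way that setup might look, assuming Emgu CV's EigenFaceRecognizer and the Train(images[], labels[]) overload used elsewhere in these examples; the method name, component count, and threshold are placeholders, not part of the original source.

    public void TrainRecogniser(Image <Gray, byte>[] faces, string[] names)
    {
        // Placeholder values: 80 principal components, distance cut-off of 3500.
        // Faces farther than the threshold come back from Predict with Label == -1.
        recognizer = new EigenFaceRecognizer(80, 3500);

        // One label per training image, matching the Names_List[ER.Label] lookup above.
        int[] labels = new int[faces.Length];
        for (int i = 0; i < faces.Length; i++)
        {
            labels[i] = i;
        }

        Names_List = new List <string>(names);
        recognizer.Train(faces, labels);
        _IsTrained = true;
    }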
        public string Recognise(Image <Gray, byte> Input_image, int Eigen_Thresh = -1)
        {
            try
            {
                if (_IsTrained)
                {
                    Set_Eigen_Threshold = recognizeTreshold;
                    FaceRecognizer.PredictionResult ER = recognizer.Predict(Input_image);
                    Console.WriteLine(ER.Label);
                    if (ER.Label == -1)
                    {
                        Eigen_label    = "UnknownNull";
                        Eigen_Distance = 0;
                        return(Eigen_label + " " + Eigen_Distance.ToString());
                    }
                    else
                    {
                        Eigen_label    = allname[ER.Label];
                        Eigen_Distance = (float)ER.Distance;


                        return(Eigen_label + " " + Eigen_Distance.ToString());
                    }
                }
                else
                {
                    return("");
                }
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
                return("");
            }
        }
Example #3
        private string FaceRecognition(Image <Gray, Byte> image, int Eigen_Thresh = -1)
        {
            if (imageList.Size != 0)
            {
                //Eigen Face Algorithm
                FaceRecognizer.PredictionResult result = recognizer.Predict(image);

                if (result.Label == -1)
                {
                    recognitionName       = "Unknown";
                    Config.Eigen_Distance = 0;
                    return(recognitionName);
                }
                else
                {
                    recognitionName       = nameList[result.Label];
                    Config.Eigen_Distance = (float)result.Distance;
                    if (Eigen_Thresh > -1)
                    {
                        Config.Threshold = Eigen_Thresh;
                    }

                    if (Config.Eigen_Distance < Config.Threshold)
                    {
                        return(recognitionName);
                    }
                    else
                    {
                        return("Unknown");
                    }
                }
            }

            return("");
        }
Example #4
        public string RecognizeFace(byte[] photoByteArray)
        {
            try
            {
                if (_eigen == null)
                {
                    throw new Exception(ConfigurationManager.AppSettings["RecognizerError"]);
                }

                Image <Bgr, byte> photo = photoByteArray.ByteArrayToImage();

                IFaceDetectionService faceDetectionService = new FaceDetectionService();
                Rectangle[]           faces = faceDetectionService.DetectFacesAsRect(photo);

                ICollection <string> recognizedNames = new List <string>();

                foreach (Rectangle faceRectangle in faces)
                {
                    var face = photo.Copy(faceRectangle).ConvertToRecognition();
                    FaceRecognizer.PredictionResult result = _eigen.Predict(face);

                    if (result.Label > 0)
                    {
                        recognizedNames.Add(_FacesNamesArray[result.Label - 1]);
                    }
                }
                return(string.Join(Environment.NewLine, recognizedNames));
            }
            catch (Exception)
            {
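                // Rethrowing with a bare "throw;" preserves the original stack trace;
                // this catch block adds no handling of its own.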
                throw;
            }
        }
        public int MatchedFace(Image <Gray, Byte> face, ref double distance)
        {
            FaceRecognizer.PredictionResult pr = new FaceRecognizer.PredictionResult();
            try
            {
                string dbPath = Path.Combine(Directory.GetCurrentDirectory(), DataBase.facesPath);
                //if (Directory.Exists(dbPath))
                //    dbPath = dbPath;

                //FisherFaceRecognizer recognizer = new FisherFaceRecognizer(/*DataBase.facesPath*/);
                //recognizer.Train( .Load(dbPath);
                pr = recognizer.Predict(face);//.Resize(100, 100, Emgu.CV.CvEnum.Inter.Cubic));

                distance = pr.Distance;

                // Apply a recognizer-specific distance cut-off before accepting the predicted label.
                if (recognizer is EigenFaceRecognizer)
                {
                    return /*pr.Label;*/ ((pr.Distance > 2500) ? pr.Label : -1);
                }
                else if (recognizer is LBPHFaceRecognizer)
                {
                    return /*pr.Label;*/ (pr.Distance < 250 ? pr.Label : -1);
                }
                else
                {
                    return /*pr.Label;*/ (pr.Distance > 300 ? pr.Label : -1);
                }
            }
            catch (Exception ex)
            {
                // if no person is registered
                MessageBox.Show(ex.Message, "Matched Face");
            }
            return(-1);
        }
Example #6
 private void Button2_Click(object sender, EventArgs e)
 {
     using (Image <Gray, byte> frameImage = new Image <Gray, byte>(this.capture.QueryFrame().Bitmap))
     {
         bool flag = frameImage != null;
         if (flag)
         {
             Rectangle[] faces = this.cascadeClassifierForFace.DetectMultiScale(frameImage, 1.1, 12, Size.Empty, default(Size));
             bool        flag2 = faces.Length != 1;
             if (flag2)
             {
                 this.label1.Text = "未检出人脸"; // "No face detected"
             }
             else
             {
                 Image <Gray, byte> face = frameImage.GetSubRect(faces[0]);
                 this.faceRecognizer.Read("face.xml");
                 face._EqualizeHist();
                 FaceRecognizer.PredictionResult result = this.faceRecognizer.Predict(face.Resize(100, 100, Inter.Cubic));
                 Console.WriteLine(result.Label);
                 string path = Directory.GetDirectories("img")[result.Label];
                 // Format string: "The current user is: {0}, difference: {1}"
                 this.label1.Text = string.Format("当前用户是:{0},差异度:{1}", path.Split(new char[]
                 {
                     '\\'
                 }).Last <string>(), result.Distance);
             }
         }
     }
 }
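
Example #6 above restores a previously trained model with faceRecognizer.Read("face.xml"). A minimal, illustrative counterpart for producing that file is sketched below; it assumes an Emgu CV version where FaceRecognizer exposes Write() alongside Read(), infers the one-folder-per-person layout from the Directory.GetDirectories("img") lookup above, and uses placeholder method names rather than anything from the original source.

 private void TrainAndSaveModel()
 {
     var faces  = new List <Image <Gray, byte> >();
     var labels = new List <int>();

     // One label per sub-folder of "img", matching how Button2_Click maps
     // result.Label back to a folder name.
     string[] people = Directory.GetDirectories("img");
     for (int label = 0; label < people.Length; label++)
     {
         foreach (string file in Directory.GetFiles(people[label]))
         {
             var face = new Image <Gray, byte>(file);
             face._EqualizeHist();
             faces.Add(face.Resize(100, 100, Inter.Cubic));
             labels.Add(label);
         }
     }

     var trainer = new EigenFaceRecognizer();
     trainer.Train(faces.ToArray(), labels.ToArray());
     trainer.Write("face.xml"); // counterpart of the Read("face.xml") call above
 }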
Example #7
        private Person CheckRecognizeResults(FaceRecognizer.PredictionResult result, int threshold)
        {
            // @param threshold should usually be in [0, 5000]
            string EigenLabel;
            float  EigenDistance = -1;

            if (result.Label == -1)
            {
                EigenLabel    = "Unknown";
                EigenDistance = 0;
                return(null);
            }
            else
            {
                EigenLabel    = result.Label.ToString();
                EigenDistance = (float)result.Distance;
                //EigenLabel = EigenDistance > threshold ? "Unknown" : result.Label.ToString();
                if (EigenDistance < threshold)
                {
                    return(Storage.FindPersonByID(result.Label));
                }
            }

            return(null);
            //return EigenLabel;// + '\n' + "Distance: " + EigenDistance.ToString();
        }
Example #8
        internal List <Image <Gray, byte> > DetectFace(Image <Bgr, byte> image, int width, int height, out int count)
        {
            var faces = _cascadeClassifier.DetectMultiScale(image, 1.2, 10); //the actual face detection happens here

            count = faces.Length;
            List <Image <Gray, byte> > grayFaces = new List <Image <Gray, byte> >();

            Parallel.ForEach(faces, face => {
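                // Note: List<T>.Add is not thread-safe, so concurrent adds from Parallel.ForEach
                // can corrupt grayFaces; a lock or a ConcurrentBag would be safer here.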
                int xPos     = face.X;
                int yPos     = face.Y;
                var grayFace = image.Copy(face).Resize(width, height, Inter.Cubic).Convert <Gray, byte>();
                grayFaces.Add(grayFace);
                // grayFace._EqualizeHist();
                image.Draw(face, new Bgr(Color.LightBlue), 3);
                if (IsTraining.Equals(false) && _recognizer != null)
                {
                    FaceRecognizer.PredictionResult result = _recognizer.Predict(grayFace);
                    // float result = svmModel.Predict(grayFace);
                    if (result.Label != -1 && faceMapping.ContainsKey(result.Label))
                    {
                        string message = faceMapping[result.Label];
                        DrawText(message, image, xPos, yPos);
                        Console.WriteLine("[" + result.Distance + "] " + message);
                    }
                }
            });
            return(grayFaces);
        }
Example #9
        public string Recognize(Image <Bgr, Byte> display)
        {
            Rectangle[]        faces = cascade.DetectMultiScale(display.Convert <Gray, Byte>(), 1.2, 0);
            Image <Gray, Byte> faceImage;

            try
            {
                faceImage = display.Convert <Gray, Byte>().Copy(faces[0]).Resize(100, 100, Emgu.CV.CvEnum.Inter.Cubic);
            }
            catch (IndexOutOfRangeException e)
            {
                Console.WriteLine(e.Message);
                return(null);
            }

            FaceRecognizer.PredictionResult result = recognizer.Predict(faceImage);

            //For testing purpose
            Console.WriteLine(result.Distance);

            if (result.Distance <= 3000)
            {
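                // Assumes five training images per person, so integer-dividing the label by 5
                // maps it back to the person's index in namesList.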
                return(namesList.ElementAt(result.Label / 5));
            }
            else
            {
                return(null);
            }
        }
        public ShellViewModel(Config config)
        {
            this.config    = config;
            Title          = "Face Recognition";
            timer.Interval = TimeSpan.FromMilliseconds(config.TimerResponseValue);
            timer.Tick    += (sender, args) =>
            {
                ProcessFrame();
            };

            if (imageList.Size != 0)
            {
                //Eigen Face Algorithm
                FaceRecognizer.PredictionResult result = recognizer.Predict(detectedFace.Resize(100, 100, Inter.Cubic));
                FaceName          = nameList[result.Label];
                cameraCaptureFace = detectedFace.ToBitmap();
            }
            else
            {
                FaceName     = "Please Add Face";
                detectedFace = null;
            }



            GetFacesList(config);
            videoCapture = new VideoCapture(config.ActiveCameraIndex);
            videoCapture.SetCaptureProperty(CapProp.Fps, 30);
            videoCapture.SetCaptureProperty(CapProp.FrameHeight, 450);
            videoCapture.SetCaptureProperty(CapProp.FrameWidth, 370);
            timer.Start();
        }
Example #11
        private void FaceRecognition(Image <Gray, Byte> detectedFaceImage, Rectangle face, int currentFaceIndex)
        {
            string recongnizedFaceName = string.Empty;

            if (ImageList.Size != 0)
            {
                FaceRecognizer.PredictionResult result = FaceRecognizer.Predict(detectedFaceImage.Resize(148, 148, Inter.Cubic));
                recongnizedFaceName = NamesList[result.Label];
            }
            else
            {
                recongnizedFaceName = "Noma'lum shaxs"; // "Unknown person"
            }
            CurrentImage.Draw(recongnizedFaceName, new Point(face.X - 2, face.Y - 2), FontFace.HersheyDuplex, 0.5, new Bgr(Color.LightGreen));
            Bitmap CameraCaptureFace = detectedFaceImage.ToBitmap();

            switch (currentFaceIndex)
            {
            case 0:
                pbDetectedFace0.Image   = CameraCaptureFace;
                txtRecognizedFace0.Text = recongnizedFaceName;
                break;

            case 1:
                pbDetectedFace1.Image   = CameraCaptureFace;
                txtRecognizedFace1.Text = recongnizedFaceName;
                break;

            default:
                break;
            }
        }
Example #12
        internal Image <Bgr, byte> DetectFace(Mat frame, int width, int height)
        {
            var image = frame.ToImage <Bgr, byte>();
            var faces = _cascadeClassifier.DetectMultiScale(image, 1.2, 10); //the actual face detection happens here

            for (var i = 0; i < faces.Length; i++)
            {
                var face     = faces[i];
                int xPos     = face.X;
                int yPos     = face.Y;
                var grayFace = image.Copy(face).Resize(width, height, Inter.Cubic).Convert <Gray, byte>();
                // grayFace._EqualizeHist();
                image.Draw(face, new Bgr(Color.LightBlue), 3);
                if (IsTraining.Equals(false) && _recognizer != null)
                {
                    FaceRecognizer.PredictionResult result = _recognizer.Predict(grayFace);
                    // float result = svmModel.Predict(grayFace);
                    if (result.Label != -1 && faceMapping.ContainsKey(result.Label))
                    {
                        string message = faceMapping[result.Label];
                        DrawText(message, image, xPos, yPos);
                        Console.WriteLine("[" + result.Distance + "] " + message);
                    }
                    else
                    {
                        Console.Write(".");
                    }
                }
            }
            return(image);
        }
 public FaceRecognizer.PredictionResult Who(Image <Gray, Byte> face)
 {
     FaceRecognizer.PredictionResult result = recognizerEMGUCV.Predict(face);
     if (result.Label != -1)
     {
         result.Label = (int)trainingImage.mapToCount[result.Label];
     }
     return(result);
 }
 public int RecognizeFace(Image <Gray, byte> grayImage)
 {
     FaceRecognizer.PredictionResult result = recognizer.Predict(grayImage.Resize(100, 100, Inter.Cubic));
     if (result.Label != -1)
     {
         return(result.Label);
     }
     return(-1);
 }
Example #15
        private void button2_Click(object sender, EventArgs e)
        {
            learn();
            string name;
            int    resultat = 0;

            using (var imageFrame = capture.QuerySmallFrame().ToImage <Bgr, Byte>())
            {
                if (imageFrame != null)
                {
                    var  grayframe  = imageFrame.Convert <Gray, byte>();
                    var  Faces      = cascadeClassifier.DetectMultiScale(grayframe, 1.1, 10, Size.Empty); //the actual face detection happens here
                    var  grayframe2 = grayframe.Resize(400, 400, interpolationType: Inter.Cubic);
                    bool found      = false;
                    foreach (var face in Faces)
                    {
                        imageFrame.Draw(face, new Bgr(Color.BurlyWood), 3); //the detected face(s) is highlighted here using a box that is drawn around it/them
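                        // Note: Predict is called on grayframe2 (the full resized frame),
                        // not on the detected face region.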
                        FaceRecognizer.PredictionResult pre = recognizer.Predict(grayframe2);



                        if (pre.Label != -1)
                        {
                            for (int j = 0; j < faces.Count; j++)
                            {
                                if (Int32.Parse(faces[j].cin) == pre.Label)
                                {
                                    login.ValidatedEns.cin           = faces[j].cin;
                                    login.ValidatedEns.mots_de_passe = faces[j].mots_de_passe;
                                    login.ValidatedEns.nom           = faces[j].nom;
                                    login.ValidatedEns.prenom        = faces[j].prenom;
                                    login.ValidatedEns.mail          = faces[j].mail;
                                    login.ValidatedEns.photo         = faces[j].photo;
                                    login.ValidatedEns.code_a_bar    = faces[j].code_a_bar;
                                    found = true;
                                    break;
                                }
                            }
                        }
                        else
                        {
                            MessageBox.Show("aucun similaire dans la base !"); // "no match found in the database!"
                            break;
                        }
                        if (found)
                        {
                            Accueil_Enseignant ac = new Accueil_Enseignant();
                            ac.Show();
                            Close();
                            break;
                        }
                    }

                    pictureBox2.Image = imageFrame.ToBitmap();
                }
            }
        }
        public void recognize_and_draw(DrawingContext dc, ref WriteableBitmap color_frame, int display_width, int display_height)
        {
            if (counter % 5 == 0)
            {
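                // Detection runs only on every 5th call; the cached faces_detected
                // rectangles are reused for the frames in between.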
                // Get the current frame
                this.frame          = writable_bitmap_to_image(color_frame);
                this.small_frame    = this.frame.Convert <Gray, byte>().Resize(input_width, input_height, Inter.Cubic);
                this.faces_detected = face_finder.DetectMultiScale(small_frame, 1.5, 10, this.face_size);
            }
            // for each face detected
            foreach (System.Drawing.Rectangle f in faces_detected)
            {
                Rect outline = conv_rectangle(f, display_width, display_height);
                dc.DrawRectangle(null, face_outline_pen, outline);

                Image <Gray, byte> face = small_frame.Copy(f).Resize(100, 100, Inter.Cubic);

                if (training_images.Count == 0)
                {
                    add_new_person(frame, face);
                }

                FaceRecognizer.PredictionResult pred = face_recognizer.Predict(face);

                string name;
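                // Accept the match when the distance is below recognized_threshold; if it is also
                // above add_new_training_threshold, keep the face as an extra training sample.
                // Otherwise register the face as a new person.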
                if (pred.Distance < recognized_threshold)
                {
                    name = training_labels[pred.Label];
                    if (pred.Distance > add_new_training_threshold)
                    {
                        add_training_image(face, pred.Label);
                    }
                }
                else
                {
                    int new_label = add_new_person(frame, face);
                    name = training_labels[new_label];
                }
                // Debug.WriteLine("{0} {1} {2}", training_labels[pred.Label], pred.Label, pred.Distance);

                // Draw the label for each face detected and recognized
                dc.DrawText(

                    new FormattedText(
                        name,
                        mainWindow.cultureInfo,
                        FlowDirection.LeftToRight,
                        mainWindow.font,
                        mainWindow.drawTextFontSize,
                        Brushes.White
                        ),
                    conv_point(f.X, f.Y, display_width, display_height)
                    );
            }
            counter++;
        }
Example #17
        private void Cap_ImageGrabbed(object sender, EventArgs e)
        {
            for (int i = 0; i < tfstudent.Count; ++i)
            {
                tfstudent[i] = false;
            }
            FaceRecognizer.PredictionResult predictedLabel = new FaceRecognizer.PredictionResult();
            CascadeClassifier fcas = new CascadeClassifier(fcasname);
            Mat img = new Mat(), imgg = new Mat();

            cap.Retrieve(img);
            image = img.ToImage <Bgr, byte>();
            List <System.Drawing.Rectangle> faces = new List <System.Drawing.Rectangle>();

            CvInvoke.CvtColor(img, imgg, ColorConversion.Bgr2Gray);
            CvInvoke.EqualizeHist(imgg, imgg);
            System.Drawing.Rectangle[] facedetect = fcas.DetectMultiScale(imgg, 1.1, 10, new System.Drawing.Size(20, 20));
            faces.AddRange(facedetect);
            Mat             s_img = new Mat();
            List <coord_id> hs    = new List <coord_id>();

            foreach (System.Drawing.Rectangle f in faces)
            //Parallel.ForEach(faces,(f) =>
            {
                Image <Gray, byte> image2 = new Image <Gray, byte>(image.ToBitmap());
                image2.ROI = f;
                //image2.ToBitmap(100, 100).Save(junk.ToString()+".jpg", ImageFormat.Jpeg);
                // Resize returns a new image; assign it so the prediction runs on the resized face.
                image2 = image2.Resize(MainWindow.widthheight, MainWindow.widthheight, Emgu.CV.CvEnum.Inter.Linear, false);
                s_img          = image2.Mat;
                predictedLabel = face.Predict(s_img);
                //Ghi(predictedLabel.Label.ToString(), 1);
                //Dispatcher.BeginInvoke(new ThreadStart(() => textbox2.Text = junk.ToString()));
                try
                {
                    tfstudent[predictedLabel.Label] = true;
                    hs.Add(new coord_id(predictedLabel.Label, f));
                }
                catch
                {
                    image.Draw(f, new Bgr(0, 183, 149), 15);
                    // Dispatcher.BeginInvoke(new ThreadStart(() => textbox1.Text = predictedLabel.Label.ToString()));
                    continue;
                }
            }
            //++junk;
            if (hs.Count > 0)
            {
                checkseat(hs);
            }
            Dispatcher.Invoke(() =>
            {
                Small_Camera.Source = CreateBitmapSourceFromGdiBitmap(image.Flip(FlipType.Horizontal).Bitmap);
            });
            //Thread.Sleep(500);
        }
Example #18
 public int RecogniseReturnLabel(Image <Gray, byte> Input_image, int Eigen_Thresh = -1)
 {
     try {
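         // Note: the recognizer is retrained from scratch on every call before predicting.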
         recognizer.Train(TrainingImages.ToArray(), ImageLabels.ToArray());
         FaceRecognizer.PredictionResult ER = recognizer.Predict(Input_image);
         return(ER.Label);
     }
     catch
     {
         return(-1);
     }
 }
Example #19
        public String[] Recognize()
        {
            // Reset Names and Areas
            Array.Clear(RecognizeNames, 0, RecognizeNames.Length);
            for (int i = 0; i < RecognizeArea.Length; i++)
            {
                RecognizeArea[i].X      = 0;
                RecognizeArea[i].Y      = 0;
                RecognizeArea[i].Width  = 0;
                RecognizeArea[i].Height = 0;
            }

            // Prevent unknown faces
            if (null == DetectFaces || null == GrayImage)
            {
                return(null);
            }

            // Set names and Areas
            for (int i = 0; i < DetectFaces.Length && i < RecognizeNames.Length; i++)
            {
                // Build a thumbnail
                RecognizeThumbs[i] = GrayImage.Copy(DetectFaces[i]).Resize(100, 100, Emgu.CV.CvEnum.Inter.Cubic);
                RecognizeThumbs[i]._EqualizeHist();

                // Crop first only if not trained
                if (!trained)
                {
                    return(RecognizeNames);
                }

                // Recognize
                FaceRecognizer.PredictionResult ER = Recognizer.Predict(RecognizeThumbs[i]);

                RecognizeNames[i] = UNKNOWN;
                if (ER.Label >= 0)
                {
                    RecognizeDistance = (float)ER.Distance;
                    if (RecognizeDistance > Threshold)
                    {
                        RecognizeNames[i] = trainedLabels[ER.Label];

                        // Build area according to ratio
                        Rectangle r = DetectFaces[i];
                        RecognizeArea[i].X      = r.X * ratio;
                        RecognizeArea[i].Y      = r.Y * ratio;
                        RecognizeArea[i].Width  = r.Width * ratio;
                        RecognizeArea[i].Height = r.Height * ratio;
                    }
                }
            }
            return(RecognizeNames);
        }
    /// <summary>
    /// Recognise a Grayscale Image using the trained Eigen Recogniser
    /// </summary>
    /// <param name="Input_image"></param>
    /// <returns></returns>
    public string Recognise(Image <Gray, byte> Input_image, int Eigen_Thresh = -1)
    {
        if (_IsTrained)
        {
            FaceRecognizer.PredictionResult ER = recognizer.Predict(Input_image);

            if (ER.Label == -1)
            {
                label    = "Unknown";
                Distance = 0;
                return(label);
            }
            else
            {
                label    = Names_List[ER.Label];
                Distance = (float)ER.Distance;
                if (Eigen_Thresh > -1)
                {
                    Eigen_threshold = Eigen_Thresh;
                }

                //Only use the post threshold rule if we are using an Eigen Recognizer
                //since Fisher and LBHP threshold set during the constructor will work correctly
                switch (Recognizer_Type)
                {
                //old variant 25.03

                /*case ("EMGU.CV.EigenFaceRecognizer"):
                 *      if (Distance > Eigen_threshold) return Eigen_label;
                 *      else return "Unknown";*/
                case ("EMGU.CV.EigenFaceRecognizer"):
                    if (Distance > Eigen_threshold)
                    {
                        return(label);
                    }
                    else
                    {
                        return("Unknown");
                    }

                case ("EMGU.CV.LBPHFaceRecognizer"):
                case ("EMGU.CV.FisherFaceRecognizer"):
                default:
                    return(label); //the threshold set in training controls unknowns
                }
            }
        }
        else
        {
            return("");
        }
    }
Example #21
        //----------------------------------------------------------------------------//
        //<<<<<<<<------FUNCTION USED TO DETECT AND RECOGNIZE FACES---------->>>>>>>>
        //----------------------------------------------------------------------------//
        private void DetectAndRecognizeFaces()
        {
            Image <Gray, byte> grayframe = TestImage.Convert <Gray, byte>();

            //Assign user-defined Values to parameter variables:
            MinNeighbors      = int.Parse(comboBoxMinNeigh.Text);    // the 3rd parameter
            WindowsSize       = int.Parse(textBoxWinSiz.Text);       // the 5th parameter
            ScaleIncreaseRate = Double.Parse(comboBoxScIncRte.Text); //the 2nd parameter

            //detect faces in the gray-scale image; DetectMultiScale returns an array of Rectangle structures
            var faces = haar.DetectMultiScale(grayframe, ScaleIncreaseRate, MinNeighbors);

            MessageBox.Show("Total Faces Detected: " + faces.Length.ToString());

            Bitmap   BmpInput = grayframe.ToBitmap();
            Bitmap   ExtractedFace; // an empty "box"/"image" to hold the extracted face.
            Graphics g;

            //MCvFont font = new MCvFont(FONT.CV_FONT_HERSHEY_TRIPLEX, 0.5d, 0.5d);

            //draw a green rectangle on each detected face in image
            foreach (Rectangle face in faces)
            {
                //locate the detected face & mark with a rectangle
                TestImage.Draw(face, new Bgr(Color.Green), 3);
                CamImageBox.Image = TestImage;
                //set the size of the empty box(ExtractedFace) which will later contain the detected face
                ExtractedFace = new Bitmap(face.Width, face.Height);

                //assign the empty box to graphics for painting
                g = Graphics.FromImage(ExtractedFace);
                //graphics fills the empty box with exact pixels of the face to be extracted from input image
                g.DrawImage(BmpInput, 0, 0, face, GraphicsUnit.Pixel);
                try
                {
                    Image <Bgr, byte>  input          = new Image <Bgr, byte>(new Bitmap(ExtractedFace));
                    Image <Gray, byte> modinput       = input.Convert <Gray, byte>().Resize(64, 64, Emgu.CV.CvEnum.Inter.Cubic);
                    FaceRecognizer.PredictionResult a = recog.Predict(modinput);
                    MessageBox.Show(a.Label.ToString() + " " + a.Distance.ToString());
                }
                catch (Exception e)
                {
                    MessageBox.Show(e.Message);
                }
            }


            //Display the detected faces in imagebox
            CamImageBox.Image = TestImage;

            //MessageBox.Show(faces.Length.ToString()+ " Face(s) Extracted sucessfully!");
        }
Example #22
        public int RecognizeFace(Image <Gray, byte> image)
        {
            EigenFaceRecognizer eigen = OldEigen();

            FaceRecognizer.PredictionResult result = eigen.Predict(image);

            Console.WriteLine("ID: " + result.Label + ", " + "Threshold:  " + result.Distance);
            if (result.Label != -1 && result.Distance < treshold)//int.Parse(ConfigurationManager.AppSettings["RecognizerThreshold"]))
            {
                return(result.Label);
            }
            return(0);
        }
Example #23
 private static string GetUserName(FaceRecognizer.PredictionResult PR)
 {
     if (PR.Label == -1)
     {
         return("Unknown");
     }
     else
     {
         StudentBusiness StuBO   = new StudentBusiness();
         var             Student = StuBO.GetStudentByID(PR.Label);
         return(Student.StudentCode + " - " + Student.FullName);
     }
 }
 private void FaceRecognition()
 {
     if (imageList.Size != 0)
     {
         //Eigen Face Algorithm
         FaceRecognizer.PredictionResult result = recognizer.Predict(detectedFace.Resize(100, 100, Inter.Cubic));
         FaceName          = nameList[result.Label];
         CameraCaptureFace = detectedFace.ToBitmap();
     }
     else
     {
         FaceName = "Please Add Face";
     }
 }
Example #25
        /// <summary>
        /// Recognize a Grayscale Image using the trained Eigen Recognizer
        /// </summary>
        /// <param name="Input_image"></param>
        /// <returns></returns>
        public string Recognize(Image <Gray, byte> Input_image, int Eigen_Thresh = -1)
        {
            if (_IsTrained)
            {
                FaceRecognizer.PredictionResult ER = recognizer.Predict(Input_image);

                if (ER.Label == -1)
                {
                    Eigen_label    = "Unknown";
                    Eigen_Distance = 0;
                    return(Eigen_label);
                }
                else
                {
                    Eigen_label    = Rfid_List[ER.Label];
                    Eigen_Distance = (float)ER.Distance;
                    Debug.WriteLine("DISTANCE: " + Eigen_Distance);
                    if (Eigen_Thresh > -1)
                    {
                        Eigen_threshold = Eigen_Thresh;
                    }

                    //if (trainingImages.Count() < 30) return Eigen_label;

                    //Only use the post threshold rule if we are using an Eigen Recognizer
                    //since the Fisher and LBPH thresholds set during the constructor will work correctly
                    switch (Recognizer_Type)
                    {
                    case ("EMGU.CV.EigenFaceRecognizer"):
                        if (Eigen_Distance < Eigen_threshold)
                        {
                            return(Eigen_label);
                        }
                        else
                        {
                            return("Unknown");
                        }

                    case ("EMGU.CV.LBPHFaceRecognizer"):
                    case ("EMGU.CV.FisherFaceRecognizer"):
                    default:
                        return(Eigen_label);    //the threshold set in training controls unknowns
                    }
                }
            }
            else
            {
                return("");
            }
        }
Example #26
    //Method that performs the face recognition(**)
    public string Recognition(Image <Gray, byte> Input_image)
    {
        if (_DizinKontrol)//proceed with recognition only if the directory-check value is true
        {
            //Performs the recognition. The Predict function takes the face to be recognized as its input parameter.(**)
            FaceRecognizer.PredictionResult ER = recognizer.Predict(Input_image);

            //if the recognition result is -1, it means the person detected by the camera was not recognized.(**)
            if (ER.Label == -1)
            {
                AdSoyad_label = "TANINMADI"; // "NOT RECOGNIZED"
                YuzDistance   = 0;
                return(AdSoyad_label); //if the face was not recognized, the "TANINMADI" ("NOT RECOGNIZED") message is returned.(**)
            }
            else
            {
                AdSoyad_label = AdSoyadList[ER.Label]; //if the face was recognized, this retrieves that person's name and surname(**)
                YuzDistance   = (float)ER.Distance;    //the recognized face's distance value is stored in YuzDistance.(**)


                // if (Eigen_Thresh > -1) OzyuzEsikDeger = Eigen_Thresh;//left as a comment line(**)

                //the threshold value is only applied when the Eigenfaces algorithm is used.(**)
                //TanimaTuru changes with the recognition method selected on the form screen, which is why it was declared public.(**)
                switch (TanimaTuru)
                {
                case ("EMGU.CV.EigenFaceRecognizer"):
                    //if the face's distance value is greater than the threshold, the name and surname are returned.(**)
                    if (YuzDistance > OzyuzEsikDeger)
                    {
                        return(AdSoyad_label);
                    }
                    else
                    {
                        return("TANINMADI"); // "NOT RECOGNIZED"
                    }

                case ("EMGU.CV.LBPHFaceRecognizer"):
                case ("EMGU.CV.FisherFaceRecognizer"):
                default:
                    return(AdSoyad_label);
                }
            }
        }
        else
        {
            return("");
        }
    }
Example #27
        public static List <RecognitionModel> Recognize(IInputArray image, String uri)
        {
            if (training_sets.Exists(x => x.FullImageUri == uri))
            {
                return(new List <RecognitionModel>());
            }
            List <Rectangle> faces = new List <Rectangle>();
            //List<Rectangle> eyes = new List<Rectangle>();
            long detectionTime;

            Detect(image, "haarcascade_frontalface_default.xml", /*"haarcascade_eye.xml",*/ faces, /*eyes,*/ out detectionTime);
            List <RecognitionModel> results = new List <RecognitionModel>();

            foreach (Rectangle r in faces)
            {
                IImage face = new UMat((UMat)image, r);
                Directory.CreateDirectory("training_set\\");
                String path = "training_set\\" + Path.GetRandomFileName() + ".jpg";
                new Image <Bgr, byte>(face.Bitmap).Resize(100, 100, Inter.Cubic).Save(path);
                RecognitionModel rm = new RecognitionModel(0, path, uri, false);
                if (Trained)
                {
                    using (Image <Gray, Byte> f = new Image <Gray, byte>(face.Bitmap))
                    {
                        FaceRecognizer.PredictionResult predictionResults = Recognizer.Predict(f.Resize(100, 100, Inter.Cubic));
                        Console.WriteLine(predictionResults.Distance + "     " + predictionResults.Label);
                        rm.Distance = predictionResults.Distance;
                        if (predictionResults.Distance < 6000 && dic_labels.ContainsKey(predictionResults.Label))
                        {
                            rm.LabelInt = predictionResults.Label;
                        }
                        else
                        {
                            rm.LabelInt = 0;
                        }
                    }
                }
                else
                {
                    rm.LabelInt = 0;
                }

                results.Add(rm);
                training_sets.Add(rm);
            }
            return(results);
        }
Example #28
        private Image <Bgr, byte> DetectFace(Image <Bgr, byte> frameImage)
        {
            bool flag = frameImage != null;

            if (flag)
            {
                Image <Gray, byte> grayImage = frameImage.Convert <Gray, byte>();
                Rectangle[]        eyes      = this.cascadeClassifierForEye.DetectMultiScale(grayImage, 1.1, 12, Size.Empty, default(Size));
                foreach (Rectangle eye in eyes)
                {
                    frameImage.GrabCut(eye, 1);
                    frameImage.Draw(eye, new Bgr(Color.Red), 3, LineType.EightConnected, 0);
                }
                Rectangle[] faces  = this.cascadeClassifierForFace.DetectMultiScale(grayImage, 1.1, 12, Size.Empty, default(Size));
                Rectangle[] array2 = faces;
                for (int j = 0; j < array2.Length; j++)
                {
                    Rectangle          face  = array2[j];
                    Image <Gray, byte> face2 = grayImage.GetSubRect(face);
                    frameImage.GrabCut(face, 1);
                    frameImage.Draw(face, new Bgr(Color.Red), 3, LineType.EightConnected, 0);
                    face2._EqualizeHist();
                    FaceRecognizer.PredictionResult result = this.faceRecognizer.Predict(face2.Resize(100, 100, Inter.Cubic));
                    Console.WriteLine(result.Label);
                    string path = Directory.GetDirectories("img")[result.Label];
                    base.Invoke(new Action(delegate()
                    {
                        // Format string: "The current user is: {0}, similarity: {1}"
                        this.label1.Text = string.Format("当前用户是:{0},相似度:{1}", path.Split(new char[]
                        {
                            '\\'
                        }).Last <string>(), result.Distance);
                    }));
                    string txt = string.Format("{0},{1}", path.Split(new char[]
                    {
                        '\\'
                    }).Last <string>(), result.Distance);
                    Font       font     = new Font("宋体", 60f, GraphicsUnit.Pixel);
                    SolidBrush fontLine = new SolidBrush(Color.Blue);

                    float xPos = (float)(face.X + (face.Width / 2 - txt.Length * 14 / 2));
                    float yPos = (float)(face.Y - 21);
                    frameImage.Draw(txt, new Point((int)xPos, (int)yPos), FontFace.HersheyComplex, 1.0, new Bgr(Color.AntiqueWhite), 1, LineType.AntiAlias);
                }
            }
            return(frameImage);
        }
        public static String Recognize(Image <Gray, byte> source, int threshold = -1)
        {
            //if the threshold parameter is set (i.e. not the default of -1), use it as the threshold;
            //otherwise, keep using EigenThreshold's current value (2000)
            if (threshold > -1)
            {
                EigenThreshold = threshold;
            }

            //normalize and resize image
            Image <Gray, byte> face = source.Resize(width, height, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC, false);

            face._EqualizeHist();

            try
            {
                String eigenLabel = "";
                FaceRecognizer.PredictionResult result = faceRecognizer.Predict(face);
                if (result.Label == -1)
                {
                    return("Unknown -1");
                }

                else
                {
                    float eigenDistance;
                    //result is INT, we need STRING
                    eigenLabel    = labels[result.Label];
                    eigenDistance = (float)result.Distance;
                    if (eigenDistance > EigenThreshold)
                    {
                        return(eigenLabel);
                    }
                    else
                    {
                        return("Unknown " + eigenDistance);
                    }
                }
            }
            catch (Exception e)
            {
                MessageBox.Show(e.Message);
            }

            return("Unknown -2");
        }
        /// <summary>
        /// Recognize a face
        /// </summary>
        /// <param name="bitmap">Image containing the face</param>
        /// <returns>Id of the recognized face</returns>
        public string RecognizeFace(Bitmap bitmap)
        {
            Console.WriteLine("[{0}] Recognizing face...", DateTime.Now.ToString("HH:mm:ss"));
            ///[Recognizing face]
            FaceRecognizer.PredictionResult predictionResult = m_faceRecognizer.Predict(new Image <Gray, byte>(bitmap));
            if (predictionResult.Label == -1)
            {
                return(string.Empty);
            }

            string id = FaceRepository.GetId(predictionResult.Label);

            ///[Recognizing face]
            if (!string.IsNullOrEmpty(id))
            {
                Console.WriteLine("[{0}] face id {1} was recognized...", DateTime.Now.ToString("HH:mm:ss"), id);
            }
            return(id);
        }