/// <summary>
/// Initializes a new instance of the <see cref="ACFaceRecognition"/> class,
/// creating the empty numeric and string label lists for the image database.
/// </summary>
public ACFaceRecognition()
{
    // Maximum number of faces per photo is 30 (see MAX_FACES_IN_IMAGE).
    // The original also allocated an MCvAvgComp[MAX_FACES_IN_IMAGE][] local
    // here, but it was never read — removed as dead code.
    lIntDatabaseImagesLabels = new List<int>();
    lStringDatabaseImagesLabels = new List<string>();
}
/// <summary>
/// Detects eyes inside the given face region and outlines each detected eye
/// on the color frame with a red rectangle.
/// </summary>
/// <param name="image">Color frame the eye rectangles are drawn on.</param>
/// <param name="gray">Grayscale version of the frame used for detection.</param>
/// <param name="f">Detected face whose rectangle bounds the eye search.</param>
/// <param name="eye">Haar cascade trained for eye detection.</param>
private static void DetectAndDrawEyes(Image<Bgr, byte> image, Image<Gray, byte> gray, MCvAvgComp f, HaarCascade eye)
{
    MCvAvgComp[][] eyesDetected;
    gray.ROI = f.rect;
    try
    {
        eyesDetected = gray.DetectHaarCascade(
            eye,
            1.1,
            10,
            Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
            new Size(20, 20));
    }
    finally
    {
        // Always clear the ROI — the original left the shared gray image
        // restricted to this face's rectangle if detection threw.
        gray.ROI = Rectangle.Empty;
    }

    foreach (MCvAvgComp e in eyesDetected[0])
    {
        // Eye coordinates are relative to the face ROI; shift them back
        // into full-frame coordinates before drawing.
        Rectangle eyeRect = e.rect;
        eyeRect.Offset(f.rect.X, f.rect.Y);
        image.Draw(eyeRect, new Bgr(Color.Red), 2);
    }
}
/// <summary>
/// Searches the lower half of a detected face for mouth candidates and
/// returns the candidate with the largest bounding-rectangle area.
/// NOTE(review): throws if the cascade finds no candidate (candidates[0]) —
/// confirm callers only invoke this when a mouth is expected.
/// </summary>
/// <param name="frame">The color frame containing the face.</param>
/// <param name="face">The detected face to search within.</param>
/// <returns>The largest mouth candidate, in ROI-relative coordinates.</returns>
private MCvAvgComp GetMouth(Image<Bgr, byte> frame, MCvAvgComp face)
{
    var gray = frame.Convert<Gray, byte>();

    // Mouths only occur in the lower half of a face, so restrict the search there.
    gray.ROI = new Rectangle(
        face.rect.X,
        face.rect.Y + (face.rect.Height / 2),
        face.rect.Width,
        face.rect.Height / 2);

    var candidates = this.mouthCascade.Detect(gray);

    // Keep the candidate covering the largest area; ties keep the earlier one.
    var best = candidates[0];
    for (var i = 1; i < candidates.Length; i++)
    {
        var candidate = candidates[i];
        if (candidate.rect.Width * candidate.rect.Height > best.rect.Width * best.rect.Height)
        {
            best = candidate;
        }
    }

    return best;
}
/// <summary>
/// Draws the outline of a detected mouth onto the frame.
/// </summary>
/// <param name="frame">The color frame to draw on.</param>
/// <param name="face">The face the mouth belongs to.</param>
/// <param name="mouth">The mouth, in coordinates relative to the lower-half search ROI.</param>
/// <param name="color">The outline color.</param>
private void DrawMouth(Image<Bgr, byte> frame, MCvAvgComp face, MCvAvgComp mouth, Color color)
{
    // Mouth coordinates are relative to the lower-half search area used during
    // detection; translate them back into full-frame coordinates.
    var rect = mouth.rect;
    rect.Offset(face.rect.X, face.rect.Y + (face.rect.Height / 2));
    frame.Draw(rect, new Bgr(color), 2);
}
/// <summary>
/// Runs Eigenface recognition against the most recently extracted face image
/// (<c>resultface</c>) and returns the matched label, or an empty string when
/// the training set is empty.
/// </summary>
/// <param name="f">
/// The detected face component. Unused by the live code path; kept for
/// signature compatibility (the removed dead code drew per-feature labels at
/// offsets from this rectangle).
/// </param>
/// <returns>The recognized name, or an empty string.</returns>
private string recognizerall(MCvAvgComp f)
{
    string name = "";

    // Materialize the training set once — the original called ToArray() twice
    // (once for the emptiness check, once for the recognizer).
    var faces = trainingImages.ToArray();
    if (faces.Length != 0)
    {
        // Termination criteria for recognition scale with the number of
        // trained images ('tong'); eps 0.6 is the accuracy threshold.
        MCvTermCriteria termCrit = new MCvTermCriteria(tong, 0.6);

        // Eigenface recognizer; 2000 is the distance threshold for a match.
        EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
            faces,
            labels.ToArray(),
            2000,
            ref termCrit);

        name = recognizer.Recognize(resultface);
    }

    // A large commented-out block (per-feature nose/mouth/eye recognizers and
    // a majority vote over 5 labels) was removed as dead code; the four extra
    // MCvTermCriteria locals and the string[5] buffer existed only for it.
    return name;
}
/// <summary>
/// Runs bit-pattern (dye) analysis on the detected human rectangle, using the
/// parallel analyzer when the "Parallel" checkbox is ticked and the
/// sequential one otherwise.
/// </summary>
/// <param name="acHum">Haar detection result delimiting the region to analyze.</param>
/// <returns>The result of the dye analysis.</returns>
private bool GetCoordination(MCvAvgComp acHum)
{
    var region = acHum.rect;

    if (CB_Parallel.Checked)
    {
        return BitAnalysis_P.StartDye(region.X, region.Y, region.Width, region.Height, imgGray);
    }

    // Sequential analysis in Haar detection.
    return BitAnalysis.StartDye(region.X, region.Y, region.Width, region.Height, imgGray);
}
/// <summary>
/// Initializes a new instance of the <see cref="DetectorResult"/> class with
/// the detections, the scale factor, and the processed image.
/// </summary>
/// <param name="results">The detected components.</param>
/// <param name="factor">The scale factor used during detection.</param>
/// <param name="image">The processed image the detections belong to.</param>
public DetectorResult(MCvAvgComp[] results, int factor, Image<Bgr, byte> image)
    : this(results, factor) // chain to avoid duplicating field assignments
{
    this.Image = image;
}
/// <summary>
/// Initializes a new instance of the <see cref="DetectorResult"/> class with
/// the detections and the scale factor.
/// </summary>
/// <param name="results">The detected components.</param>
/// <param name="factor">The scale factor used during detection.</param>
public DetectorResult(MCvAvgComp[] results, int factor)
    : this(results) // chain to avoid duplicating field assignments
{
    this.factor = factor;
}
/// <summary>
/// Initializes a new instance of the <see cref="DetectorResult"/> class
/// holding only the raw Haar detection results.
/// </summary>
/// <param name="results">The detected components.</param>
public DetectorResult(MCvAvgComp[] results) { this.results = results; }
/// <summary>
/// This method returns the 2 ROIs that are more "aligned", i.e. with the
/// lowest difference in the Y coordinate.
/// It calculates the distances between all the ROIs and selects the appropriate pair.
/// </summary>
/// <param name="ROIs">Candidate regions (upper-triangle pairs are compared).</param>
/// <returns>An array of exactly two ROIs with the smallest vertical offset.</returns>
private MCvAvgComp[] AlignedROIs(MCvAvgComp[] ROIs)
{
    MCvAvgComp[] alignedROIs = new MCvAvgComp[2];
    int N = ROIs.Length;

    // Pairwise |deltaY| matrix, initialized to a large sentinel (100000) so
    // pairs that never qualify below are never picked as the minimum.
    Matrix<int> distancesY = new Matrix<int>(N, N);
    distancesY = distancesY.Add(100000);

    double minimum;
    double maximum;
    Point minimumLocation;
    Point maximumLocation;

    for (int i = 0; i < N; i++)
    {
        for (int j = i + 1; j < N; j++)
        {
            // If both rectangles do not intersect, we add their distance to the matrix
            // MT: Min distance of 2 x roi.width
            // NOTE(review): the comment above says 2x width but the code uses
            // 2.5x — confirm which threshold is intended.
            if (ROIs[j].rect.IntersectsWith(ROIs[i].rect) == false && Math.Abs(ROIs[j].rect.X - ROIs[i].rect.X) > ROIs[j].rect.Width * 2.5)
                distancesY[i, j] = Math.Abs(ROIs[j].rect.Y - ROIs[i].rect.Y);
        }
    }

    // Pick the pair with the smallest vertical distance.
    // NOTE(review): assumes MinMax reports the minimum's location as
    // (column, row) so the two indices address the pair (j, i). If NO pair
    // qualified, the sentinel value wins and an arbitrary pair is returned —
    // verify callers guarantee N >= 2 and at least one valid pair.
    distancesY.MinMax(out minimum, out maximum, out minimumLocation, out maximumLocation);
    alignedROIs[0] = ROIs[minimumLocation.X];
    alignedROIs[1] = ROIs[minimumLocation.Y];

    return alignedROIs;
}
/// <summary>
/// Computes the estimated eye-search band of a face from facial-proportion
/// metrics and draws its boundary lines on the current frame in white.
/// The face component itself is returned unchanged.
/// </summary>
/// <param name="f">The detected face.</param>
/// <returns>The same face component that was passed in.</returns>
private MCvAvgComp DrawEyes(MCvAvgComp f)
{
    // Eye search band: starts 3/11 of the face height from the top and
    // spans 2/9 of the face height.
    Int32 StartSearchEyes = f.rect.Top + (f.rect.Height * 3 / 11);
    Point startingPointSearchEyes = new Point(f.rect.X, StartSearchEyes);
    Point endingPointSearchEyes = new Point((f.rect.X + f.rect.Width), StartSearchEyes);
    Size searchEyesAreaSize = new Size(f.rect.Width, (f.rect.Height * 2 / 9));
    Point lowerEyesPointOptimized = new Point(f.rect.X, StartSearchEyes + searchEyesAreaSize.Height);
    Point startingLeftEyePointOptimized = new Point(f.rect.X + f.rect.Width / 2, StartSearchEyes);

    // Removed: eyeAreaSize and the three possibleROI_* rectangles were
    // computed but never used by the live code, and a commented-out block of
    // frame.Draw label calls had un-commented continuation lines that would
    // not compile. Drawing behavior below is unchanged.

    #region Drawing Utilities
    // Upper boundary of the eye search area.
    currentFrame.Draw(new LineSegment2D(startingPointSearchEyes, endingPointSearchEyes), new Bgr(Color.White), 3);

    // Lower boundary.
    currentFrame.Draw(
        new LineSegment2D(
            lowerEyesPointOptimized,
            new Point((lowerEyesPointOptimized.X + f.rect.Width), (StartSearchEyes + searchEyesAreaSize.Height))),
        new Bgr(Color.White),
        3);

    // Vertical divider between the left and right eye areas.
    currentFrame.Draw(
        new LineSegment2D(
            startingLeftEyePointOptimized,
            new Point(startingLeftEyePointOptimized.X, (StartSearchEyes + searchEyesAreaSize.Height))),
        new Bgr(Color.White),
        3);
    #endregion

    return f;
}
/// <summary>
/// Lazily extracts a grayscale crop of every face detected in the current
/// frame. NOTE: this is a deferred iterator — nothing executes (and
/// <c>TrainedFace</c> is not updated) until the returned sequence is
/// enumerated, and each iteration overwrites the shared <c>TrainedFace</c>
/// field with the most recently yielded face.
/// </summary>
/// <param name="facesDetected">
/// Haar detection results; only the first result set (index 0) is used.
/// </param>
/// <returns>One grayscale face image per detected face.</returns>
private IEnumerable<Image<Gray, byte>> TrainFaces(MCvAvgComp[][] facesDetected)
{
    foreach (MCvAvgComp f in facesDetected[0])
    {
        // Side effect: the instance field tracks the latest extracted face.
        TrainedFace = currentFrame.Copy(f.rect).Convert<Gray, byte>();
        yield return TrainedFace;
    }
}
/// <summary>
/// Adds a 50x50 clickable thumbnail of the detected face to the flow layout
/// panel. The face rectangle and recognized name are stored in the control's
/// Tag for the click and context-menu handlers to retrieve.
/// </summary>
/// <param name="result">The frame the face was detected in.</param>
/// <param name="f">The detected face component.</param>
/// <param name="name">The recognized name associated with the face.</param>
private void addToFlow(Image<Bgr, byte> result, MCvAvgComp f, string name)
{
    var pbox = new PictureBox
    {
        // Tag carries [x, y, width, height] plus the name for later retrieval.
        Tag = new object[] { new int[] { f.rect.X, f.rect.Y, f.rect.Width, f.rect.Height }, name },
        ContextMenuStrip = contextMenuStrip1,
        SizeMode = PictureBoxSizeMode.StretchImage,
        Height = 50,
        Width = 50,
        Cursor = Cursors.Hand,
        Image = result.Copy(f.rect).Bitmap
    };

    // Method-group subscription; the explicit EventHandler wrapper was redundant.
    pbox.Click += pbox_Click;

    flowLayoutPanel1.Controls.Add(pbox);
}