// Return a list of rectangles with detected faces
private List<Rectangle> FaceDetection(string imageFileName)
{
    string classifierData = @"%inetroot%\private\research\private\CollaborativeLibs_01\LibFaceDetect\FaceDetect\Classifier\classifier.txt";
    float detectionThreshold = 0.0F;
    FaceDetector detector = new FaceDetector(classifierData, true, detectionThreshold);

    // Run detection
    DetectionResult detectionResult = detector.DetectObject(imageFileName);
    List<ScoredRect> scoredResultList = detectionResult.GetMergedRectList(0.0F);
    if (scoredResultList.Count <= 0)
    {
        return null;
    }

    // Copy the scored detection rectangles into plain System.Drawing rectangles
    List<Rectangle> faceRects = new List<Rectangle>();
    foreach (ScoredRect scoredRect in scoredResultList)
    {
        Rectangle rect = new Rectangle();
        rect.X = scoredRect.X;
        rect.Y = scoredRect.Y;
        rect.Width = scoredRect.Width;
        rect.Height = scoredRect.Height;
        faceRects.Add(rect);
    }
    return faceRects;
}
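// Hypothetical usage sketch (not part of the original sources): draws the
// rectangles returned by FaceDetection above onto a copy of the image and
// saves the result. The method name DrawDetectedFaces and the output path
// handling are illustrative assumptions; only System.Drawing calls are used.
private void DrawDetectedFaces(string imageFileName, string outputFileName)
{
    List<Rectangle> faceRects = FaceDetection(imageFileName);
    if (null == faceRects)
    {
        return;   // no faces found
    }

    using (Image image = Image.FromFile(imageFileName))
    using (Bitmap canvas = new Bitmap(image))              // draw on a copy of the image
    using (Graphics graphics = Graphics.FromImage(canvas))
    using (Pen pen = new Pen(Color.Red, 2.0F))
    {
        foreach (Rectangle rect in faceRects)
        {
            graphics.DrawRectangle(pen, rect);
        }
        canvas.Save(outputFileName);
    }
}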
// Generate training data: map each set of labelled feature points to the
// detected face rectangle that contains both eyes, then process that face.
static private int GenerateData(string imageFileName, List<FeaturePts> featureList)
{
    if (null == imageFileName)
    {
        return 0;
    }

    DetectionResult detectionResult = _detector.DetectObject(imageFileName);
    List<ScoredRect> scoredResultList = detectionResult.GetMergedRectList(0.0F);

    int imageCount = 0;
    if (null != featureList && scoredResultList.Count > 0)
    {
        foreach (FeaturePts features in featureList)
        {
            System.Drawing.PointF point = (System.Drawing.PointF)features.ptLeftEye;
            Point leftEye = new Point(point.X, point.Y);
            point = (System.Drawing.PointF)features.ptRightEye;
            Point rightEye = new Point(point.X, point.Y);

            foreach (ScoredRect scoredRect in scoredResultList)
            {
                Rect rect = new Rect();
                rect.X = scoredRect.X;
                rect.Y = scoredRect.Y;
                rect.Width = scoredRect.Width;
                rect.Height = scoredRect.Height;

                if (rect.Contains(leftEye) && rect.Contains(rightEye))
                {
                    // Normalize the eye positions to the face rectangle
                    // (both offsets are divided by the rectangle width, as in the original code)
                    leftEye.X = (leftEye.X - rect.X) / rect.Width;
                    leftEye.Y = (leftEye.Y - rect.Y) / rect.Width;
                    rightEye.X = (rightEye.X - rect.X) / rect.Width;
                    rightEye.Y = (rightEye.Y - rect.Y) / rect.Width;

                    FaceDisp.FaceData faceData = new FaceDisp.FaceData(features.ptLeftEye,
                                                                       features.ptRightEye,
                                                                       features.ptNose,
                                                                       features.ptLeftMouth,
                                                                       features.ptRightMouth,
                                                                       rect);
                    //ProcessFace(imageFileName, rect, leftEye, rightEye, 0);
                    ProcessFace(imageFileName, rect, faceData, 0);
                    ++imageCount;
                    break;
                }
            }
        }
    }
    return imageCount;
}
public void DetectFile(string file)
{
    photoImage = Image.FromFile(file);
    imageScale = 1;
    photoRect = new Rectangle(0, 0,
                              (int)(imageScale * photoImage.Size.Width),
                              (int)(imageScale * photoImage.Size.Height));

    DateTime start = DateTime.Now;
    faceDetector.SetTargetDimension(640, 480);

    // Run face detection
    DetectionResult detectionResult = faceDetector.DetectObject(file);
    faceDetectRects = detectionResult.GetMergedRectList(detectionThreshold);
    TimeSpan detectTime = new TimeSpan(DateTime.Now.Ticks - start.Ticks);

    leftEyeRects.Clear();
    rightEyeRects.Clear();
    RunEyeDetection();
}
/// <summary>
/// Initialize a photo - run face detection
/// </summary>
/// <param name="mainCanvas">Main canvas reference</param>
/// <param name="filename">Full path name to image file</param>
public int InitializeWithFaceDetection(BackgroundCanvas mainCanvas, string filename)
{
    // Create the detector lazily from the options dialog settings
    if (null == _detector)
    {
        _detector = new FaceDetector(mainCanvas.OptionDialog.FaceDetectorDataPath,
                                     true,
                                     mainCanvas.OptionDialog.FaceDetectorThreshold);
    }
    _detector.SetTargetDimension(mainCanvas.OptionDialog.FaceDetectTargetWidth,
                                 mainCanvas.OptionDialog.FaceDetectTargetHeight);

    DetectionResult detectionResult = _detector.DetectObject(filename);
    List<ScoredRect> scoredResultList = detectionResult.GetMergedRectList(0.0F);
    if (scoredResultList.Count <= 0)
    {
        return 0;
    }

    List<Rect> faceRects = new List<Rect>();
    foreach (ScoredRect scoredRect in scoredResultList)
    {
        Rect rect = new Rect();
        rect.X = scoredRect.X;
        rect.Y = scoredRect.Y;
        rect.Width = scoredRect.Width;
        rect.Height = scoredRect.Height;
        faceRects.Add(rect);
    }

    _targetRect = new Rect();
    _faceDisplayWidth = mainCanvas.OptionDialog.FaceDisplayWidth;
    _defaultDPI = mainCanvas.OptionDialog.DefaultDPI;

    return InitializeInternal(mainCanvas, filename, faceRects,
                              mainCanvas.OptionDialog.BorderWidth, null);
}
private void DetectFile(string file)
{
    photoImage = Image.FromFile(file);
    imageScale = Math.Min((float)pictureBox1.Size.Width / photoImage.Size.Width,
                          (float)pictureBox1.Size.Height / photoImage.Size.Height);
    photoRect = new Rectangle(0, 0,
                              (int)(imageScale * photoImage.Size.Width),
                              (int)(imageScale * photoImage.Size.Height));
    pictureBoxGraphics.Clear(Color.White);

    DateTime start = DateTime.Now;
    faceDetector.SetTargetDimension(640, 480);

    // Run face detection. There are a few ways of doing this and they should
    // all yield the same result; which one to use depends on the form of your
    // image data. Try the different overloads by uncommenting below.

    // Method 1 - Directly from a System.Drawing.Imaging object.
    // Note: only underlying data formats that have 1 byte per colour plane are supported.
    Bitmap bitmap = new Bitmap(photoImage);
    BitmapData bitmapdata = bitmap.LockBits(new Rectangle(0, 0, bitmap.Width, bitmap.Height),
                                            System.Drawing.Imaging.ImageLockMode.ReadOnly,
                                            bitmap.PixelFormat);
    DetectionResult detectionResult = faceDetector.DetectObject(bitmapdata);
    bitmap.UnlockBits(bitmapdata);

    // Method 2 - Use the image name. Works for jpg and some other common formats,
    // but the supported formats are not as broad as what the Windows decoders support.
    //DetectionResult detectionResult = faceDetector.DetectObject(imageFile);

    // Method 3 - Directly from a byte array. This code is included for illustration only;
    // it is not a suggested way of actually doing this.
    //Bitmap bitmap = new Bitmap(photoImage);
    //BitmapData bitmapdata = bitmap.LockBits(new Rectangle(0, 0, bitmap.Width, bitmap.Height),
    //                                        System.Drawing.Imaging.ImageLockMode.ReadOnly,
    //                                        PixelFormat.Format24bppRgb);
    //int byteCount = bitmapdata.Height * bitmapdata.Stride;
    //byte[] bytes = new byte[byteCount];
    //System.Runtime.InteropServices.Marshal.Copy(bitmapdata.Scan0, bytes, 0, byteCount);
    //DetectionResult detectionResult = faceDetector.DetectObject(bitmapdata.Width,
    //                                                            bitmapdata.Height,
    //                                                            bitmapdata.Stride,
    //                                                            3,       // 3 bytes per pixel
    //                                                            bytes);
    //bitmap.UnlockBits(bitmapdata);

    faceDetectRects = detectionResult.GetMergedRectList((float)numericUpDownFaceDetectThreshold.Value);
    TimeSpan detectTime = new TimeSpan(DateTime.Now.Ticks - start.Ticks);
    // Report the total elapsed time (TimeSpan.Milliseconds would only give the millisecond component)
    textBoxFaceDetectTime.Text = detectTime.TotalMilliseconds.ToString("F0");

    leftEyeRects.Clear();
    rightEyeRects.Clear();
    noseRects.Clear();
    leftMouthRects.Clear();
    rightMouthRects.Clear();

    if (true == checkBoxEyeDetect.Checked)
    {
        RunEyeDetection();
    }
}
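// Hypothetical helper sketch (not in the original sources): wraps the
// BitmapData overload of FaceDetector.DetectObject used in Method 1 above so
// that the locked bits are always released, even if detection throws. The
// method name DetectFromImage is an illustrative assumption.
private static DetectionResult DetectFromImage(FaceDetector detector, Image image)
{
    using (Bitmap bitmap = new Bitmap(image))
    {
        BitmapData bitmapData = bitmap.LockBits(new Rectangle(0, 0, bitmap.Width, bitmap.Height),
                                                System.Drawing.Imaging.ImageLockMode.ReadOnly,
                                                bitmap.PixelFormat);
        try
        {
            return detector.DetectObject(bitmapData);
        }
        finally
        {
            bitmap.UnlockBits(bitmapData);
        }
    }
}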
// Report detection results together with the labelled feature points: for each
// face rectangle that contains both eyes, write the rectangle, the eye positions
// and the normalized feature points to the output stream.
private int ReportFileUsingFeats(string imageFileName, List<FeaturePts> featureList)
{
    DetectionResult detectionResult = _detector.DetectObject(imageFileName);
    List<ScoredRect> scoredResultList = detectionResult.GetMergedRectList(0.0F);

    int imageCount = 0;
    if (null != featureList && scoredResultList.Count > 0)
    {
        if (true != ReadPhoto(imageFileName))
        {
            return 0;
        }

        foreach (FeaturePts features in featureList)
        {
            Point leftEye = FeaturePtToPoint((System.Drawing.PointF)features.ptLeftEye);
            Point rightEye = FeaturePtToPoint((System.Drawing.PointF)features.ptRightEye);
            Point nose = FeaturePtToPoint((System.Drawing.PointF)features.ptNose);
            Point leftMouth = FeaturePtToPoint((System.Drawing.PointF)features.ptLeftMouth);
            Point rightMouth = FeaturePtToPoint((System.Drawing.PointF)features.ptRightMouth);

            foreach (ScoredRect scoredRect in scoredResultList)
            {
                Rect rect = new Rect();
                rect.X = scoredRect.X;
                rect.Y = scoredRect.Y;
                rect.Width = scoredRect.Width;
                rect.Height = scoredRect.Height;

                if (rect.Contains(leftEye) && rect.Contains(rightEye))
                {
                    // Write the file name, then the face rectangle and eye positions
                    _outStream.WriteLine("{0}", imageFileName);
                    _outStream.Write("{0} {1} {2} {3} ",
                                     (int)rect.X, (int)rect.Y, (int)rect.Width, (int)rect.Height);
                    _outStream.Write("{0:F3} {1:F3} {2:F3} {3:F3} ",
                                     leftEye.X, leftEye.Y, rightEye.X, rightEye.Y);

                    if (true == _doPatchGeneration)
                    {
                        if (false == NormalizeFaceWithEyePos(rect, leftEye, rightEye, null))
                        {
                            continue;
                        }

                        byte[] target = new byte[1];
                        target[0] = 0;
                        _normFaceWriter.WriteSample(_normFacePixs, target);

                        Rect largeRect = new Rect(0, 0, 10.0, 10.0);
                        //Rect largeRect = new Rect(0, 0, 40.0, 40.0);
                        //WritePatch(_leftEyeWriter, _normLeftEye, largeRect, 0);
                        //WritePatch(_rightEyeWriter, _normRightEye, largeRect, 0);
                        //WritePatch(_leftMouthWriter, _normLeftMouth, largeRect, 0);
                        //WritePatch(_rightMouthWriter, _normRightMouth, largeRect, 0);
                        //WritePatch(_leftNoseWriter, _leftNormNose, largeRect, 0);
                        //WritePatch(_rightNoseWriter, _rightNormNose, largeRect, 0);
                    }

                    // Shift the eye positions into the face rectangle's coordinate frame
                    leftEye.X -= rect.X;
                    leftEye.Y -= rect.Y;
                    rightEye.X -= rect.X;
                    rightEye.Y -= rect.Y;

                    // Map the labelled feature points through the face-normalizing affine
                    // transform and append them to the report line
                    double[,] affine = GetFaceAffine(rect, leftEye, rightEye,
                                                     _targetRect, _normLeftEye, _normRightEye);
                    ReportNormPoint(affine, (System.Drawing.PointF)features.ptLeftEye, rect, _targetRect);
                    ReportNormPoint(affine, (System.Drawing.PointF)features.ptRightEye, rect, _targetRect);
                    ReportNormPoint(affine, (System.Drawing.PointF)features.ptNose, rect, _targetRect);
                    ReportNormPoint(affine, (System.Drawing.PointF)features.ptLeftMouth, rect, _targetRect);
                    ReportNormPoint(affine, (System.Drawing.PointF)features.ptRightMouth, rect, _targetRect);
                    //ReportNormPoint(affine, _normLeftEye, rect, _targetRect);
                    //ReportNormPoint(affine, _normRightEye, rect, _targetRect);
                    //ReportNormPoint(affine, _normNose, rect, _targetRect);
                    //ReportNormPoint(affine, _normLeftMouth, rect, _targetRect);
                    //ReportNormPoint(affine, _normRightMouth, rect, _targetRect);

                    _outStream.WriteLine();
                    _outStream.Flush();
                    ++imageCount;
                    break;
                }
            }
        }
    }
    return imageCount;
}