// Detects face and eye regions in the bundled sample image ("lena.jpg").
// Self-test overload: loads the image from Assets rather than taking a camera frame.
// Returns a FaceEyes holding face rectangles plus eye rectangles, with eye
// coordinates offset into full-image space.
//private FaceEyes Detect(byte [] imageBytes)
private FaceEyes Detect()
{
    FaceEyes FE = new FaceEyes();

    // FIX: the original never disposed `image`; Mat wraps native OpenCV memory,
    // so it must participate in the using chain like the classifiers do.
    using (Mat image = new Mat(Assets, "lena.jpg"))
    using (CascadeClassifier face = new CascadeClassifier(faceXml))
    using (CascadeClassifier eye = new CascadeClassifier(eyeXml))
    using (UMat ugray = new UMat())
    {
        CvInvoke.CvtColor(image, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);

        // Normalize brightness and increase contrast before detection.
        CvInvoke.EqualizeHist(ugray, ugray);

        // Detect faces in the grayscale image as rectangles.
        Rectangle[] facesDetected = face.DetectMultiScale(
            ugray, 1.1, 10, new System.Drawing.Size(20, 20));
        FE.Faces.AddRange(facesDetected);

        foreach (Rectangle f in facesDetected)
        {
            // Search for eyes only inside each face's region of interest.
            using (UMat faceRegion = new UMat(ugray, f))
            {
                Rectangle[] eyesDetected = eye.DetectMultiScale(
                    faceRegion, 1.1, 10, new System.Drawing.Size(20, 20));

                foreach (Rectangle e in eyesDetected)
                {
                    Rectangle eyeRect = e;
                    // Convert from face-local to full-image coordinates.
                    eyeRect.Offset(f.X, f.Y);
                    FE.Eyes.Add(eyeRect);
                }
            }
        }
    }

    return FE;
}
// Detects eye regions in the supplied RGB camera frame using the pre-loaded
// `eye` cascade classifier field. The face-detection path is intentionally
// disabled in this overload — eyes are searched over the whole frame.
// Returns a FaceEyes with any eye rectangles found; empty when the eye
// classifier has not been initialized yet.
//private FaceEyes Detect(byte [] imageBytes)
private FaceEyes Detect(Mat image2)
{
    FaceEyes FE = new FaceEyes();

    if (eye != null)
    {
        Log.Error(TAG, "\t\t -- FaceEyes Detect()");

        // FIX: the original allocated `ugray` without disposing it — Mat wraps
        // native OpenCV memory, so every frame leaked until GC finalization.
        using (Mat ugray = new Mat())
        {
            CvInvoke.CvtColor(image2, ugray, Emgu.CV.CvEnum.ColorConversion.Rgb2Gray);

            // Normalize brightness and increase contrast before detection.
            CvInvoke.EqualizeHist(ugray, ugray);

            // Detect eyes over the full frame (no face ROI in this path).
            Rectangle[] eyesDetected = eye.DetectMultiScale(
                ugray, 1.1, 20, new System.Drawing.Size(20, 20));

            foreach (Rectangle e in eyesDetected)
            {
                Log.Error(TAG, "\t\t -- FaceEyes Detect()\t EYE DETECTED");
                // Rectangle is a value type: adding `e` directly is identical
                // to the original's copy-then-add.
                FE.Eyes.Add(e);
            }
        }
    }

    return FE;
}
// Draws the detected eye rectangles (in `color`) and the first detected face
// rectangle (in white) onto the transparent overlay surface.
// NOTE(review): the RectLeft/Top/Right/Bottom parameters are currently unused
// in this overload — detection results from Detect() drive the drawing; kept
// for signature compatibility with callers.
private void DrawFocusRect(ISurfaceHolder holder, float RectLeft, float RectTop,
    float RectRight, float RectBottom, Android.Graphics.Color color)
{
    // Lock the overlay canvas for drawing; bail out if it is unavailable.
    var canvas = holder.LockCanvas();
    if (canvas == null)
    {
        return;
    }

    try
    {
        // Run detection (parameterless overload: bundled sample image).
        FaceEyes FE = Detect();

        // Clear out any previous drawing.
        canvas.DrawColor(Android.Graphics.Color.Transparent,
            Android.Graphics.PorterDuff.Mode.Clear);

        // FIX: Paint is IDisposable — dispose it instead of leaking per call.
        using (var paint = new Android.Graphics.Paint())
        {
            paint.SetStyle(Android.Graphics.Paint.Style.Stroke);
            paint.Color = color;
            paint.StrokeWidth = 3;

            // FIX: the original indexed Eyes[0], Eyes[1] and Faces[0]
            // unconditionally and threw ArgumentOutOfRangeException whenever
            // detection found fewer than two eyes or no face. Iterate/guard
            // instead of assuming counts.
            foreach (Rectangle e in FE.Eyes)
            {
                canvas.DrawRect(new Rect(e.Left, e.Top, e.Right, e.Bottom), paint);
            }

            if (FE.Faces.Count > 0)
            {
                Rectangle f0 = FE.Faces[0];
                paint.Color = Android.Graphics.Color.White;
                canvas.DrawRect(new Rect(f0.Left, f0.Top, f0.Right, f0.Bottom), paint);
            }
        }
    }
    finally
    {
        // FIX: always release the canvas — if drawing threw, the original
        // left the surface permanently locked.
        holder.UnlockCanvasAndPost(canvas);
    }
}
// Per-frame camera callback. Kicks off eye detection on a background task —
// at most one detection pass in flight at a time (guarded by isBusy under
// _lock) — then draws the results on the UI thread: each detected eye in
// green, or the last known rectangle in red when nothing was found.
public void PreviewFrame(Mat fnMat)
//public void PreviewFrame(Image yuvImg)
{
    Log.Error(TAG, "\t\t--in PreviewFrame--");
    CLEAR_CANVAS = false;

    lock (_lock)
    {
        if (isBusy)
        {
            // A detection pass is already running — drop this frame.
            return;
        }
        isBusy = true;
    }

    PreviewThread = Task.Factory.StartNew(() =>
    {
        try
        {
            // FIX: the original constructed a new native CascadeClassifier
            // into the `eye` field on EVERY frame without disposing the old
            // one — a per-frame native memory leak. Create it once, lazily.
            if (eye == null)
            {
                eye = new CascadeClassifier(eyeXml);
            }

            Log.Error(TAG, "\t\t--PreviewThread: " + this);

            RESULTS = Detect(fnMat);
            CLEAR_CANVAS = true;

            RunOnUiThread(() =>
            {
                if (RESULTS != null && RESULTS.Count > 0)
                {
                    // Draw each detected eye in green, remembering the last
                    // rectangle in L/T/R/B for the "nothing found" fallback.
                    foreach (var eyeRect in RESULTS.Eyes)
                    {
                        L = eyeRect.Left; T = eyeRect.Top;
                        R = eyeRect.Right; B = eyeRect.Bottom;

                        COLOR = Android.Graphics.Color.Green;
                        DrawFocusRect(mTransparentView.Holder,
                            eyeRect.Left, eyeRect.Top, eyeRect.Right, eyeRect.Bottom,
                            COLOR);
                    }
                }
                else
                {
                    // Nothing detected: redraw the last rectangle in red.
                    COLOR = Android.Graphics.Color.Red;
                    DrawFocusRect(mTransparentView.Holder, L, T, R, B, COLOR);
                }
            });
        }
        catch (System.Exception ex)
        {
            // FIX: the original catch block was empty and silently swallowed
            // every failure. At minimum, log what went wrong.
            Log.Error(TAG, "PreviewFrame detection failed: " + ex);
        }
        finally
        {
            // Free the busy flag even if detection threw, so later frames
            // are not permanently dropped.
            lock (_lock)
            {
                isBusy = false;
            }
        }
    });
}