ProcessFrame() public method

Performs object detection on the given frame and returns the bounding boxes of the detected objects.

Signature: public Rectangle[] ProcessFrame(Bitmap frame)
    frame — System.Drawing.Bitmap: the frame to scan for objects
    returns — System.Drawing.Rectangle[]: bounding boxes of the detected objects
        public void MaxSizeTest()
        {
            // Detector with a 50px minimum window size in default search mode.
            HaarCascade cascade = new FaceHaarCascade();
            HaarObjectDetector target = new HaarObjectDetector(cascade,
                50, ObjectDetectorSearchMode.Default);

            Bitmap bmp = Properties.Resources.lena_color;
            Rectangle[] detections;

            // A MaxSize narrower than the minimum window leaves no valid scales,
            // so nothing can be detected.
            target.MaxSize = new Size(10, 60);
            detections = target.ProcessFrame(bmp);
            Assert.AreEqual(0, detections.Length);

            // 60x60 admits exactly one detection; every result must fit MaxSize.
            target.MaxSize = new Size(60, 60);
            detections = target.ProcessFrame(bmp);
            Assert.AreEqual(1, detections.Length);
            foreach (var detection in detections)
            {
                Assert.IsTrue(detection.Width <= target.MaxSize.Width);
                Assert.IsTrue(detection.Height <= target.MaxSize.Height);
            }

            // Widening the limit to 80x80 admits a second detection.
            target.MaxSize = new Size(80, 80);
            detections = target.ProcessFrame(bmp);
            Assert.AreEqual(2, detections.Length);
            foreach (var detection in detections)
            {
                Assert.IsTrue(detection.Width <= target.MaxSize.Width);
                Assert.IsTrue(detection.Height <= target.MaxSize.Height);
            }
        }
Beispiel #2
0
        public void ProcessFrame2()
        {
            HaarCascade cascade = new FaceHaarCascade();

            // Phase 1: non-overlapping search should report a single face.
            HaarObjectDetector target = new HaarObjectDetector(cascade,
                30, ObjectDetectorSearchMode.NoOverlap);

            Bitmap bmp = Properties.Resources.lena_gray;

            target.ProcessFrame(bmp);

            Assert.AreEqual(1, target.DetectedObjects.Length);
            var face = target.DetectedObjects[0];
            Assert.AreEqual(255, face.X);
            Assert.AreEqual(225, face.Y);
            Assert.AreEqual(123, face.Width);
            Assert.AreEqual(123, face.Height);

            // Phase 2: the default search mode keeps overlapping candidates,
            // so the same image yields six detections; the first matches phase 1.
            target = new HaarObjectDetector(cascade,
                30, ObjectDetectorSearchMode.Default);

            target.ProcessFrame(bmp);

            Assert.AreEqual(6, target.DetectedObjects.Length);
            face = target.DetectedObjects[0];
            Assert.AreEqual(255, face.X);
            Assert.AreEqual(225, face.Y);
            Assert.AreEqual(123, face.Width);
            Assert.AreEqual(123, face.Height);
        }
        /// <summary>
        /// Detects faces in the image referenced by a data URL, optionally producing an
        /// annotated intermediate image, and returns the contrast-stretched image
        /// re-encoded as a JPEG data URL.
        /// </summary>
        /// <param name="rawDataUrl">Data URL containing the raw image bytes.</param>
        /// <param name="intermediateUrl">Receives a JPEG data URL with detected faces
        /// marked in red and the padded overall bounding box in blue, or null when no
        /// face was found.</param>
        /// <returns>The processed image as a "data:image/jpeg;base64," URL.</returns>
        public string CleanUpImage(string rawDataUrl, out string intermediateUrl)
        {
            intermediateUrl = null;
            var cs = new FaceHaarCascade();
            var detector = new HaarObjectDetector(cs, 30)
            {
                SearchMode = ObjectDetectorSearchMode.Average,
                ScalingMode = ObjectDetectorScalingMode.SmallerToGreater,
                ScalingFactor = 1.5f,
                UseParallelProcessing = true,
                Suppression = 2
            };

            string contentType;
            var data = GetDataFromUrl(rawDataUrl, out contentType);
            using (var ms = new MemoryStream(data))
            // FIX: the decoded Bitmap was never disposed, leaking a GDI handle per call.
            using (var image = (Bitmap)Image.FromStream(ms))
            {
                new ContrastStretch().ApplyInPlace(image);
                var faces = detector.ProcessFrame(image);

                if (faces.Length > 0)
                {
                    // Annotate a copy so the original stays unmarked for the final output.
                    // FIX: the copy was also never disposed.
                    using (var intermediateImage = new Bitmap(image))
                    {
                        new RectanglesMarker(faces, Color.Red).ApplyInPlace(intermediateImage);

                        // Bounding box around all faces, padded by at least 40px (or the
                        // largest face dimension), clamped to the image bounds.
                        var boundary = Math.Max(40, faces.Max(i => Math.Max(i.Height, i.Width)));
                        var x1 = Math.Max(0, faces.Min(i => i.Left) - boundary);
                        var y1 = Math.Max(0, faces.Min(i => i.Top) - boundary);
                        var x2 = Math.Min(image.Width, faces.Max(i => i.Right) + boundary);
                        var y2 = Math.Min(image.Height, faces.Max(i => i.Bottom) + boundary);

                        var newBoundingBox = new Rectangle(x1, y1, x2 - x1, y2 - y1);
                        new RectanglesMarker(new[] { newBoundingBox }, Color.Blue).ApplyInPlace(intermediateImage);

                        using (var ms2 = new MemoryStream())
                        {
                            intermediateImage.Save(ms2, ImageFormat.Jpeg);
                            intermediateUrl = string.Concat("data:image/jpeg;base64,", Convert.ToBase64String(ms2.ToArray()));
                        }
                    }

                    // perform no cropping of the image - post the original
                }

                // save off at JPG/100
                var finalImage = ImageHelper.GetBytes(s => image.Save(s, ImageHelper.JPEGEncoder(), ImageHelper.Quality(100)));
                var newDataUrl = string.Concat("data:image/jpeg;base64,", Convert.ToBase64String(finalImage));
                return newDataUrl;
            }
        }
Beispiel #4
0
        public void ProcessFrame()
        {
            // Non-overlapping search on the color Lena image should find exactly
            // one 59x59 face at (126, 112).
            HaarCascade cascade = new FaceHaarCascade();
            HaarObjectDetector target = new HaarObjectDetector(cascade,
                50, ObjectDetectorSearchMode.NoOverlap);

            Bitmap bmp = Properties.Resources.lena_color;

            target.ProcessFrame(bmp);

            Assert.AreEqual(1, target.DetectedObjects.Length);
            var detected = target.DetectedObjects[0];
            Assert.AreEqual(126, detected.X);
            Assert.AreEqual(112, detected.Y);
            Assert.AreEqual(59, detected.Width);
            Assert.AreEqual(59, detected.Height);
        }
Beispiel #5
0
        static void Main(string[] args)
        {
            // Load the source image.
            var img = new Bitmap(@"src.jpg");
            // Load the cascade classifier definition.
            var cascadeFace = Accord.Vision.Detection.Cascades.FaceHaarCascade.FromXml(@"haarcascade_frontalface_default.xml");
            // Create the Haar-feature object detector.
            var detectorFace = new Accord.Vision.Detection.HaarObjectDetector(cascadeFace);

            // Detect face positions in the loaded image (returned as Rectangle[]).
            var faces = detectorFace.ProcessFrame(img);

            // Draw the detected face positions onto a new image.
            var markerFaces = new Accord.Imaging.Filters.RectanglesMarker(faces, Color.Yellow);
            // FIX: Apply() returns a NEW bitmap; the original was previously
            // overwritten without being disposed, leaking its GDI handle.
            var marked = markerFaces.Apply(img);
            img.Dispose();

            // Save the annotated result and release it.
            marked.Save(@"dst.jpg");
            marked.Dispose();
        }
Beispiel #6
0
        static void Main(string[] args)
        {
            //http://www.codeproject.com/Tips/561129/Face-Detection-with-Lines-of-Code-VB-NET

            //describing Viola Jones here : http://makematics.com/research/viola-jones/

            //choosing scaling factor : http://www.mathworks.com/help/vision/ref/vision.cascadeobjectdetector-class.html#btc108o

            string fileName = "9_r.jpg";

            // FIX: the Bitmap was never disposed; wrap it in a using block.
            using (var image = new Bitmap("C:/temp/FaceDetection/" + fileName))
            {
                var cascade = new FaceHaarCascade();
                var detector = new HaarObjectDetector(cascade, 30);
                detector.SearchMode = ObjectDetectorSearchMode.Average;
                // FIX: Suppression was assigned 3 and then immediately overwritten
                // with 1, so only the final value ever took effect; the dead write
                // is removed.
                detector.Suppression = 1;
                detector.MaxSize = new Size(image.Width, image.Height);

                // Scaling factor slightly above 1.0 so many window sizes are scanned.
                int scalingValue = image.Width > image.Height ? image.Width : image.Height;
                detector.ScalingFactor = scalingValue / (scalingValue - 0.5f);

                detector.ScalingMode = ObjectDetectorScalingMode.GreaterToSmaller;
                detector.UseParallelProcessing = true;

                var sw = new Stopwatch();
                sw.Start();

                Rectangle[] faceObjects = detector.ProcessFrame(image);

                // FIX: the Pen was never disposed; dispose pen and graphics via using.
                using (var p = new Pen(Color.Aqua, 10))
                using (var graphicRect = Graphics.FromImage(image))
                {
                    foreach (var face in faceObjects)
                    {
                        graphicRect.DrawRectangle(p, face);
                    }
                }
                image.Save("C:/temp/FaceDetection/Results/Average_3/" + fileName);

                // NOTE(review): elapsed time is measured but never reported —
                // TODO: log sw.Elapsed if timing information is wanted.
                sw.Stop();
            }
        }
Beispiel #7
0
        static void Main(string[] args)
        {
            // Load the source image.
            var img = new Bitmap(@"src.jpg");
            // Load the cascade classifier definition.
            var cascadeFace = Accord.Vision.Detection.Cascades.FaceHaarCascade.FromXml(@"haarcascade_frontalface_default.xml");
            // Create the Haar-feature object detector.
            var detectorFace = new Accord.Vision.Detection.HaarObjectDetector(cascadeFace);

            // Detect face positions in the loaded image (returned as Rectangle[]).
            var faces = detectorFace.ProcessFrame(img);

            // Draw the detected face positions onto a new image.
            var markerFaces = new Accord.Imaging.Filters.RectanglesMarker(faces, Color.Yellow);

            // FIX: Apply() returns a NEW bitmap; the original was previously
            // overwritten without being disposed, leaking its GDI handle.
            var marked = markerFaces.Apply(img);
            img.Dispose();

            // Save the annotated result and release it.
            marked.Save(@"dst.jpg");
            marked.Dispose();
        }
        public void ProcessFrame3()
        {
            HaarCascade cascade = new FaceHaarCascade();
            HaarObjectDetector target = new HaarObjectDetector(cascade,
                15, ObjectDetectorSearchMode.NoOverlap);

            Bitmap bmp = Properties.Resources.three;

            target.ProcessFrame(bmp);

            // Non-overlapping search should find all three faces at these positions.
            var expected = new[]
            {
                new Rectangle(180, 275, 41, 41),
                new Rectangle(168, 144, 49, 49),
                new Rectangle(392, 133, 59, 59),
            };

            Assert.AreEqual(expected.Length, target.DetectedObjects.Length);
            for (int i = 0; i < expected.Length; i++)
            {
                Assert.AreEqual(expected[i].X, target.DetectedObjects[i].X);
                Assert.AreEqual(expected[i].Y, target.DetectedObjects[i].Y);
                Assert.AreEqual(expected[i].Width, target.DetectedObjects[i].Width);
                Assert.AreEqual(expected[i].Height, target.DetectedObjects[i].Height);
            }

            // Single-object search mode stops after the first detection.
            target = new HaarObjectDetector(cascade,
                15, ObjectDetectorSearchMode.Single);

            target.ProcessFrame(bmp);

            Assert.AreEqual(1, target.DetectedObjects.Length);
        }
Beispiel #9
0
        /// <summary>
        /// Runs Haar-cascade face detection over <paramref name="image"/> with the
        /// given search/scaling configuration, draws the detections onto the image,
        /// and saves the result to <paramref name="targetPath"/>.
        /// </summary>
        /// <param name="image">Image to scan; detections are drawn onto it in place.</param>
        /// <param name="searchMode">Search strategy for the detector.</param>
        /// <param name="supperession">Suppression threshold; only applied in Average
        /// search mode. (Name keeps its historical misspelling so named-argument
        /// callers are not broken.)</param>
        /// <param name="scalingMode">Direction in which window sizes are scanned.</param>
        /// <param name="targetPath">Path the annotated image is saved to.</param>
        /// <param name="parallelProcessing">Whether the detector may use multiple threads.</param>
        private void Detect(Bitmap image,ObjectDetectorSearchMode searchMode, int supperession,ObjectDetectorScalingMode scalingMode,string targetPath,bool parallelProcessing)
        {
            //http://www.codeproject.com/Tips/561129/Face-Detection-with-Lines-of-Code-VB-NET

            //describing Viola Jones here : http://makematics.com/research/viola-jones/

            //choosing scaling factor : http://www.mathworks.com/help/vision/ref/vision.cascadeobjectdetector-class.html#btc108o

            var detector = new HaarObjectDetector(new FaceHaarCascade(), 30);
            detector.SearchMode = searchMode;
            if (searchMode == ObjectDetectorSearchMode.Average)
                detector.Suppression = supperession;
            detector.MaxSize = new Size(image.Width, image.Height);
            detector.ScalingMode = scalingMode;
            detector.UseParallelProcessing = parallelProcessing;

            // Scaling factor slightly above 1.0 so many window sizes are examined.
            int scalingValue = image.Width > image.Height ? image.Width : image.Height;
            detector.ScalingFactor = scalingValue / (scalingValue - 0.5f);

            Rectangle[] faceObjects = detector.ProcessFrame(image);

            // FIX: the Pen was never disposed; dispose pen and graphics via using
            // to avoid leaking GDI handles.
            using (var p = new Pen(Color.Aqua, 10))
            using (var graphicRect = Graphics.FromImage(image))
            {
                foreach (var face in faceObjects)
                {
                    graphicRect.DrawRectangle(p, face);
                }
            }
            image.Save(targetPath);
        }
Beispiel #10
0
        /// <summary>
        /// Image processor and the heart of sensor: runs motion detection, then
        /// frontal/profile face detection, eye detection and a face-direction
        /// heuristic on each incoming camera frame, raising the corresponding
        /// update callbacks.
        /// </summary>
        /// <param name="sender">object - owner</param>
        /// <param name="eventArgs">NewFrameEventArgs - args, contains frame from camera</param>
        private void processFrame(object sender, NewFrameEventArgs eventArgs)
        {
            Rectangle   rect = eventArgs.Frame.Bounds();
            PixelFormat pf   = eventArgs.Frame.PixelFormat;

            // Two independent clones: 'frame' is annotated for preview/output,
            // 'frameFace' stays unmarked and is used for cropping face/eye images.
            Bitmap frame = eventArgs.Frame.Clone(rect, pf);

            // NOTE(review): if scaleX/scaleY are declared as integral types these are
            // integer divisions and lose precision for frame sizes that are not exact
            // multiples of processWidth/processHeight — TODO confirm the field types.
            scaleX = frame.Width / processWidth;
            scaleY = frame.Height / processHeight;
            Bitmap frameFace = eventArgs.Frame.Clone(rect, pf);

            if (OnMotionDetected != null)
            {
                var dataMotion = frame.GetDirectAccess();
                var frameUI    = dataMotion.GetUnmanaged();
                // 0.15 is the motion-level threshold above which motion is reported.
                if (motion.ProcessFrame(frameUI) > 0.15)
                {
                    updateMotion(true);
                }
                else
                {
                    updateMotion(false);
                }
                frame.UnlockBits(dataMotion);
            }

            if (OnFaceDetected != null)
            {
                // Detect on a downsampled copy for speed; detections are scaled back
                // up by scaleX/scaleY when drawn or cropped.
                var dataFace        = frameFace.GetDirectAccess();
                var faceUI          = dataFace.GetUnmanaged();
                var downsample      = faceUI.ResizeTo(processWidth, processHeight);
                var faceDetections  = detectorFace.ProcessFrame(downsample);
                var faceDetections2 = detectorFaceProfile.ProcessFrame(downsample);

                if (isPreview)
                {
                    if (faceDetections.Length > 0)
                    {
                        marker             = new Accord.Imaging.Filters.RectanglesMarker(faceDetections.Scale(scaleX, scaleY));
                        marker.MarkerColor = Color.Yellow;
                        frame = marker.Apply(frame);
                    }

                    if (faceDetections2.Length > 0)
                    {
                        marker             = new Accord.Imaging.Filters.RectanglesMarker(faceDetections2.Scale(scaleX, scaleY));
                        marker.MarkerColor = Color.Yellow;
                        frame = marker.Apply(frame);
                    }
                }


                frameFace.UnlockBits(dataFace);

                // Frontal faces take precedence; the profile branch below only runs
                // when no frontal face was found.
                if (detectorFace.DetectedObjects != null && detectorFace.DetectedObjects.Length > 0)
                {
                    var faces = detectorFace.DetectedObjects.ToFaces((int)scaleX, (int)scaleY);
                    for (int i = 0; i < faces.Length; i++)
                    {
                        var cutter = new AForge.Imaging.Filters.Crop(faces[i].Bounds);
                        faces[i].FaceImage = cutter.Apply(frameFace);

                        // A detected nose marks the face as frontal.
                        if (searchForFaceDirection)
                        {
                            detectorNose.ProcessFrame(faces[i].FaceImage);
                            if (detectorNose.DetectedObjects.Length > 0)
                            {
                                faces[i].Direction = FaceDirection.Frontal;
                            }
                        }

                        var eyeDetections = detectorEye.ProcessFrame(faces[i].FaceImage);

                        if (eyeDetections.Length > 0)
                        {
                            // Any eye detection implies a frontal face. (The original
                            // nested 'Length >= 1' check was redundant inside this
                            // 'Length > 0' branch and has been folded away.)
                            faces[i].Direction = FaceDirection.Frontal;

                            Eye[] eyes = new Eye[eyeDetections.Length];
                            for (int ie = 0; ie < eyes.Length; ie++)
                            {
                                // NOTE(review): eyeDetections are in FaceImage pixel
                                // coordinates while faces[i].Left/Top are full-frame —
                                // verify the coordinate spaces agree before cropping.
                                eyes[ie]        = new Eye();
                                eyes[ie].Left   = faces[i].Left + eyeDetections[ie].X;
                                eyes[ie].Top    = faces[i].Top + eyeDetections[ie].Y;
                                eyes[ie].Width  = eyeDetections[ie].Width;
                                eyes[ie].Height = eyeDetections[ie].Height;
                                var cutter2 = new AForge.Imaging.Filters.Crop(eyes[ie].Bounds);
                                // BUG FIX: previously applied 'cutter' (the FACE crop),
                                // so every EyeImage contained the whole face region
                                // instead of the eye bounds computed just above.
                                eyes[ie].EyeImage = cutter2.Apply(frameFace);
                            }

                            if (isPreview)
                            {
                                marker             = new Accord.Imaging.Filters.RectanglesMarker(eyes.toRects());
                                marker.MarkerColor = Color.Orange;
                                frame = marker.Apply(frame);
                            }

                            updateEyeDetected(eyes);
                        }
                    }
                    updateFaceDetected(faces);
                }
                else if (detectorFaceProfile.DetectedObjects != null && detectorFaceProfile.DetectedObjects.Length > 0)
                {
                    var faces = detectorFaceProfile.DetectedObjects.ToFaces((int)scaleX, (int)scaleY);
                    for (int i = 0; i < faces.Length; i++)
                    {
                        var cutter = new AForge.Imaging.Filters.Crop(faces[i].Bounds);
                        faces[i].FaceImage = cutter.Apply(frameFace);

                        // Direction heuristic for profiles: a visible LEFT ear means
                        // the head is turned right, and vice versa.
                        if (searchForFaceDirection)
                        {
                            detectorEarLeft.ProcessFrame(faces[i].FaceImage);
                            if (detectorEarLeft.DetectedObjects.Length > 0)
                            {
                                faces[i].Direction = FaceDirection.TurnedRight;
                            }
                            else
                            {
                                detectorEarRight.ProcessFrame(faces[i].FaceImage);
                                if (detectorEarRight.DetectedObjects.Length > 0)
                                {
                                    faces[i].Direction = FaceDirection.TurnedLeft;
                                }
                                else
                                {
                                    faces[i].Direction = FaceDirection.NoInfo;
                                }
                            }
                        }
                    }
                    updateFaceDetected(faces);
                }
            }

            // NOTE(review): 'frame' and 'frameFace' are Bitmap clones created every
            // frame and are not disposed here — confirm whether updateFrameReceived
            // takes ownership of 'frame', and whether 'frameFace' can be disposed.
            updateFrameReceived(frame);
        }