Viola-Jones Object Detector based on Haar-like features.

The Viola-Jones object detection framework, proposed in 2001 by Paul Viola and Michael Jones, was the first object detection framework to provide competitive detection rates in real time. Although it can be trained to detect a variety of object classes, it was motivated primarily by the problem of face detection.

This implementation uses Viola and Jones' original publication, the OpenCV library, and the Marilena Project as references. OpenCV is released under a BSD license and is free for both academic and commercial use. Please be aware that some particular versions of the Haar object detection framework are patented by Viola and Jones and may be subject to restrictions for use in commercial applications. The code has been implemented from the ground up with full support for tilted Haar features.

References: Viola, P. and Jones, M. (2001). Rapid Object Detection using a Boosted Cascade of Simple Features. Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition (CVPR). See also: http://en.wikipedia.org/wiki/Viola-Jones_object_detection_framework

Inheritance: IObjectDetector
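
The examples below are drawn from several projects that use the detector. As an orientation, here is a minimal sketch of the usage pattern they all share: create a FaceHaarCascade, configure a HaarObjectDetector, call ProcessFrame on a Bitmap, and work with the returned Rectangle array. The file names input.jpg and output.jpg are placeholders, and the property values shown are just one reasonable configuration, not required defaults.

using System.Drawing;
using Accord.Imaging.Filters;
using Accord.Vision.Detection;
using Accord.Vision.Detection.Cascades;

static class QuickStart
{
    static void Main()
    {
        // Create the detector with the built-in frontal-face cascade,
        // searching for faces no smaller than 30x30 pixels.
        var detector = new HaarObjectDetector(new FaceHaarCascade(), 30)
        {
            SearchMode = ObjectDetectorSearchMode.NoOverlap,
            ScalingMode = ObjectDetectorScalingMode.GreaterToSmaller,
            ScalingFactor = 1.2f,
            UseParallelProcessing = true
        };

        using (var image = new Bitmap("input.jpg"))
        {
            // ProcessFrame returns one bounding rectangle per detected face.
            Rectangle[] faces = detector.ProcessFrame(image);

            // Highlight the detections and save the annotated copy.
            using (var marked = new RectanglesMarker(faces, Color.Yellow).Apply(image))
            {
                marked.Save("output.jpg");
            }
        }
    }
}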
        public ImageService(IEventAggregator eventAggregator)
        {
            _eventAggregator = eventAggregator;
            _eventAggregator.Subscribe(this);
            _cascade = new FaceHaarCascade();
            _harHaarObjectDetector = new HaarObjectDetector(_cascade, 30);
            SetupDetector();
        }
        public void MaxSizeTest()
        {
            HaarCascade cascade = new FaceHaarCascade();
            HaarObjectDetector target = new HaarObjectDetector(cascade,
                50, ObjectDetectorSearchMode.Default);

            Bitmap bmp = Properties.Resources.lena_color;
            Rectangle[] result;

            target.MaxSize = new Size(10, 60);
            result = target.ProcessFrame(bmp);
            Assert.AreEqual(0, result.Length);

            target.MaxSize = new Size(60, 60);
            result = target.ProcessFrame(bmp);
            Assert.AreEqual(1, result.Length);
            foreach (var r in result)
            {
                Assert.IsTrue(r.Width <= target.MaxSize.Width);
                Assert.IsTrue(r.Height <= target.MaxSize.Height);
            }

            target.MaxSize = new Size(80, 80);
            result = target.ProcessFrame(bmp);
            Assert.AreEqual(2, result.Length);
            foreach (var r in result)
            {
                Assert.IsTrue(r.Width <= target.MaxSize.Width);
                Assert.IsTrue(r.Height <= target.MaxSize.Height);
            }
        }
        public void ProcessFrame2()
        {
            HaarCascade cascade = new FaceHaarCascade();
            HaarObjectDetector target = new HaarObjectDetector(cascade,
                30, ObjectDetectorSearchMode.NoOverlap);

            Bitmap bmp = Properties.Resources.lena_gray;

            target.ProcessFrame(bmp);

            Assert.AreEqual(1, target.DetectedObjects.Length);
            Assert.AreEqual(255, target.DetectedObjects[0].X);
            Assert.AreEqual(225, target.DetectedObjects[0].Y);
            Assert.AreEqual(123, target.DetectedObjects[0].Width);
            Assert.AreEqual(123, target.DetectedObjects[0].Height);


            target = new HaarObjectDetector(cascade,
                30, ObjectDetectorSearchMode.Default);

            target.ProcessFrame(bmp);

            Assert.AreEqual(6, target.DetectedObjects.Length);
            Assert.AreEqual(255, target.DetectedObjects[0].X);
            Assert.AreEqual(225, target.DetectedObjects[0].Y);
            Assert.AreEqual(123, target.DetectedObjects[0].Width);
            Assert.AreEqual(123, target.DetectedObjects[0].Height);
        }
Example #4
        // Constructor
        public MainForm()
        {
            InitializeComponent();

            HaarCascade cascade = new FaceHaarCascade();
            detector = new HaarObjectDetector(cascade,
                25, ObjectDetectorSearchMode.Single, 1.2f,
                ObjectDetectorScalingMode.GreaterToSmaller);
        }
        public string CleanUpImage(string rawDataUrl, out string intermediateUrl)
        {
            intermediateUrl = null;
            var cs = new FaceHaarCascade();
            var detector = new HaarObjectDetector(cs, 30)
            {
                SearchMode = ObjectDetectorSearchMode.Average,
                ScalingMode = ObjectDetectorScalingMode.SmallerToGreater,
                ScalingFactor = 1.5f,
                UseParallelProcessing = true,
                Suppression = 2
            };

            string contentType;
            var data = GetDataFromUrl(rawDataUrl, out contentType);
            using (var ms = new MemoryStream(data))
            {
                var image = (Bitmap)Bitmap.FromStream(ms);
                new ContrastStretch().ApplyInPlace(image);
                var faces = detector.ProcessFrame(image);

                if (faces.Length > 0)
                {
                    var intermediateImage = new Bitmap(image);
                    new RectanglesMarker(faces, Color.Red).ApplyInPlace(intermediateImage);

                    var boundary = Math.Max(40, faces.Max(i => Math.Max(i.Height, i.Width)));
                    var x1 = Math.Max(0, faces.Min(i => i.Left) - boundary);
                    var y1 = Math.Max(0, faces.Min(i => i.Top) - boundary);
                    var x2 = Math.Min(image.Width, faces.Max(i => i.Right) + boundary);
                    var y2 = Math.Min(image.Height, faces.Max(i => i.Bottom) + boundary);

                    var newBoundingBox = new Rectangle(x1, y1, x2 - x1, y2 - y1);
                    new RectanglesMarker(new [] { newBoundingBox }, Color.Blue).ApplyInPlace(intermediateImage);

                    using (var ms2 = new MemoryStream())
                    {
                        intermediateImage.Save(ms2, ImageFormat.Jpeg);
                        intermediateUrl = string.Concat("data:image/jpeg;base64,", Convert.ToBase64String(ms2.ToArray()));
                    }

                    // perform no cropping of the image - post the original
                }

                // save off at JPG/100
                var finalImage = ImageHelper.GetBytes(s => image.Save(s, ImageHelper.JPEGEncoder(), ImageHelper.Quality(100)));
                var newDataUrl = string.Concat("data:image/jpeg;base64,", Convert.ToBase64String(finalImage));
                return newDataUrl;
            }
        }
Example #6
        public MainForm()
        {
            InitializeComponent();

            cbMode.DataSource = Enum.GetValues(typeof(ObjectDetectorSearchMode));
            cbScaling.DataSource = Enum.GetValues(typeof(ObjectDetectorScalingMode));

            cbMode.SelectedItem = ObjectDetectorSearchMode.Single;
            cbScaling.SelectedItem = ObjectDetectorScalingMode.GreaterToSmaller;

            toolStripStatusLabel1.Text = "Please select the detector options and click Detect to begin.";

            HaarCascade cascade = new FaceHaarCascade();
            detector = new HaarObjectDetector(cascade, 30);
        }
        public void ProcessFrame()
        {
            HaarCascade cascade = new FaceHaarCascade();
            HaarObjectDetector target = new HaarObjectDetector(cascade,
                50, ObjectDetectorSearchMode.NoOverlap);

            Bitmap bmp = Properties.Resources.lena_color;

            target.ProcessFrame(bmp);

            Assert.AreEqual(1, target.DetectedObjects.Length);
            Assert.AreEqual(126, target.DetectedObjects[0].X);
            Assert.AreEqual(112, target.DetectedObjects[0].Y);
            Assert.AreEqual(59, target.DetectedObjects[0].Width);
            Assert.AreEqual(59, target.DetectedObjects[0].Height);
        }
        static void Main(string[] args)
        {
            // Load the image
            var img = new Bitmap(@"src.jpg");
            // Grayscale conversion
            //var gray = new AForge.Imaging.Filters.Grayscale(0.2125, 0.7154, 0.0721).Apply(img);
            // Load the cascade classifier
            var cascadeFace = Accord.Vision.Detection.Cascades.FaceHaarCascade.FromXml(@"haarcascade_frontalface_default.xml");
            // Create the object detector based on Haar-like features
            var detectorFace = new Accord.Vision.Detection.HaarObjectDetector(cascadeFace);

            // Detect face positions in the loaded image (returned as a Rectangle[])
            var faces = detectorFace.ProcessFrame(img);

            // Draw the detected face positions onto the image (for display in a PictureBox)
            var markerFaces = new Accord.Imaging.Filters.RectanglesMarker(faces, Color.Yellow);
            img = markerFaces.Apply(img);
            // Save the result
            //Bitmap img2 = markerFaces.ToBitmap();
            img.Save(@"dst.jpg");
            img.Dispose();
        }
Example #9
        static void Main(string[] args)
        {
            //http://www.codeproject.com/Tips/561129/Face-Detection-with-Lines-of-Code-VB-NET

            //describing Viola Jones here : http://makematics.com/research/viola-jones/

            //choosing scaling factor : http://www.mathworks.com/help/vision/ref/vision.cascadeobjectdetector-class.html#btc108o

            string fileName = "9_r.jpg";

            var image = new Bitmap("C:/temp/FaceDetection/"+fileName);
            var cascade = new FaceHaarCascade();
            var detector = new HaarObjectDetector(cascade,30);
            detector.SearchMode = ObjectDetectorSearchMode.Average;
            detector.Suppression = 3;
            detector.MaxSize =new Size(image.Width,image.Height);
            int scalingValue = image.Width > image.Height ? image.Width : image.Height;
            detector.ScalingFactor = scalingValue / (scalingValue-0.5f);

            detector.ScalingMode = ObjectDetectorScalingMode.GreaterToSmaller;
            detector.UseParallelProcessing = true;
            detector.Suppression = 1;
            var sw = new Stopwatch();
            sw.Start();

            Rectangle[] faceObjects = detector.ProcessFrame(image);
            var p = new Pen(Color.Aqua, 10);

            var graphicRect = Graphics.FromImage(image);

            foreach (var face in faceObjects)
            {
                graphicRect.DrawRectangle(p, face);
            }
            graphicRect.Dispose();
            image.Save("C:/temp/FaceDetection/Results/Average_3/"+fileName);

            sw.Stop();
        }
Example #11
		private async void MainWindow_Loaded(object sender, RoutedEventArgs e)
		{
			// Wait for the client to get set up
			await Task.Delay(5000);

			// Setup Socket Client
			Client = new TcpClient();
			Client.Connect(IPAddress.Parse("10.10.10.100"), 1911);
			ClientStream = Client.GetStream();

			// enumerate video devices
			videoDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice);
			// create video source
			videoSource = new VideoCaptureDevice(videoDevices[1].MonikerString);
			videoSource.SnapshotResolution = videoSource.VideoCapabilities[7];
			// set NewFrame event handler
			videoSource.NewFrame += new NewFrameEventHandler(video_NewFrame);

			// Setup Face Detection
			Cascade = new FaceHaarCascade();
			Detector = new HaarObjectDetector(Cascade, 30);
			Detector.SearchMode = ObjectDetectorSearchMode.Average;
			//Detector.ScalingFactor = 1f;
			Detector.ScalingMode = ObjectDetectorScalingMode.GreaterToSmaller;
			Detector.UseParallelProcessing = true;
			Detector.Suppression = 3;

			// Setup Tracking Data
			CameraVector.X = 90;
			CameraVector.Y = 90;
			//ClientStream.Write(Encoding.UTF8.GetBytes("090,090"), 0, 7);

			//await Task.Delay(3000);

			// start the video source
			videoSource.Start();
		}
Example #12
        /// <summary>
        /// Initialize sensor
        /// </summary>
        private void init()
        {
            try
            {
                if (isLocalCamera)
                {
                    var videoDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice);
                    video = new VideoCaptureDevice(videoDevices[localCameraIndex].MonikerString);
                }
                else
                {
                    video = new MJPEGStream(this.cameraUrl);
                    ((MJPEGStream)video).Login    = this.cameraLogin;
                    ((MJPEGStream)video).Password = this.cameraPassword;
                }
                video.NewFrame         += new NewFrameEventHandler(processFrame);
                video.VideoSourceError += new VideoSourceErrorEventHandler(processFrameError);

                motionMarker = new MotionAreaHighlighting();

                motion = new MotionDetector(
                    new SimpleBackgroundModelingDetector(),
                    motionMarker
                    );

                detectorFace        = Detector.Face();
                detectorEye         = Detector.Eye();
                detectorNose        = Detector.Nose();
                detectorEarLeft     = Detector.EarLeft();
                detectorEarRight    = Detector.EarRight();
                detectorFaceProfile = Detector.FaceProfile();
            }
            catch (Exception ex)
            {
                updateError(ex.Message);
            }
        }
Example #13
        /// <summary>
        ///   Initializes a new instance of the <see cref="HeadController"/> class.
        /// </summary>
        /// 
        public HeadController()
        {
            // Setup tracker
            tracker = new Camshift();
            tracker.Mode = CamshiftMode.RGB;
            tracker.Conservative = true;
            tracker.AspectRatio = 1.5f;

            // Setup detector
            detector = new HaarObjectDetector(new FaceHaarCascade());
            detector.MinSize = new Size(25, 25);
            detector.SearchMode = ObjectDetectorSearchMode.Single;
            detector.ScalingFactor = 1.2f;
            detector.ScalingMode = ObjectDetectorScalingMode.GreaterToSmaller;

            xaxisRange = new IntRange(0, 320);
            yaxisRange = new IntRange(0, 240);
            scaleRange = new DoubleRange(0, Math.Sqrt(320 * 240));
            angleRange = new DoubleRange(0, 2 * Math.PI);
        }
		/// <summary>
		/// Creates the Haar face detector.
		/// </summary>
		/// <returns>A Task representing the asynchronous initialization.</returns>
		private async Task InitializeDetectorsAsync() {
			await Task.Run(() => {
				this.log.Info("Initializing face detector...");

				// Initialize the face detector for multiple objects
				this.detector = new HaarObjectDetector(
					new FaceHaarCascade(),
					this.DetectingAreaMinDefault, ObjectDetectorSearchMode.NoOverlap,
					this.DetectingAreaStepDefault, ObjectDetectorScalingMode.GreaterToSmaller) {
						UseParallelProcessing = true
					};
			});
		}
Example #15
        private void Detect(Bitmap image, ObjectDetectorSearchMode searchMode, int suppression, ObjectDetectorScalingMode scalingMode, string targetPath, bool parallelProcessing)
        {
            //http://www.codeproject.com/Tips/561129/Face-Detection-with-Lines-of-Code-VB-NET

            //describing Viola Jones here : http://makematics.com/research/viola-jones/

            //choosing scaling factor : http://www.mathworks.com/help/vision/ref/vision.cascadeobjectdetector-class.html#btc108o

            var detector = new HaarObjectDetector(new FaceHaarCascade(), 30);
            detector.SearchMode = searchMode;
            if (searchMode == ObjectDetectorSearchMode.Average)
                detector.Suppression = suppression;
            detector.MaxSize = new Size(image.Width, image.Height);
            detector.ScalingMode = scalingMode;
            detector.UseParallelProcessing = parallelProcessing;

            int scalingValue = image.Width > image.Height ? image.Width : image.Height;
            detector.ScalingFactor = scalingValue / (scalingValue - 0.5f);

            Rectangle[] faceObjects = detector.ProcessFrame(image);
            var p = new Pen(Color.Aqua, 10);

            var graphicRect = Graphics.FromImage(image);

            foreach (var face in faceObjects)
            {
                graphicRect.DrawRectangle(p, face);
            }
            graphicRect.Dispose();
            image.Save(targetPath);
        }
        public void ProcessFrame3()
        {
            HaarCascade cascade = new FaceHaarCascade();
            HaarObjectDetector target = new HaarObjectDetector(cascade,
                15, ObjectDetectorSearchMode.NoOverlap);

            Bitmap bmp = Properties.Resources.three;

            target.ProcessFrame(bmp);

            Assert.AreEqual(3, target.DetectedObjects.Length);
            Assert.AreEqual(180, target.DetectedObjects[0].X);
            Assert.AreEqual(275, target.DetectedObjects[0].Y);
            Assert.AreEqual(41, target.DetectedObjects[0].Width);
            Assert.AreEqual(41, target.DetectedObjects[0].Height);

            Assert.AreEqual(168, target.DetectedObjects[1].X);
            Assert.AreEqual(144, target.DetectedObjects[1].Y);
            Assert.AreEqual(49, target.DetectedObjects[1].Width);
            Assert.AreEqual(49, target.DetectedObjects[1].Height);

            Assert.AreEqual(392, target.DetectedObjects[2].X);
            Assert.AreEqual(133, target.DetectedObjects[2].Y);
            Assert.AreEqual(59, target.DetectedObjects[2].Width);
            Assert.AreEqual(59, target.DetectedObjects[2].Height);


            target = new HaarObjectDetector(cascade,
                15, ObjectDetectorSearchMode.Single);

            target.ProcessFrame(bmp);

            Assert.AreEqual(1, target.DetectedObjects.Length);
        }
        /// <summary>
        ///   Initializes a new instance of the <see cref="FaceController"/> class.
        /// </summary>
        /// 
        public FaceController()
        {
            // Setup tracker
            tracker = new MatchingTracker();
            tracker.Extract = false;

            // Setup detector
            detector = new HaarObjectDetector(new NoseHaarCascade());
            detector.ScalingFactor = 1.1f;
            detector.SearchMode = ObjectDetectorSearchMode.Single;
            detector.ScalingMode = ObjectDetectorScalingMode.SmallerToGreater;
            detector.MinSize = new Size(2, 5);
            //detector.MaxSize = new Size(15, 18);
        }