Inheritance: HaarCascade
Example #1
 public ImageService(IEventAggregator eventAggregator)
 {
     _eventAggregator = eventAggregator;
     _eventAggregator.Subscribe(this);
     _cascade = new FaceHaarCascade();
     _harHaarObjectDetector = new HaarObjectDetector(_cascade, 30);
     SetupDetector();
 }
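The SetupDetector() method called above is not shown in this example. A minimal sketch of what such a method might configure, using only HaarObjectDetector properties that appear in the other examples on this page (the body is an assumption, not the original implementation):

 private void SetupDetector()
 {
     // Hypothetical configuration; property names match those used elsewhere on this page.
     _harHaarObjectDetector.SearchMode = ObjectDetectorSearchMode.Average;
     _harHaarObjectDetector.ScalingMode = ObjectDetectorScalingMode.GreaterToSmaller;
     _harHaarObjectDetector.ScalingFactor = 1.5f;
     _harHaarObjectDetector.UseParallelProcessing = true;
     _harHaarObjectDetector.Suppression = 2;
 }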
Example #2
        public void MaxSizeTest()
        {
            HaarCascade cascade = new FaceHaarCascade();
            HaarObjectDetector target = new HaarObjectDetector(cascade,
                50, ObjectDetectorSearchMode.Default);

            Bitmap bmp = Properties.Resources.lena_color;
            Rectangle[] result;

            target.MaxSize = new Size(10, 60);
            result = target.ProcessFrame(bmp);
            Assert.AreEqual(0, result.Length);

            target.MaxSize = new Size(60, 60);
            result = target.ProcessFrame(bmp);
            Assert.AreEqual(1, result.Length);
            foreach (var r in result)
            {
                Assert.IsTrue(r.Width <= target.MaxSize.Width);
                Assert.IsTrue(r.Height <= target.MaxSize.Height);
            }

            target.MaxSize = new Size(80, 80);
            result = target.ProcessFrame(bmp);
            Assert.AreEqual(2, result.Length);
            foreach (var r in result)
            {
                Assert.IsTrue(r.Width <= target.MaxSize.Width);
                Assert.IsTrue(r.Height <= target.MaxSize.Height);
            }
        }
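As the test shows, MaxSize caps the largest detection window the detector will report, while the minimum size is fixed by the constructor argument; together they define the band of object sizes that are searched. A minimal sketch restricting detection to one size band (the 40/100-pixel values are illustrative, not taken from the test above):

            // Only report objects between 40 and 100 pixels on a side (illustrative values).
            var detector = new HaarObjectDetector(new FaceHaarCascade(), 40);
            detector.MaxSize = new Size(100, 100);
            Rectangle[] faces = detector.ProcessFrame(bmp); // bmp: the input image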
Example #3
        public void ProcessFrame2()
        {
            HaarCascade cascade = new FaceHaarCascade();
            HaarObjectDetector target = new HaarObjectDetector(cascade,
                30, ObjectDetectorSearchMode.NoOverlap);

            Bitmap bmp = Properties.Resources.lena_gray;

            target.ProcessFrame(bmp);

            Assert.AreEqual(1, target.DetectedObjects.Length);
            Assert.AreEqual(255, target.DetectedObjects[0].X);
            Assert.AreEqual(225, target.DetectedObjects[0].Y);
            Assert.AreEqual(123, target.DetectedObjects[0].Width);
            Assert.AreEqual(123, target.DetectedObjects[0].Height);


            target = new HaarObjectDetector(cascade,
                30, ObjectDetectorSearchMode.Default);

            target.ProcessFrame(bmp);

            Assert.AreEqual(6, target.DetectedObjects.Length);
            Assert.AreEqual(255, target.DetectedObjects[0].X);
            Assert.AreEqual(225, target.DetectedObjects[0].Y);
            Assert.AreEqual(123, target.DetectedObjects[0].Width);
            Assert.AreEqual(123, target.DetectedObjects[0].Height);
        }
Example #4
        // Constructor
        public MainForm()
        {
            InitializeComponent();

            HaarCascade cascade = new FaceHaarCascade();
            detector = new HaarObjectDetector(cascade,
                25, ObjectDetectorSearchMode.Single, 1.2f,
                ObjectDetectorScalingMode.GreaterToSmaller);
        }
Example #5
        public void ParseTest()
        {
            StringReader stringReader = new StringReader(Properties.Resources.haarcascade_frontalface_alt);
            HaarCascade cascade1 = HaarCascade.FromXml(stringReader);

            Assert.AreEqual(22, cascade1.Stages.Length);
            Assert.AreEqual(3, cascade1.Stages[0].Trees.Length);
            Assert.AreEqual(1, cascade1.Stages[0].Trees[0].Length);

            Assert.AreEqual(false, cascade1.HasTiltedFeatures);

            // Load the hard coded version of the classifier
            HaarCascade cascade2 = new FaceHaarCascade();

            Assert.AreEqual(cascade1.Stages.Length, cascade2.Stages.Length);
            Assert.AreEqual(cascade1.Height, cascade2.Height);
            Assert.AreEqual(cascade1.Width, cascade2.Width);


            for (int i = 0; i < 3; i++)
            {
                HaarCascadeStage stage1 = cascade1.Stages[i];
                HaarCascadeStage stage2 = cascade2.Stages[i];

                //Assert.AreEqual(stage1.NextIndex, stage2.NextIndex);
                //Assert.AreEqual(stage1.ParentIndex, stage2.ParentIndex);

                Assert.AreEqual(stage1.Threshold, stage2.Threshold);

                Assert.AreEqual(stage1.Trees.Length, stage2.Trees.Length);

                for (int j = 0; j < stage1.Trees.Length && j < stage2.Trees.Length; j++)
                {
                    HaarFeatureNode[] tree1 = stage1.Trees[j];
                    HaarFeatureNode[] tree2 = stage2.Trees[j];

                    Assert.AreEqual(tree1.Length, tree2.Length);

                    for (int k = 0; k < tree1.Length; k++)
                    {
                        HaarFeatureNode node1 = tree1[k];
                        HaarFeatureNode node2 = tree2[k];

                        Assert.AreEqual(node1.LeftNodeIndex, node2.LeftNodeIndex);
                        Assert.AreEqual(node1.RightNodeIndex, node2.RightNodeIndex);

                        Assert.AreEqual(node1.LeftValue, node2.LeftValue);
                        Assert.AreEqual(node1.RightValue, node2.RightValue);

                        Assert.AreEqual(node1.Feature.Tilted, node2.Feature.Tilted);

                        Assert.AreEqual(node1.Threshold, node2.Threshold, 0.000000001);
                    }
                }
            }

        }
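The test above parses the cascade definition from an embedded string resource. A minimal sketch of loading the same kind of OpenCV 2.0 XML definition from a file on disk instead (the file name is a placeholder):

            // Load a cascade from an OpenCV-format XML file; the path is a placeholder.
            using (var reader = new StreamReader("haarcascade_frontalface_alt.xml"))
            {
                HaarCascade cascadeFromFile = HaarCascade.FromXml(reader);
                var detector = new HaarObjectDetector(cascadeFromFile, 30);
            }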
Example #6
        public string CleanUpImage(string rawDataUrl, out string intermediateUrl)
        {
            intermediateUrl = null;
            var cs = new FaceHaarCascade();
            var detector = new HaarObjectDetector(cs, 30)
            {
                SearchMode = ObjectDetectorSearchMode.Average,
                ScalingMode = ObjectDetectorScalingMode.SmallerToGreater,
                ScalingFactor = 1.5f,
                UseParallelProcessing = true,
                Suppression = 2
            };

            string contentType;
            var data = GetDataFromUrl(rawDataUrl, out contentType);
            using (var ms = new MemoryStream(data))
            {
                var image = (Bitmap)Bitmap.FromStream(ms);
                new ContrastStretch().ApplyInPlace(image);
                var faces = detector.ProcessFrame(image);

                if (faces.Length > 0)
                {
                    var intermediateImage = new Bitmap(image);
                    new RectanglesMarker(faces, Color.Red).ApplyInPlace(intermediateImage);

                    var boundary = Math.Max(40, faces.Max(i => Math.Max(i.Height, i.Width)));
                    var x1 = Math.Max(0, faces.Min(i => i.Left) - boundary);
                    var y1 = Math.Max(0, faces.Min(i => i.Top) - boundary);
                    var x2 = Math.Min(image.Width, faces.Max(i => i.Right) + boundary);
                    var y2 = Math.Min(image.Height, faces.Max(i => i.Bottom) + boundary);

                    var newBoundingBox = new Rectangle(x1, y1, x2 - x1, y2 - y1);
                    new RectanglesMarker(new [] { newBoundingBox }, Color.Blue).ApplyInPlace(intermediateImage);

                    using (var ms2 = new MemoryStream())
                    {
                        intermediateImage.Save(ms2, ImageFormat.Jpeg);
                        intermediateUrl = string.Concat("data:image/jpeg;base64,", Convert.ToBase64String(ms2.ToArray()));
                    }

                    // perform no cropping of the image - post the original
                }

                // save off at JPG/100
                var finalImage = ImageHelper.GetBytes(s => image.Save(s, ImageHelper.JPEGEncoder(), ImageHelper.Quality(100)));
                var newDataUrl = string.Concat("data:image/jpeg;base64,", Convert.ToBase64String(finalImage));
                return newDataUrl;
            }
        }
Example #7
        public MainForm()
        {
            InitializeComponent();

            cbMode.DataSource = Enum.GetValues(typeof(ObjectDetectorSearchMode));
            cbScaling.DataSource = Enum.GetValues(typeof(ObjectDetectorScalingMode));

            cbMode.SelectedItem = ObjectDetectorSearchMode.Single;
            cbScaling.SelectedItem = ObjectDetectorScalingMode.GreaterToSmaller;

            toolStripStatusLabel1.Text = "Please select the detector options and click Detect to begin.";

            HaarCascade cascade = new FaceHaarCascade();
            detector = new HaarObjectDetector(cascade, 30);
        }
Example #8
        public void ProcessFrame()
        {
            HaarCascade cascade = new FaceHaarCascade();
            HaarObjectDetector target = new HaarObjectDetector(cascade,
                50, ObjectDetectorSearchMode.NoOverlap);

            Bitmap bmp = Properties.Resources.lena_color;

            target.ProcessFrame(bmp);

            Assert.AreEqual(1, target.DetectedObjects.Length);
            Assert.AreEqual(126, target.DetectedObjects[0].X);
            Assert.AreEqual(112, target.DetectedObjects[0].Y);
            Assert.AreEqual(59, target.DetectedObjects[0].Width);
            Assert.AreEqual(59, target.DetectedObjects[0].Height);
        }
Example #9
        public void CloneTest()
        {
            HaarCascade expected = new FaceHaarCascade();
            HaarCascade actual = (HaarCascade)expected.Clone();

            Assert.AreNotEqual(expected, actual);

            Assert.AreEqual(expected.HasTiltedFeatures, actual.HasTiltedFeatures);
            Assert.AreEqual(expected.Height, actual.Height);
            Assert.AreEqual(expected.Width, actual.Width);

            Assert.AreNotEqual(expected.Stages, actual.Stages);
            Assert.AreEqual(expected.Stages.Length, actual.Stages.Length);

            for (int i = 0; i < expected.Stages.Length; i++)
                equals(expected.Stages[i], actual.Stages[i]);
        }
Example #10
        public void ProcessFrame()
        {
            #region doc_example
            // In order to use a HaarObjectDetector, first we have to tell it
            // which type of objects we would like to detect. And in a Haar detector,
            // different object classifiers are specified in terms of a HaarCascade.

            // The framework comes with some built-in cascades for common body
            // parts, such as Face and Nose. However, it is also possible to
            // load a cascade from cascade XML definitions in OpenCV 2.0 format.

            // In this example, we will be creating a cascade for a Face detector:
            var cascade = new Accord.Vision.Detection.Cascades.FaceHaarCascade();

            // Note: In the case we would like to load it from XML, we could use:
            // var cascade = HaarCascade.FromXml("filename.xml");

            // Now, create a new Haar object detector with the cascade:
            var detector = new HaarObjectDetector(cascade, minSize: 50,
                                                  searchMode: ObjectDetectorSearchMode.NoOverlap);

            // Note that we have specified that we do not want overlapping objects,
            // and that the minimum size an object can have is 50 pixels. Now, we
            // can use the detector to classify a new image. For instance, consider
            // the famous Lena picture:

            Bitmap bmp = Accord.Imaging.Image.Clone(Resources.lena_color);

            // We have to call ProcessFrame to detect all rectangles containing the
            // object we are interested in (which in this case, is the face of Lena):
            Rectangle[] rectangles = detector.ProcessFrame(bmp);

            // The answer will be a single rectangle of dimensions
            //
            //   {X = 126 Y = 112 Width = 59 Height = 59}
            //
            // which indeed contains the only face in the picture.
            #endregion

            Assert.AreEqual(1, detector.DetectedObjects.Length);
            Assert.AreEqual(126, detector.DetectedObjects[0].X);
            Assert.AreEqual(112, detector.DetectedObjects[0].Y);
            Assert.AreEqual(59, detector.DetectedObjects[0].Width);
            Assert.AreEqual(59, detector.DetectedObjects[0].Height);
        }
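A common next step is to highlight the returned rectangles on the image. A minimal sketch using the RectanglesMarker filter that appears in other examples on this page (the output path is a placeholder):

            // Draw the detected regions in red on a copy of the image.
            var marked = new Bitmap(bmp);
            new RectanglesMarker(rectangles, Color.Red).ApplyInPlace(marked);
            marked.Save("lena_faces.png"); // placeholder output path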
Example #11
        static void Main(string[] args)
        {
            //http://www.codeproject.com/Tips/561129/Face-Detection-with-Lines-of-Code-VB-NET

            //describing Viola Jones here : http://makematics.com/research/viola-jones/

            //choosing scaling factor : http://www.mathworks.com/help/vision/ref/vision.cascadeobjectdetector-class.html#btc108o

            string fileName = "9_r.jpg";

            var image = new Bitmap("C:/temp/FaceDetection/"+fileName);
            var cascade = new FaceHaarCascade();
            var detector = new HaarObjectDetector(cascade, 30);
            detector.SearchMode = ObjectDetectorSearchMode.Average;
            detector.Suppression = 3;
            detector.MaxSize = new Size(image.Width, image.Height);
            int scalingValue = image.Width > image.Height ? image.Width : image.Height;
            detector.ScalingFactor = scalingValue / (scalingValue - 0.5f);

            detector.ScalingMode = ObjectDetectorScalingMode.GreaterToSmaller;
            detector.UseParallelProcessing = true;
            detector.Suppression = 1; // overrides the Suppression = 3 set above
            var sw = new Stopwatch();
            sw.Start();

            Rectangle[] faceObjects = detector.ProcessFrame(image);
            var p = new Pen(Color.Aqua, 10);

            var graphicRect = Graphics.FromImage(image);

            foreach (var face in faceObjects)
            {
                graphicRect.DrawRectangle(p, face);
            }
            graphicRect.Dispose();
            image.Save("C:/temp/FaceDetection/Results/Average_3/"+fileName);

            sw.Stop();
        }
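The scaling factor computed above, scalingValue / (scalingValue - 0.5f), ends up only fractionally above 1.0, which makes the detector step through scales extremely finely (and therefore slowly). A quick check of the arithmetic with an illustrative value:

            float scalingValue = 800f;                            // illustrative image dimension
            float factor = scalingValue / (scalingValue - 0.5f);  // 800 / 799.5
            Console.WriteLine(factor);                            // ≈ 1.000625, versus the 1.1-1.5 factors used in other examples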
Example #12
		private async void MainWindow_Loaded(object sender, RoutedEventArgs e)
		{
			// Wait for the client to get setup
			await Task.Delay(5000);

			// Setup Socket Client
			Client = new TcpClient();
			Client.Connect(IPAddress.Parse("10.10.10.100"), 1911);
			ClientStream = Client.GetStream();

			// enumerate video devices
			videoDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice);
			// create video source
			videoSource = new VideoCaptureDevice(videoDevices[1].MonikerString);
			videoSource.SnapshotResolution = videoSource.VideoCapabilities[7];
			// set NewFrame event handler
			videoSource.NewFrame += new NewFrameEventHandler(video_NewFrame);

			// Setup Face Detection
			Cascade = new FaceHaarCascade();
			Detector = new HaarObjectDetector(Cascade, 30);
			Detector.SearchMode = ObjectDetectorSearchMode.Average;
			//Detector.ScalingFactor = 1f;
			Detector.ScalingMode = ObjectDetectorScalingMode.GreaterToSmaller;
			Detector.UseParallelProcessing = true;
			Detector.Suppression = 3;

			// Setup Tracking Data
			CameraVector.X = 90;
			CameraVector.Y = 90;
			//ClientStream.Write(Encoding.UTF8.GetBytes("090,090"), 0, 7);

			//await Task.Delay(3000);

			// start the video source
			videoSource.Start();
		}
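The video_NewFrame handler wired up above is not part of this example. A minimal sketch of what such a handler could look like, feeding each captured frame to the detector (the handler body is an assumption; only Detector.ProcessFrame and the AForge NewFrameEventArgs.Frame bitmap are taken from known API):

		private void video_NewFrame(object sender, NewFrameEventArgs eventArgs)
		{
			// Clone the frame before using it; the video source reuses the buffer behind eventArgs.Frame.
			using (var frame = (Bitmap)eventArgs.Frame.Clone())
			{
				Rectangle[] faces = Detector.ProcessFrame(frame);
				// ... update CameraVector and write tracking commands to ClientStream here (omitted) ...
			}
		}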
Example #13
        public void ProcessFrame3()
        {
            HaarCascade cascade = new FaceHaarCascade();
            HaarObjectDetector target = new HaarObjectDetector(cascade,
                15, ObjectDetectorSearchMode.NoOverlap);

            Bitmap bmp = Properties.Resources.three;

            target.ProcessFrame(bmp);

            Assert.AreEqual(2, target.DetectedObjects.Length);

            int i = 0;
           /* Assert.AreEqual(180, target.DetectedObjects[0].X);
            Assert.AreEqual(275, target.DetectedObjects[0].Y);
            Assert.AreEqual(41, target.DetectedObjects[0].Width);
            Assert.AreEqual(41, target.DetectedObjects[0].Height);
            */
            Assert.AreEqual(168, target.DetectedObjects[i].X);
            Assert.AreEqual(144, target.DetectedObjects[i].Y);
            Assert.AreEqual(49, target.DetectedObjects[i].Width);
            Assert.AreEqual(49, target.DetectedObjects[i].Height);

            i++;
            Assert.AreEqual(392, target.DetectedObjects[i].X);
            Assert.AreEqual(133, target.DetectedObjects[i].Y);
            Assert.AreEqual(59, target.DetectedObjects[i].Width);
            Assert.AreEqual(59, target.DetectedObjects[i].Height);


            target = new HaarObjectDetector(cascade,
                15, ObjectDetectorSearchMode.Single);

            target.ProcessFrame(bmp);

            Assert.AreEqual(1, target.DetectedObjects.Length);
        }
Example #14
        public void ParseTest3()
        {
            HaarCascade cascade = new FaceHaarCascade();

            Assert.AreEqual(22, cascade.Stages.Length);
            Assert.AreEqual(3, cascade.Stages[0].Trees.Length);
            Assert.AreEqual(1, cascade.Stages[0].Trees[0].Length);
        }
Example #15
        public void ProcessVideo()
        {
            string basePath = Path.Combine(NUnit.Framework.TestContext.CurrentContext.TestDirectory, "detector");

            #region doc_video
            // Let's test the detector using a sample video from
            // the collection of test videos in the framework:
            TestVideos ds       = new TestVideos(basePath);
            string     fileName = ds["crowd.mp4"];

            // In this example, we will be creating a cascade for a Face detector:
            var cascade = new Accord.Vision.Detection.Cascades.FaceHaarCascade();

            // Now, create a new Haar object detector with the cascade:
            var detector = new HaarObjectDetector(cascade, minSize: 25,
                                                  searchMode: ObjectDetectorSearchMode.Average,
                                                  scalingMode: ObjectDetectorScalingMode.SmallerToGreater,
                                                  scaleFactor: 1.1f)
            {
                Suppression = 5 // This should make sure we only report regions as faces if
                                // they have been detected at least 5 times within different cascade scales.
            };

            // Now, let's open the video using FFMPEG:
            var video = new VideoFileReader();
            video.Open(fileName);

            // And then check the contents of one of the frames:
            Bitmap frame = video.ReadVideoFrame(frameIndex: 0);

            // Creating bitmaps and locking them is an expensive
            // operation. Instead, let's allocate once and reuse
            BitmapData     bitmapData     = frame.LockBits(ImageLockMode.ReadWrite);
            UnmanagedImage unmanagedImage = new UnmanagedImage(bitmapData);

            // We will create a color marker to show the faces
            var objectMarker = new RectanglesMarker(Color.Red);

            // This example is going to show two different ways to save results to disk. The
            // first is to save the results frame-by-frame, saving each individual frame as
            // a separate .png file. The second is to save them as a video in .mp4 format.

            // To save results as a movie clip in mp4 format, you can use:
            VideoFileWriter writer = new VideoFileWriter();
            writer.Open(Path.Combine(basePath, "detected_faces.mp4"), frame.Width, frame.Height);

            // Now, for each frame of the video
            for (int frameIndex = 0; frameIndex < video.FrameCount; frameIndex++)
            {
                // Read the current frame into the bitmap data
                video.ReadVideoFrame(frameIndex, bitmapData);

                // Feed the frame to the tracker
                Rectangle[] faces = detector.ProcessFrame(unmanagedImage);

                // Mark the location of the tracker object in red color
                objectMarker.Rectangles = faces;
                objectMarker.ApplyInPlace(unmanagedImage); // overwrite the frame

                // Save it to disk: first saving each frame separately:
                frame.Save(Path.Combine(basePath, "frame_{0}.png".Format(frameIndex)));

                // And then, saving as a .mp4 file:
                writer.WriteVideoFrame(bitmapData);
            }

            // The generated video can be seen at https://1drv.ms/v/s!AoiTwBxoR4OAoLJhPozzixD25XcbiQ
            video.Close();
            writer.Close();
            #endregion
        }