コード例 #1
0
        public void MaxSizeTest()
        {
            // Face detector over the Lena test image; only MaxSize varies between runs.
            HaarCascade cascade = new FaceHaarCascade();
            HaarObjectDetector target = new HaarObjectDetector(cascade,
                                                               50, ObjectDetectorSearchMode.Default);

            Bitmap bmp = Properties.Resources.lena_color;

            // A maximum size smaller than any face present yields no detections.
            target.MaxSize = new Size(10, 60);
            Rectangle[] result = target.ProcessFrame(bmp);
            Assert.AreEqual(0, result.Length);

            // Raising the cap admits progressively more detections; every
            // reported rectangle must respect the configured maximum.
            target.MaxSize = new Size(60, 60);
            result = target.ProcessFrame(bmp);
            Assert.AreEqual(1, result.Length);
            foreach (Rectangle detection in result)
            {
                Assert.IsTrue(detection.Width <= target.MaxSize.Width);
                Assert.IsTrue(detection.Height <= target.MaxSize.Height);
            }

            target.MaxSize = new Size(80, 80);
            result = target.ProcessFrame(bmp);
            Assert.AreEqual(2, result.Length);
            foreach (Rectangle detection in result)
            {
                Assert.IsTrue(detection.Width <= target.MaxSize.Width);
                Assert.IsTrue(detection.Height <= target.MaxSize.Height);
            }
        }
コード例 #2
0
        //var cascade = new Accord.Vision.Detection.Cascades.FaceHaarCascade();

        /// <summary>
        /// Detects faces on the input frame and covers each one with a patch
        /// produced by GetBlackRectangle, returning a new bitmap.
        /// </summary>
        /// <param name="inputImage">Frame to process; the detector runs on this image directly.</param>
        /// <returns>A copy of the frame with every detected face region overwritten.</returns>
        public Bitmap ProcessFrame(Bitmap inputImage)
        {
            // Work on an Emgu image wrapping a copy of the input so the
            // original bitmap is left untouched.
            Image <Bgr, byte> imageFrame = new Image <Bgr, byte>(new Bitmap(inputImage));

            // In this example, we will be creating a cascade for a Face detector:
            // var cascade = HaarCascade.FromXml("filename.xml"); (OpenCV 2.0 format)

            //not overlapping objects!!
            //min size 50 pixels
            var detector = new HaarObjectDetector(_cascade, minSize: 50,
                                                  searchMode: ObjectDetectorSearchMode.NoOverlap);

            Rectangle[] rectangles = detector.ProcessFrame(inputImage);

            foreach (var rectangle in rectangles)
            {
                // Restrict the ROI to the detected face and copy the cover
                // patch over just that region.
                imageFrame.ROI = rectangle;
                var facePatch = GetBlackRectangle(rectangle);
                CvInvoke.cvCopy(facePatch, imageFrame, IntPtr.Zero);

                //imageFrame.Draw(rectangle, new Bgr(Color.Indigo), 3);
            }
            // Clear the ROI so the full frame is returned, not the last face region.
            imageFrame.ROI = Rectangle.Empty;
            return(imageFrame.Bitmap);
        }
コード例 #3
0
ファイル: MainForm.cs プロジェクト: LiDamon/OCR
        private void button1_Click(object sender, EventArgs e)
        {
            // Configure the detector from the UI controls before each run.
            detector.SearchMode            = (ObjectDetectorSearchMode)cbMode.SelectedValue;
            detector.ScalingMode           = (ObjectDetectorScalingMode)cbScaling.SelectedValue;
            detector.ScalingFactor         = 1.5f;
            detector.MinSize               = new Size(32, 32);
            detector.UseParallelProcessing = cbParallel.Checked;

            var stopwatch = Stopwatch.StartNew();

            // The detector expects a 24bpp RGB frame, so clone/convert first.
            picture = Accord.Imaging.Image.Clone(new Bitmap(pictureBox1.Image), System.Drawing.Imaging.PixelFormat.Format24bppRgb);

            // Run one detection pass over the prepared frame.
            Rectangle[] detections = detector.ProcessFrame(picture);

            stopwatch.Stop();

            // Highlight any detections directly on the displayed image.
            if (detections.Length != 0)
            {
                var marker = new RectanglesMarker(detections, Color.Red);
                pictureBox1.Image = marker.Apply(picture);
            }

            toolStripStatusLabel1.Text = string.Format("Completed detection of {0} objects in {1}.",
                                                       detections.Length, stopwatch.Elapsed);
        }
コード例 #4
0
        /// <summary>
        /// Detects faces on the given frame and returns it with orange markers
        /// drawn over any detections; the unmarked original is kept in
        /// currentPicture so snapshots save a clean image.
        /// </summary>
        /// <param name="bitmap">Incoming camera frame.</param>
        /// <returns>The frame with face markers applied, or the frame unchanged when no face is found.</returns>
        private Bitmap FacePicDetect(Bitmap bitmap)
        {
            // NOTE(review): locking on `this` is fragile — any external code can
            // take the same lock. A private readonly gate field would be safer,
            // but adding one requires a class-level change outside this method.
            lock (this)
            {
                // Keep an unmarked copy of the frame so a photo taken while
                // detection is running is saved without the face rectangles.
                currentPicture = (Bitmap)bitmap.Clone();

                if (detector == null)
                {
                    // Lazily build the face detector: face cascade with a
                    // 100 px minimum search window.
                    detector = new HaarObjectDetector(new FaceHaarCascade(), 100)
                    {
                        SearchMode            = ObjectDetectorSearchMode.Single,            // stop after the first face
                        ScalingMode           = ObjectDetectorScalingMode.GreaterToSmaller, // scan large windows first
                        ScalingFactor         = 1.5f,                                       // window rescale factor between passes
                        UseParallelProcessing = true
                    };
                }

                // Run detection on the incoming frame.
                Rectangle[] regions = detector.ProcessFrame(bitmap);
                if (regions != null && regions.Length > 0)
                {
                    // Mark the detected faces on the returned frame.
                    // (The redundant `regions = null;` assignments before each
                    // return were removed — nulling a local has no effect.)
                    RectanglesMarker marker = new RectanglesMarker(regions, Color.Orange);
                    return marker.Apply(bitmap);
                }
                return bitmap;
            }
        }
コード例 #5
0
        public void ProcessFrame3()
        {
            HaarCascade cascade = new FaceHaarCascade();
            HaarObjectDetector target = new HaarObjectDetector(cascade,
                                                               15, ObjectDetectorSearchMode.NoOverlap);

            Bitmap bmp = Properties.Resources.three;

            target.ProcessFrame(bmp);

            // NoOverlap mode finds all three faces at their known positions.
            Rectangle[] expected =
            {
                new Rectangle(180, 275, 41, 41),
                new Rectangle(168, 144, 49, 49),
                new Rectangle(392, 133, 59, 59),
            };

            Assert.AreEqual(expected.Length, target.DetectedObjects.Length);
            for (int i = 0; i < expected.Length; i++)
            {
                Assert.AreEqual(expected[i].X, target.DetectedObjects[i].X);
                Assert.AreEqual(expected[i].Y, target.DetectedObjects[i].Y);
                Assert.AreEqual(expected[i].Width, target.DetectedObjects[i].Width);
                Assert.AreEqual(expected[i].Height, target.DetectedObjects[i].Height);
            }


            // Single mode reports exactly one face.
            target = new HaarObjectDetector(cascade,
                                            15, ObjectDetectorSearchMode.Single);

            target.ProcessFrame(bmp);

            Assert.AreEqual(1, target.DetectedObjects.Length);
        }
コード例 #6
0
        /// <summary>
        /// Finds the dominant face by averaging all sufficiently large
        /// candidate rectangles reported by the detector.
        /// </summary>
        /// <param name="bitmap">Image to scan for faces.</param>
        /// <returns>The averaged face rectangle, or an empty Rectangle when no
        /// candidate larger than 100x100 px was found.</returns>
        public static Rectangle detectFace(Bitmap bitmap)
        {
            var rectangle = new Rectangle();
            var cascade   = new FaceHaarCascade();
            var detector  = new HaarObjectDetector(cascade, 600);

            detector.SearchMode            = ObjectDetectorSearchMode.Default;
            detector.ScalingFactor         = 1.5F;
            detector.ScalingMode           = ObjectDetectorScalingMode.SmallerToGreater;
            detector.UseParallelProcessing = true;
            detector.Suppression           = 3;

            var faceObjects   = detector.ProcessFrame(bitmap);
            var possibleFaces = new List <Rectangle>();

            foreach (var face in faceObjects)
            {
                // Ignore small detections, which are usually false positives.
                if (face.Width > 100 && face.Height > 100)
                {
                    possibleFaces.Add(face);
                }
            }
            if (possibleFaces.Count > 0)
            {
                int x      = possibleFaces.Sum((r) => r.X) / possibleFaces.Count;
                int y      = possibleFaces.Sum((r) => r.Y) / possibleFaces.Count;
                int width  = possibleFaces.Sum((r) => r.Width) / possibleFaces.Count;
                // BUG FIX: the average height previously summed r.Width,
                // producing a square whenever widths and heights differed.
                int height = possibleFaces.Sum((r) => r.Height) / possibleFaces.Count;
                rectangle = new Rectangle(x, y, width, height);
            }
            return(rectangle);
        }
コード例 #7
0
        public void ProcessFrame2()
        {
            // Verify detection counts and the primary face location on the
            // greyscale Lena image under two search modes.
            HaarCascade cascade = new FaceHaarCascade();
            HaarObjectDetector target = new HaarObjectDetector(cascade,
                                                               30, ObjectDetectorSearchMode.NoOverlap);

            Bitmap bmp = Properties.Resources.lena_gray;

            target.ProcessFrame(bmp);

            // NoOverlap: exactly one face, at the known position.
            Assert.AreEqual(1, target.DetectedObjects.Length);
            Rectangle face = target.DetectedObjects[0];
            Assert.AreEqual(255, face.X);
            Assert.AreEqual(225, face.Y);
            Assert.AreEqual(123, face.Width);
            Assert.AreEqual(123, face.Height);


            // Default mode also reports overlapping candidates; the first
            // detection is still the same face.
            target = new HaarObjectDetector(cascade,
                                            30, ObjectDetectorSearchMode.Default);

            target.ProcessFrame(bmp);

            Assert.AreEqual(6, target.DetectedObjects.Length);
            face = target.DetectedObjects[0];
            Assert.AreEqual(255, face.X);
            Assert.AreEqual(225, face.Y);
            Assert.AreEqual(123, face.Width);
            Assert.AreEqual(123, face.Height);
        }
コード例 #8
0
ファイル: Form1.cs プロジェクト: homerokzam/AccordNet
        /// <summary>
        /// Opens the first video capture device and wires a frame handler that
        /// detects faces on each frame and shows the annotated image in the
        /// given PictureBox.
        /// </summary>
        /// <param name="pb">Control that receives each processed frame.</param>
        public void Init(PictureBox pb)
        {
            FilterInfoCollection videoDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice);
            VideoCaptureDevice   videoSource  = new VideoCaptureDevice(videoDevices[0].MonikerString);

            // PERF FIX: build the cascade and detector once, not once per
            // frame — cascade construction is expensive and the detector is
            // reusable across frames.
            FaceHaarCascade cascade     = new FaceHaarCascade();
            HaarObjectDetector detector = new HaarObjectDetector(cascade, 80);
            detector.UseParallelProcessing = true;

            videoSource.NewFrame += new NewFrameEventHandler((sender, eventArgs) =>
            {
                Bitmap image = (Bitmap)eventArgs.Frame.Clone();

                Rectangle[] faces = detector.ProcessFrame(image);

                // LEAK FIX: the Pen was created per face and never disposed;
                // both GDI+ objects are now released deterministically.
                using (Graphics g = Graphics.FromImage(image))
                using (Pen p = new Pen(Color.Red, 10f))
                {
                    foreach (var face in faces)
                    {
                        g.DrawRectangle(p, face);
                    }
                }

                pb.Image = image;
            });

            videoSource.DesiredFrameRate = 1;
            // NOTE(review): a desired frame size of 1x500 looks suspicious —
            // confirm the intended capture resolution.
            videoSource.DesiredFrameSize = new Size(1, 500);
            videoSource.Start();
        }
コード例 #9
0
        private void button1_Click(object sender, EventArgs e)
        {
            // Pull detector settings from the UI controls, then time one
            // detection pass over the current picture.
            detector.SearchMode            = (ObjectDetectorSearchMode)cbMode.SelectedValue;
            detector.ScalingMode           = (ObjectDetectorScalingMode)cbScaling.SelectedValue;
            detector.ScalingFactor         = 1.5f;
            detector.UseParallelProcessing = cbParallel.Checked;
            detector.Suppression           = 2;

            var stopwatch = Stopwatch.StartNew();

            // Process frame to detect objects
            Rectangle[] detections = detector.ProcessFrame(picture);

            stopwatch.Stop();

            // Draw markers over the detections when there are any.
            if (detections.Length != 0)
            {
                var marker = new RectanglesMarker(detections, Color.Fuchsia);
                pictureBox1.Image = marker.Apply(picture);
            }

            toolStripStatusLabel1.Text = string.Format("Completed detection of {0} objects in {1}.",
                                                       detections.Length, stopwatch.Elapsed);
        }
コード例 #10
0
        /// <summary>
        /// Downloads a test image, detects faces on it and saves a copy with
        /// noise applied over the detected regions.
        /// (Large blocks of dead, commented-out experiments were removed.)
        /// </summary>
        static void Main(string[] args)
        {
            #region Detect faces

            // LEAK FIX: WebClient is IDisposable and was never disposed.
            // NOTE(review): WebClient is legacy; HttpClient is preferred in new code.
            byte[] bytes;
            using (WebClient wc = new WebClient())
            {
                bytes = wc.DownloadData("https://scontent-fra3-1.xx.fbcdn.net/v/t1.0-1/c0.46.200.200/1724176_10151691353752537_1722497807_n.jpg?oh=19d89af364674bd704cd38613135b4e1&oe=583097F3");
            }

            // The stream must stay open for the lifetime of the Bitmap
            // (GDI+ may read from it lazily), so it is intentionally not
            // disposed before the bitmap's last use.
            MemoryStream ms      = new MemoryStream(bytes);
            Bitmap       picture = new Bitmap(ms);

            HaarObjectDetector detector;
            HaarCascade        cascade = new FaceHaarCascade();
            detector = new HaarObjectDetector(cascade, 30);

            detector.SearchMode            = ObjectDetectorSearchMode.NoOverlap;
            detector.ScalingMode           = ObjectDetectorScalingMode.GreaterToSmaller;
            detector.ScalingFactor         = 1.5f;
            detector.UseParallelProcessing = true;

            // Detect faces, add noise over them, then save the result.
            Rectangle[] objects = detector.ProcessFrame(picture);

            RandomJitter.ApplyAddNoise(ref picture, objects);
            picture.Save("result.png");

            #endregion
        }
コード例 #11
0
        /// <summary>
        /// Checks whether the photo area of a passport scan contains a face.
        /// The crop offsets are empirical fractions of the page dimensions.
        /// </summary>
        /// <param name="imagePassport">Full passport scan.</param>
        /// <returns>true when at least one face is detected in the photo region.</returns>
        public static bool HaveFace(Bitmap imagePassport)
        {
            Rectangle cloneRect = new Rectangle(
                (int)(imagePassport.Width * 0.05),
                (int)(imagePassport.Height / 2.0 + imagePassport.Width * 0.129),
                (int)(imagePassport.Width * 0.28),
                (int)(imagePassport.Width * 0.357));

            // LEAK FIX: the cropped bitmap was never disposed.
            using (Bitmap cloneBitmap = imagePassport.Clone(cloneRect, imagePassport.PixelFormat))
            {
                HaarObjectDetector faceDetector = new HaarObjectDetector(new FaceHaarCascade(), minSize: 70, searchMode: ObjectDetectorSearchMode.Average);
                // detect faces; Any() avoids enumerating the whole result just to count it
                return faceDetector.ProcessFrame(cloneBitmap).Any();
            }
        }
コード例 #12
0
        /// <summary>
        /// Detects a single face on the image in pictureBox1, shows the marked
        /// picture there and the cropped face in pictureBox2, then prepares the
        /// form fields for saving the trained face.
        /// </summary>
        private void btnTrainFace_Click(object sender, EventArgs e)
        {
            try
            {
                // Single-face search, scanning from large windows down to small.
                detector                       = new HaarObjectDetector(cascade, 30);
                detector.SearchMode            = ObjectDetectorSearchMode.Single;//.NoOverlap;//.Default; //.Average;
                detector.ScalingFactor         = 1.5f;
                detector.ScalingMode           = ObjectDetectorScalingMode.GreaterToSmaller;
                detector.UseParallelProcessing = true;
                detector.Suppression           = 3;

                Bitmap            picture = new Bitmap(pictureBox1.Image);
                Image <Bgr, byte> Frame   = new Image <Bgr, byte>(picture);

                // Time the detection pass (sw is measured but not displayed).
                Stopwatch   sw          = Stopwatch.StartNew();
                Rectangle[] faceObjects = detector.ProcessFrame(picture);
                sw.Stop();

                if (faceObjects.Length > 0)
                {
                    // Mark the detected face(s) on the displayed picture.
                    RectanglesMarker marker = new RectanglesMarker(faceObjects, Color.Fuchsia);
                    pictureBox1.Image = marker.Apply(picture);

                    //Graphics g = Graphics.FromImage(pictureBox1.Image);
                    // Crop each detected face into its own bitmap and show it
                    // in pictureBox2.
                    // NOTE(review): with multiple faces only the last crop stays
                    // visible, and the intermediate Bitmaps (`c`, `bmp`) are
                    // never disposed.
                    foreach (var face in faceObjects)
                    {
                        //g.DrawRectangle(Pens.DeepSkyBlue, face);
                        Frame.Draw(face, new Bgr(Color.Red), 3);
                        Bitmap   c   = Frame.ToBitmap();
                        Bitmap   bmp = new Bitmap(face.Width, face.Height);
                        Graphics gg  = Graphics.FromImage(bmp);
                        gg.DrawImage(c, 0, 0, face, GraphicsUnit.Pixel);
                        pictureBox2.Image = bmp;
                        //bmp.Save("myface(accord) " + DateTime.Now.Second.ToString());
                        gg.Dispose();
                    }
                    //g.Dispose();
                    //label1.Text = "Completed operation!! " + faceObjects.Length.ToString() + " Face detected";
                    MessageBox.Show("Train Face operation successful!!! " + faceObjects.Length.ToString() + " Face detected", "Train face", MessageBoxButtons.OK, MessageBoxIcon.Information);
                    // Prepare the form state for saving the newly trained face.
                    txtFaceId.Text       = genFaceId();
                    txtUsername.Text     = "User" + txtFaceId.Text;
                    btnUpload.Enabled    = false;
                    btnTrainFace.Enabled = false;
                    btnSave.Enabled      = true;
                }
                else
                {
                    MessageBox.Show("Image cannot be trained!!! No face detected in the current image", "Fail to Train face", MessageBoxButtons.OK, MessageBoxIcon.Error);
                }
            }
            catch (Exception er)
            {
                MessageBox.Show(er.Message, "Face Detection and Recognition Failure", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
コード例 #13
0
        /// <summary>
        /// Processes the image.
        /// </summary>
        /// <param name="factory">
        /// The current instance of the <see cref="T:ImageProcessor.ImageFactory"/> class containing
        /// the image to process.
        /// </param>
        /// <returns>
        /// The processed image from the current instance of the <see cref="T:ImageProcessor.ImageFactory"/> class.
        /// </returns>
        public Image ProcessImage(ImageFactory factory)
        {
            Bitmap newImage = null;
            Bitmap grey     = null;
            Image  image    = factory.Image;

            try
            {
                HaarCascade cascade = this.DynamicParameter;

                // Build a greyscale working copy at the same resolution.
                grey = new Bitmap(image.Width, image.Height);
                grey.SetResolution(image.HorizontalResolution, image.VerticalResolution);
                // NOTE(review): if TransformImage returns a new bitmap, the one
                // allocated above is dropped unreferenced — confirm its contract.
                grey = MatrixFilters.GreyScale.TransformImage(image, grey);

                HaarObjectDetector detector = new HaarObjectDetector(cascade)
                {
                    SearchMode    = ObjectDetectorSearchMode.NoOverlap,
                    ScalingMode   = ObjectDetectorScalingMode.GreaterToSmaller,
                    ScalingFactor = 1.5f
                };

                // Detect objects on the greyscale copy, then release it.
                Rectangle[] rectangles = detector.ProcessFrame(grey);
                grey.Dispose();
                grey = null; // BUG FIX: prevents a double-dispose in the catch block below

                newImage = new Bitmap(image);
                newImage.SetResolution(image.HorizontalResolution, image.VerticalResolution);
                // BUG FIX: Graphics.DrawRectangles throws ArgumentException on
                // an empty array, which turned "no detections" into an
                // ImageProcessingException; only draw when there is something.
                if (rectangles.Length > 0)
                {
                    using (Graphics graphics = Graphics.FromImage(newImage))
                    {
                        using (Pen whitePen = new Pen(Color.White))
                        {
                            whitePen.Width = 4;
                            graphics.DrawRectangles(whitePen, rectangles);
                        }
                    }
                }

                image.Dispose();
                image = newImage;
            }
            catch (Exception ex)
            {
                if (grey != null)
                {
                    grey.Dispose();
                }

                if (newImage != null)
                {
                    newImage.Dispose();
                }

                throw new ImageProcessingException("Error processing image with " + this.GetType().Name, ex);
            }

            return(image);
        }
コード例 #14
0
        private Bitmap FaceFind(Bitmap bmpBitmap)
        {
            // Detect at most one face (minimum window 300 px) and draw a red
            // rectangle over it directly on the input bitmap.
            HaarObjectDetector faceDetector = new HaarObjectDetector(
                new FaceHaarCascade(), minSize: 300, searchMode: ObjectDetectorSearchMode.Single);

            Rectangle[] faces = faceDetector.ProcessFrame(bmpBitmap);

            RectanglesMarker faceMarker = new RectanglesMarker(Color.Red);
            faceMarker.Rectangles = faces;
            faceMarker.ApplyInPlace(bmpBitmap);

            return bmpBitmap;
        }
コード例 #15
0
 /// <summary>
 /// Process an image and obtain haar features.
 /// </summary>
 /// <param name="input">Image to have face detection performed on.</param>
 /// <returns>Rectangle[] which represents all detected faces, or null on failure.</returns>
 private Rectangle[] ProcessImage(double[,] input)
 {
     try {
         // BUG FIX: Task.Run(...).Result wrapped a synchronous call in a task
         // only to block on it — pure overhead plus a deadlock risk on a
         // synchronization context. Call the detector directly instead.
         return(_faceDetector.ProcessFrame(input));
     }
     catch (Exception) { // variable removed: `e` was unused (compiler warning)
         // Best-effort: detection failures are deliberately swallowed.
     }
     return(null);
 }
コード例 #16
0
 internal List <Rectangle> ExtractFaces(Bitmap picture, FaceDetectorParameters faceDetectorParameters)
 {
     // Push the caller-supplied tuning values onto the shared detector.
     int minimum = faceDetectorParameters.MinimumSize;
     _detector.MinSize               = new Size(minimum, minimum);
     _detector.ScalingFactor         = faceDetectorParameters.ScalingFactor;
     _detector.ScalingMode           = faceDetectorParameters.ScalingMode;
     _detector.SearchMode            = faceDetectorParameters.SearchMode;
     _detector.UseParallelProcessing = faceDetectorParameters.UseParallelProcessing;
     _detector.MaxSize               = new Size(600, 600); // fixed upper bound on face size
     _detector.Suppression           = faceDetectorParameters.Suppression;

     // Run detection, forwarding progress through the logging callback.
     Rectangle[] faces = _detector.ProcessFrame(picture, x => { Logging.Log(x); });
     return faces.ToList();
 }
コード例 #17
0
        /// <summary>
        /// Runs the face detector over every frame of the bundled "crowd.mp4"
        /// test video and prints per-frame detection counts plus total time.
        /// </summary>
        static void TestHaar()
        {
            // The FFMPEG-backed video reader requires a 32-bit process here.
            if (Environment.Is64BitProcess)
            {
                throw new Exception("Run in 32-bits");
            }

            // Let's test the detector using a sample video from
            // the collection of test videos in the framework:
            TestVideos ds       = new TestVideos();
            string     fileName = ds["crowd.mp4"];

            // In this example, we will be creating a cascade for a Face detector:
            var cascade = new Accord.Vision.Detection.Cascades.FaceHaarCascade();

            // Now, create a new Haar object detector with the cascade:
            var detector = new HaarObjectDetector(cascade, minSize: 25,
                                                  searchMode: ObjectDetectorSearchMode.Average,
                                                  scalingMode: ObjectDetectorScalingMode.SmallerToGreater,
                                                  scaleFactor: 1.1f)
            {
                Suppression = 5 // This should make sure we only report regions as faces if
                                // they have been detected at least 5 times within different cascade scales.
            };

            // Now, let's open the video using FFMPEG:
            var video = new VideoFileReader();

            video.Open(fileName);

            Stopwatch sw = Stopwatch.StartNew();

            // Now, for each frame of the video
            for (int frameIndex = 0; frameIndex < video.FrameCount; frameIndex++)
            {
                // LEAK FIX: every decoded frame is a fresh Bitmap and was never
                // disposed, leaking unmanaged GDI+ memory on each iteration.
                using (Bitmap bmp = video.ReadVideoFrame(frameIndex))
                {
                    // Feed the frame to the detector
                    Rectangle[] faces = detector.ProcessFrame(bmp);

                    Console.WriteLine(faces.Length);
                    Console.WriteLine(bmp.Flags);
                }
            }

            sw.Stop();

            Console.WriteLine(sw.Elapsed);

            video.Close();
        }
コード例 #18
0
        public void MinSizeTest()
        {
            // Face detector over the Lena test image; only MinSize varies between runs.
            HaarCascade cascade = new FaceHaarCascade();
            HaarObjectDetector target = new HaarObjectDetector(cascade,
                                                               50, ObjectDetectorSearchMode.Default);

            Bitmap bmp = Accord.Imaging.Image.Clone(Resources.lena_color);

            // Each case: set the minimum window, detect, check the count and
            // that every hit honours the lower bound.
            target.MinSize = new Size(10, 60);
            Rectangle[] result = target.ProcessFrame(bmp);
            Assert.AreEqual(3, result.Length);
            foreach (Rectangle detection in result)
            {
                Assert.IsTrue(detection.Width >= target.MinSize.Width);
                Assert.IsTrue(detection.Height >= target.MinSize.Height);
            }


            target.MinSize = new Size(85, 85);
            result = target.ProcessFrame(bmp);
            Assert.AreEqual(2, result.Length);
            foreach (Rectangle detection in result)
            {
                Assert.IsTrue(detection.Width >= target.MinSize.Width);
                Assert.IsTrue(detection.Height >= target.MinSize.Height);
            }

            target.MinSize = new Size(1, 1);
            result = target.ProcessFrame(bmp);
            Assert.AreEqual(4, result.Length);
            foreach (Rectangle detection in result)
            {
                Assert.IsTrue(detection.Width >= target.MinSize.Width);
                Assert.IsTrue(detection.Height >= target.MinSize.Height);
            }
        }
コード例 #19
0
        /// <summary>
        /// Detects a face on a 160x120 downsample of the frame; when found,
        /// initializes the tracker around it and returns the frame with the
        /// tracking window drawn in.
        /// </summary>
        /// <param name="_bitmap">Incoming video frame.</param>
        /// <returns>The marked frame when a face was found; otherwise the original bitmap.</returns>
        private Bitmap Detecting(Bitmap _bitmap)
        {
            UnmanagedImage im = UnmanagedImage.FromManagedImage(_bitmap);

            // Factors mapping coordinates on the 160x120 detection image back
            // to the full-resolution frame.
            float xscale = im.Width / 160f;
            float yscale = im.Height / 120f;

            // Detection runs on a small nearest-neighbour downsample for speed.
            ResizeNearestNeighbor resize     = new ResizeNearestNeighbor(160, 120);
            UnmanagedImage        downsample = resize.Apply(im);


            Rectangle[] regions = detector.ProcessFrame(downsample);


            if (regions.Length > 0)
            {
                tracker.Reset();

                // Reduce the face size to avoid tracking background:
                // start from a 1x1 rectangle at the face centre (rescaled to
                // full resolution), then inflate to a fraction of the face size.
                Rectangle window = new Rectangle(
                    (int)((regions[0].X + regions[0].Width / 2f) * xscale),
                    (int)((regions[0].Y + regions[0].Height / 2f) * yscale),
                    1, 1);

                window.Inflate(
                    (int)(0.2f * regions[0].Width * xscale),
                    (int)(0.4f * regions[0].Height * yscale));

                this.FaceTracked = window;

                // Initialize tracker on the full-resolution frame.
                tracker.SearchWindow = window;
                tracker.ProcessFrame(im);

                // Draw the tracking window onto the frame before returning it.
                marker = new RectanglesMarker(window);
                marker.ApplyInPlace(im);

                // (Bitmap) Helpers.BitmapHelper.ByteArrayToImage(Helpers.RijndaelHelper.EncryptBytes(Helpers.BitmapHelper.ImageToByte(im.ToManagedImage()), "fzafa", "afzd"))

                this.isTracking = true;

                return(im.ToManagedImage());
            }
            else
            {
                // No face this frame: report detection inactive and hand the
                // caller the original bitmap unchanged.
                this.isDetecting = false;
                return(_bitmap);
            }
        }
コード例 #20
0
        /// <summary>
        /// Grabs the next frame from the capture device, detects faces on its
        /// greyscale version and shows the annotated frame in the viewer.
        /// </summary>
        private void timer1_Tick(object sender, EventArgs e)
        {
            // BUG FIX: the original allocated a throwaway Mat and converted the
            // frame BEFORE checking it for null, so the guard could never
            // prevent a NullReferenceException when QueryFrame returns null.
            Mat frame = cap.QueryFrame();

            if (frame != null)
            {
                Image <Bgr, byte>  frameImage     = frame.ToImage <Bgr, byte>();
                Image <Gray, byte> grayFrameImage = frameImage.Convert <Gray, byte>();

                Rectangle[]      faces  = detector.ProcessFrame(grayFrameImage.ToBitmap());
                RectanglesMarker marker = new RectanglesMarker(faces, Color.Fuchsia);
                imageViewer.Image = new Image <Bgr, byte>(marker.Apply(frame.Bitmap));
            }
        }
コード例 #21
0
        public void ProcessFrame()
        {
            // Exactly one face is expected on the color Lena image, at a
            // known location and size.
            HaarCascade cascade = new FaceHaarCascade();
            HaarObjectDetector target = new HaarObjectDetector(cascade,
                                                               50, ObjectDetectorSearchMode.NoOverlap);

            Bitmap bmp = Properties.Resources.lena_color;

            target.ProcessFrame(bmp);

            Assert.AreEqual(1, target.DetectedObjects.Length);
            Rectangle face = target.DetectedObjects[0];
            Assert.AreEqual(126, face.X);
            Assert.AreEqual(112, face.Y);
            Assert.AreEqual(59, face.Width);
            Assert.AreEqual(59, face.Height);
        }
コード例 #22
0
ファイル: HomeController.cs プロジェクト: Mo-rahat/MyProject
        public static List <Rectangle> DetectFaces(Bitmap image)
        {
            // Average search over small-to-greater scales, with duplicate
            // suppression, run on an equalized greyscale copy of the input.
            HaarObjectDetector detector = new HaarObjectDetector(
                new FaceHaarCascade(), 20, ObjectDetectorSearchMode.Average, 1.1f,
                ObjectDetectorScalingMode.SmallerToGreater);
            detector.UseParallelProcessing = true;
            detector.Suppression           = 2;

            // Greyscale + histogram equalization before running the cascade.
            var grayImage = Grayscale.CommonAlgorithms.BT709.Apply(image);
            HistogramEqualization filter = new HistogramEqualization();
            filter.ApplyInPlace(grayImage);

            List <Rectangle> xfaces = new List <Rectangle>();
            xfaces.AddRange(detector.ProcessFrame(grayImage));
            return xfaces;
        }
コード例 #23
0
        public Bitmap Detect()
        {
            // Tune the shared detector, run it over the current image and
            // overlay yellow markers on any detections.
            _detector.ScalingFactor         = 1.5f;
            _detector.UseParallelProcessing = true;

            Rectangle[] detections = _detector.ProcessFrame(image);

            if (detections.Length != 0)
            {
                image = new RectanglesMarker(detections, Color.Yellow).Apply(image);
            }

            return image;
        }
コード例 #24
0
        /// <summary>
        /// Detects a single face on the image in pictureBox2, marks it, shows
        /// the detection count, and saves a cropped copy of each face.
        /// </summary>
        private void button1_Click(object sender, EventArgs e)
        {
            try
            {
                detector                       = new HaarObjectDetector(cascade, 30);
                detector.SearchMode            = ObjectDetectorSearchMode.Single;            //.Default;//.NoOverlap;//
                detector.ScalingFactor         = 2.5f;
                detector.ScalingMode           = ObjectDetectorScalingMode.SmallerToGreater; //.GreaterToSmaller;
                detector.UseParallelProcessing = true;
                detector.Suppression           = 3;

                Bitmap            picture = new Bitmap(pictureBox2.Image);
                Image <Bgr, byte> Frame   = new Image <Bgr, byte>(picture);

                Stopwatch sw = Stopwatch.StartNew();

                Rectangle[] faceObjects = detector.ProcessFrame(picture);

                sw.Stop();

                if (faceObjects.Length > 0)
                {
                    RectanglesMarker marker = new RectanglesMarker(faceObjects, Color.Fuchsia);
                    pictureBox2.Image = marker.Apply(picture);
                }
                label1.Text = "Operation Completed!!! " + faceObjects.Length.ToString() + " Face detected";

                // LEAK FIX: `g` was never disposed (its Dispose call was
                // commented out) and `gg` leaked on every iteration; both are
                // now released deterministically.
                using (Graphics g = Graphics.FromImage(pictureBox2.Image))
                {
                    foreach (var face in faceObjects)
                    {
                        g.DrawRectangle(Pens.DeepSkyBlue, face);
                        Frame.Draw(face, new Bgr(Color.Red), 3);
                        Bitmap   c   = Frame.ToBitmap();
                        Bitmap   bmp = new Bitmap(face.Width, face.Height);
                        using (Graphics gg = Graphics.FromImage(bmp))
                        {
                            gg.DrawImage(c, 0, 0, face, GraphicsUnit.Pixel);
                        }
                        //pictureBox2.Image = bmp;
                        // NOTE(review): the saved file name has no extension —
                        // confirm the intended image format.
                        bmp.Save("myface(accord) " + DateTime.Now.Second.ToString());
                        MessageBox.Show("Face Detected!!!, Face Save as:" + "myface(accord) " + DateTime.Now.Second.ToString(), "Face Detection Successfully", MessageBoxButtons.OK, MessageBoxIcon.Information);
                    }
                }
            }
            catch (Exception er)
            {
                MessageBox.Show(er.Message, "Face Detection Failure", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
コード例 #25
0
 /// <summary>
 /// Runs the configured face detector over <paramref name="picture"/> and
 /// classifies the outcome by the number of faces found.
 /// </summary>
 /// <param name="picture">Frame to scan for faces.</param>
 /// <returns>
 /// <see cref="FaceDetectionResult.FaceNotFound"/> when no face is detected,
 /// <see cref="FaceDetectionResult.OneFaceFound"/> for exactly one face,
 /// <see cref="FaceDetectionResult.MultipleFacesFound"/> for more than one,
 /// or <see cref="FaceDetectionResult.Error"/> if detection throws.
 /// </returns>
 public FaceDetectionResult Process(Bitmap picture)
 {
     try
     {
         // Process frame to detect objects
         Rectangle[] objects = detector.ProcessFrame(picture);
         if (objects.Length == 0)
         {
             return FaceDetectionResult.FaceNotFound;
         }
         return objects.Length > 1 ? FaceDetectionResult.MultipleFacesFound : FaceDetectionResult.OneFaceFound;
     }
     catch (Exception)
     {
         // Detector failures are deliberately mapped to a result code;
         // callers are not expected to handle exceptions from this method.
         return FaceDetectionResult.Error;
     }
 }
コード例 #26
0
        /// <summary>
        /// Called when videoPlayer receives a new frame. Every 5th frame the
        /// face and nose detectors are re-run; on every frame the mustache
        /// sprite is drawn over the most recently detected nose position.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="image">Current video frame; drawn on in place.</param>
        private void videoPlayer_NewFrame(object sender, ref Bitmap image)
        {
            // detection runs on a grayscale copy; drawing happens on the color frame
            var grayscale = new GrayscaleBT709();
            var frame     = grayscale.Apply(image);

            // detection is expensive, so only re-detect once every 5 frames;
            // in between we keep drawing at the last known nose location
            if (frameCounter++ % 5 == 0)
            {
                // scan the image and grab the first face
                var faces = faceDetector.ProcessFrame(frame);
                if (faces.Length > 0)
                {
                    // copy the face region out of the color frame
                    faceRect = faces[0];
                    var face = image.Clone(faces[0], PixelFormat.DontCare);

                    // search within the face for the nose
                    var noses = noseDetector.ProcessFrame(face);
                    if (noses.Length > 0)
                    {
                        // record the new nose location in full-frame coordinates
                        noseRect = noses[0];
                        noseRect.Offset(faceRect.Location);
                    }
                }
            }

            // draw the mustache
            using (Graphics g = Graphics.FromImage(image))
            {
                // the mustache is drawn twice as wide as the nose
                // (x2 - x1 below works out to 2 * noseRect.Width),
                // preserving the sprite's aspect ratio
                int mustacheWidth  = 2 * noseRect.Width;
                int mustacheHeight = mustacheWidth * mustache.Height / mustache.Width;

                // center the mustache horizontally on the nose and
                // vertically on the bottom edge of the nose
                int x1 = noseRect.X - (mustacheWidth / 4);
                int x2 = noseRect.X + noseRect.Width + (mustacheWidth / 4);
                int y1 = noseRect.Y + noseRect.Height - (mustacheHeight / 2);
                int y2 = noseRect.Y + noseRect.Height + (mustacheHeight / 2);

                // draw the mustache
                g.DrawImage(mustache, x1, y1, x2 - x1, y2 - y1);
            }
        }
コード例 #27
0
        /// <summary>
        /// Documentation example (extracted via the doc_example region) that
        /// doubles as a regression test: the documented detector setup must
        /// find exactly one face in the Lena image at the known coordinates.
        /// </summary>
        public void ProcessFrame()
        {
            #region doc_example
            // In order to use a HaarObjectDetector, first we have to tell it
            // which type of objects we would like to detect. And in a Haar detector,
            // different object classifiers are specified in terms of a HaarCascade.

            // The framework comes with some built-in cascades for common body
            // parts, such as Face and Nose. However, it is also possible to
            // load a cascade from cascade XML definitions in OpenCV 2.0 format.

            // In this example, we will be creating a cascade for a Face detector:
            var cascade = new Accord.Vision.Detection.Cascades.FaceHaarCascade();

            // Note: In the case we would like to load it from XML, we could use:
            // var cascade = HaarCascade.FromXml("filename.xml");

            // Now, create a new Haar object detector with the cascade:
            var detector = new HaarObjectDetector(cascade, minSize: 50,
                                                  searchMode: ObjectDetectorSearchMode.NoOverlap);

            // Note that we have specified that we do not want overlapping objects,
            // and that the minimum object an object can have is 50 pixels. Now, we
            // can use the detector to classify a new image. For instance, consider
            // the famous Lena picture:

            Bitmap bmp = Accord.Imaging.Image.Clone(Resources.lena_color);

            // We have to call ProcessFrame to detect all rectangles containing the
            // object we are interested in (which in this case, is the face of Lena):
            Rectangle[] rectangles = detector.ProcessFrame(bmp);

            // The answer will be a single rectangle of dimensions
            //
            //   {X = 126 Y = 112 Width = 59 Height = 59}
            //
            // which indeed contains the only face in the picture.
            #endregion

            // Verify both the returned rectangles and the detector's cached
            // DetectedObjects agree with the documented result.
            Assert.AreEqual(1, detector.DetectedObjects.Length);
            Assert.AreEqual(126, detector.DetectedObjects[0].X);
            Assert.AreEqual(112, detector.DetectedObjects[0].Y);
            Assert.AreEqual(59, detector.DetectedObjects[0].Width);
            Assert.AreEqual(59, detector.DetectedObjects[0].Height);
        }
コード例 #28
0
        /// <summary>
        /// Handles a new frame from the video source: runs the face detector
        /// and steers toward the first detected face. When no face is found,
        /// the previously stored rectangle is reused, so tracking continues
        /// from the last known position.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="eventArgs">Carries the captured frame.</param>
        private void video_NewFrame(object sender, NewFrameEventArgs eventArgs)
        {
            // get new frame
            Bitmap bitmap = eventArgs.Frame;

            // process the frame
            var faces = Detector.ProcessFrame(bitmap);

            // only update the tracked rectangle when a face was actually found
            if (faces.Length > 0)
            {
                FaceRect = faces[0];
            }

            FollowFace(FaceRect);
        }
コード例 #29
0
        /// <summary>
        /// Loads the picture at <paramref name="path"/>, runs Haar-cascade
        /// face detection on it, marks any detected faces, and saves the
        /// annotated result to "file.png".
        /// </summary>
        /// <param name="path">Path of the input image (loaded via GetPicture).</param>
        /// <returns>The input <paramref name="path"/>, unchanged.</returns>
        public static string DetectFace(string path)
        {
            GetPicture(path);

            // Build a face detector with a 30px minimum window; search and
            // scaling modes come from the UI combo-box selections.
            HaarCascade        cascade  = new FaceHaarCascade();
            HaarObjectDetector detector = new HaarObjectDetector(cascade, 30);

            detector.SearchMode    = (ObjectDetectorSearchMode)cbMode;
            detector.ScalingMode   = (ObjectDetectorScalingMode)cbScaling;
            detector.ScalingFactor = 1.5f;

            // Process frame to detect objects
            Rectangle[] objects = detector.ProcessFrame(picture);

            if (objects.Length > 0)
            {
                Console.WriteLine("here");
                // Highlight every detected face on the picture.
                RectanglesMarker marker = new RectanglesMarker(objects, Color.Fuchsia);
                picture = marker.Apply(picture);
            }
            if (picture != null)
            {
                Console.WriteLine("trying to print picture");
                picture.Save("file.png", ImageFormat.Png);
            }
            return path;
        }
コード例 #30
0
        /// <summary>
        /// Entry point: runs Haar-cascade face detection on the bundled Lena
        /// image and, if any faces are found, saves a copy with the detected
        /// rectangles highlighted.
        /// </summary>
        /// <param name="args">Command-line arguments (unused).</param>
        public static void Main(string[] args)
        {
            var picture = Resource.lena_color;

            // Face detector with a 30px minimum detection window;
            // all other detector settings are left at their defaults.
            HaarCascade cascade  = new FaceHaarCascade();
            var         detector = new HaarObjectDetector(cascade, 30);

            Rectangle[] objects = detector.ProcessFrame(picture);

            if (objects.Length > 0)
            {
                RectanglesMarker marker = new RectanglesMarker(objects, Color.Fuchsia);
                var markedup            = marker.Apply(picture);
                // NOTE(review): the output path contains two consecutive
                // separators ({sep}{sep}) — confirm this is intentional.
                markedup.Save($"{basedir}{sep}{sep}markedup.jpg");
            }
        }