Esempio n. 1
0
        /// <summary>
        /// Loads src.jpg, detects faces with a Haar cascade classifier,
        /// marks each detected face with a yellow rectangle, and saves
        /// the annotated image as dst.jpg.
        /// </summary>
        static void Main(string[] args)
        {
            // Load the source image.
            var img = new Bitmap(@"src.jpg");

            // Load the cascade classifier definition from XML.
            var cascadeFace = Accord.Vision.Detection.Cascades.FaceHaarCascade.FromXml(@"haarcascade_frontalface_default.xml");
            // Detector that scans the image using Haar-like features.
            var detectorFace = new Accord.Vision.Detection.HaarObjectDetector(cascadeFace);

            // Detect face locations (returned as Rectangle[]).
            var faces = detectorFace.ProcessFrame(img);

            // Draw the detected face rectangles and save the result.
            var markerFaces = new Accord.Imaging.Filters.RectanglesMarker(faces, Color.Yellow);
            // BUG FIX: RectanglesMarker.Apply returns a NEW Bitmap; the original
            // code reassigned `img` and leaked the source bitmap. Dispose both.
            using (var marked = markerFaces.Apply(img))
            {
                marked.Save(@"dst.jpg");
            }
            img.Dispose();
        }
Esempio n. 2
0
        /// <summary>
        /// Loads src.jpg, runs Haar-cascade face detection, highlights the
        /// detected faces in yellow, and writes the output to dst.jpg.
        /// </summary>
        static void Main(string[] args)
        {
            // Load the source image; `using` guarantees disposal.
            using (var img = new Bitmap(@"src.jpg"))
            {
                // Load the cascade classifier definition from XML.
                var cascadeFace = Accord.Vision.Detection.Cascades.FaceHaarCascade.FromXml(@"haarcascade_frontalface_default.xml");
                // Detector that scans the image using Haar-like features.
                var detectorFace = new Accord.Vision.Detection.HaarObjectDetector(cascadeFace);

                // Detect face locations (returned as Rectangle[]).
                var faces = detectorFace.ProcessFrame(img);

                // Draw the detected face rectangles and save the result.
                // BUG FIX: Apply() returns a new Bitmap; the original code
                // reassigned `img` and leaked the source bitmap.
                var markerFaces = new Accord.Imaging.Filters.RectanglesMarker(faces, Color.Yellow);
                using (var marked = markerFaces.Apply(img))
                {
                    marked.Save(@"dst.jpg");
                }
            }
        }
 /// <summary>
 /// Configures the marker with a single rectangle and draws it directly
 /// onto the supplied unmanaged image (the image is modified in place).
 /// </summary>
 /// <param name="marker">The rectangle-marker filter to configure and apply.</param>
 /// <param name="um">Unmanaged image that receives the marker.</param>
 /// <param name="rect">The rectangle to draw.</param>
 public static void Set(this Accord.Imaging.Filters.RectanglesMarker marker, ref AForge.Imaging.UnmanagedImage um, Rectangle rect)
 {
     marker.Rectangles = new[] { rect };
     marker.ApplyInPlace(um);
 }
Esempio n. 4
0
        /// <summary>
        /// Image processor and the heart of sensor. For every camera frame it
        /// runs motion detection, frontal/profile face detection, eye detection
        /// and (optionally) face-direction estimation, raising the matching
        /// update events, and finally publishes the (possibly annotated) frame.
        /// </summary>
        /// <param name="sender">object - owner</param>
        /// <param name="eventArgs">NewFrameEventArgs - args, contains frame from camera</param>
        private void processFrame(object sender, NewFrameEventArgs eventArgs)
        {
            Rectangle   rect = eventArgs.Frame.Bounds();
            PixelFormat pf   = eventArgs.Frame.PixelFormat;

            // Two independent copies: `frame` gets markers drawn on it for
            // preview; `frameFace` stays clean so face/eye crops are unmarked.
            Bitmap frame = eventArgs.Frame.Clone(rect, pf);

            // NOTE(review): if these fields/operands are integers this is
            // integer division and the scale factors lose precision — confirm
            // the declared types of scaleX/scaleY/processWidth/processHeight.
            scaleX = frame.Width / processWidth;
            scaleY = frame.Height / processHeight;
            Bitmap frameFace = eventArgs.Frame.Clone(rect, pf);

            if (OnMotionDetected != null)
            {
                var dataMotion = frame.GetDirectAccess();
                var frameUI    = dataMotion.GetUnmanaged();
                // 0.15 = fraction of changed pixels required to report motion.
                updateMotion(motion.ProcessFrame(frameUI) > 0.15);
                frame.UnlockBits(dataMotion);
            }

            if (OnFaceDetected != null)
            {
                var dataFace        = frameFace.GetDirectAccess();
                var faceUI          = dataFace.GetUnmanaged();
                // Downsample before detection to keep the Haar scan cheap;
                // detections are scaled back up with scaleX/scaleY below.
                var downsample      = faceUI.ResizeTo(processWidth, processHeight);
                var faceDetections  = detectorFace.ProcessFrame(downsample);
                var faceDetections2 = detectorFaceProfile.ProcessFrame(downsample);

                if (isPreview)
                {
                    // Mark frontal and profile detections on the preview frame.
                    if (faceDetections.Length > 0)
                    {
                        marker             = new Accord.Imaging.Filters.RectanglesMarker(faceDetections.Scale(scaleX, scaleY));
                        marker.MarkerColor = Color.Yellow;
                        frame = marker.Apply(frame);
                    }

                    if (faceDetections2.Length > 0)
                    {
                        marker             = new Accord.Imaging.Filters.RectanglesMarker(faceDetections2.Scale(scaleX, scaleY));
                        marker.MarkerColor = Color.Yellow;
                        frame = marker.Apply(frame);
                    }
                }

                frameFace.UnlockBits(dataFace);

                if (detectorFace.DetectedObjects != null && detectorFace.DetectedObjects.Length > 0)
                {
                    // Frontal faces: crop each face, then use nose/eye
                    // detections to refine direction and locate the eyes.
                    var faces = detectorFace.DetectedObjects.ToFaces((int)scaleX, (int)scaleY);
                    for (int i = 0; i < faces.Length; i++)
                    {
                        var cutter = new AForge.Imaging.Filters.Crop(faces[i].Bounds);
                        faces[i].FaceImage = cutter.Apply(frameFace);

                        if (searchForFaceDirection)
                        {
                            // A detectable nose implies the face looks at the camera.
                            detectorNose.ProcessFrame(faces[i].FaceImage);
                            if (detectorNose.DetectedObjects.Length > 0)
                            {
                                faces[i].Direction = FaceDirection.Frontal;
                            }
                        }

                        var eyeDetections = detectorEye.ProcessFrame(faces[i].FaceImage);

                        if (eyeDetections.Length > 0)
                        {
                            // Visible eyes also imply a frontal face.
                            // (The original nested `Length >= 1` re-check was
                            // redundant inside this branch and was removed.)
                            faces[i].Direction = FaceDirection.Frontal;

                            Eye[] eyes = new Eye[eyeDetections.Length];
                            for (int ie = 0; ie < eyes.Length; ie++)
                            {
                                eyes[ie]        = new Eye();
                                // Eye coordinates are relative to the face crop;
                                // translate them back into full-frame coordinates.
                                eyes[ie].Left   = faces[i].Left + eyeDetections[ie].X;
                                eyes[ie].Top    = faces[i].Top + eyeDetections[ie].Y;
                                eyes[ie].Width  = eyeDetections[ie].Width;
                                eyes[ie].Height = eyeDetections[ie].Height;
                                var cutter2 = new AForge.Imaging.Filters.Crop(eyes[ie].Bounds);
                                // BUG FIX: previously used `cutter` (the face
                                // crop), so every EyeImage held the whole face;
                                // `cutter2` was created but never used.
                                eyes[ie].EyeImage = cutter2.Apply(frameFace);
                            }

                            if (isPreview)
                            {
                                marker             = new Accord.Imaging.Filters.RectanglesMarker(eyes.toRects());
                                marker.MarkerColor = Color.Orange;
                                frame = marker.Apply(frame);
                            }

                            updateEyeDetected(eyes);
                        }
                    }
                    updateFaceDetected(faces);
                }
                else if (detectorFaceProfile.DetectedObjects != null && detectorFaceProfile.DetectedObjects.Length > 0)
                {
                    // Profile faces: use the ear detectors to tell which way
                    // the head is turned (left ear visible => turned right).
                    var faces = detectorFaceProfile.DetectedObjects.ToFaces((int)scaleX, (int)scaleY);
                    for (int i = 0; i < faces.Length; i++)
                    {
                        var cutter = new AForge.Imaging.Filters.Crop(faces[i].Bounds);
                        faces[i].FaceImage = cutter.Apply(frameFace);

                        if (searchForFaceDirection)
                        {
                            detectorEarLeft.ProcessFrame(faces[i].FaceImage);
                            if (detectorEarLeft.DetectedObjects.Length > 0)
                            {
                                faces[i].Direction = FaceDirection.TurnedRight;
                            }
                            else
                            {
                                detectorEarRight.ProcessFrame(faces[i].FaceImage);
                                if (detectorEarRight.DetectedObjects.Length > 0)
                                {
                                    faces[i].Direction = FaceDirection.TurnedLeft;
                                }
                                else
                                {
                                    faces[i].Direction = FaceDirection.NoInfo;
                                }
                            }
                        }
                    }
                    updateFaceDetected(faces);
                }
            }

            updateFrameReceived(frame);
        }