        // Histogram equalization for a color image: equalize each channel independently.
        public static Mat EqualizeHistForColorImage(Mat src)
        {
            Mat image = new Mat(src.Size(), MatType.MakeType(src.Depth(), src.Channels()));

            // Split overwrites the destination array, so no per-channel pre-allocation is needed.
            Cv2.Split(src, out Mat[] imageChannel);
            for (int i = 0; i < src.Channels(); i++)
            {
                Cv2.EqualizeHist(imageChannel[i], imageChannel[i]);
            }
            Cv2.Merge(imageChannel, image);
            return(image);
        }
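Per-channel equalization can shift hues because B, G, and R are stretched independently. A minimal alternative sketch, assuming OpenCvSharp's CLAHE bindings and an 8-bit BGR input, equalizes only the luminance plane:

        // Sketch: contrast-limited equalization (CLAHE) on the Y plane of YCrCb,
        // which boosts contrast without distorting colors. Assumes 8-bit BGR input.
        public static Mat EqualizeLuminance(Mat src)
        {
            Mat ycrcb = src.CvtColor(ColorConversionCodes.BGR2YCrCb);
            Cv2.Split(ycrcb, out Mat[] planes);
            using (var clahe = Cv2.CreateCLAHE(clipLimit: 2.0, tileGridSize: new Size(8, 8)))
            {
                clahe.Apply(planes[0], planes[0]); // equalize luma only
            }
            Cv2.Merge(planes, ycrcb);
            return ycrcb.CvtColor(ColorConversionCodes.YCrCb2BGR);
        }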
Example #2
        public void Detect(Mat frame)
        {
            var grayImage = new Mat();

            Cv2.CvtColor(frame, grayImage, ColorConversionCodes.BGR2GRAY); // use BGRA2GRAY only for 4-channel input
            Cv2.EqualizeHist(grayImage, grayImage);
            var bodies = cascade.DetectMultiScale(
                image: grayImage, // detect on the equalized grayscale image, not the color frame
                scaleFactor: 1.1,
                minNeighbors: 2,
                flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                minSize: new Size(30, 30)
                );

            //return bodies;
            Console.WriteLine("Detected bodies: {0}", bodies.Length);
            foreach (var faceRect in bodies)
            {
                Cv2.Rectangle(frame, faceRect, Scalar.Red, 2);
            }
        }
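A minimal driver for Detect (a sketch; the cascade field's initialization, the XML path, and the camera index are assumptions):

        // Hypothetical usage: feed webcam frames to Detect and display the annotated result.
        public void Run()
        {
            cascade = new CascadeClassifier("haarcascade_fullbody.xml"); // path is an assumption
            using var capture = new VideoCapture(0);
            using var frame = new Mat();
            while (capture.Read(frame) && !frame.Empty())
            {
                Detect(frame);                   // Detect draws rectangles in place
                Cv2.ImShow("bodies", frame);
                if (Cv2.WaitKey(1) == 27) break; // Esc to quit
            }
        }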
Example #3
        //Face detection
        private void DetectAndSaveImg(Mat image)
        {
            //Convert the image to grayscale
            using (var grayImage = new Mat()) {
                Cv2.CvtColor(image, grayImage, ColorConversionCodes.BGR2GRAY);

                //Flattening the histogram
                Cv2.EqualizeHist(grayImage, grayImage);

                //Face detection; faces smaller than 80x80 are excluded
                var mats = Cascade.DetectMultiScale(grayImage, 1.1, 3, 0, new Size(80, 80))
                           //Copy each rect (adjust here to tighten the crop to facial parts)
                           .Select(rect => new Rect(rect.X, rect.Y, rect.Width, rect.Height))
                           //Cut each face region out of the original image
                           .Select(image.Clone)
                           //Collect into a list
                           .ToList();

                SaveImg(mats);
            }
        }
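SaveImg is not shown in this snippet; a plausible sketch (the file-naming scheme is an assumption):

        // Hypothetical SaveImg: write each face crop to disk with an indexed name.
        private void SaveImg(List<Mat> mats)
        {
            for (int i = 0; i < mats.Count; i++)
            {
                Cv2.ImWrite($"face_{i}.png", mats[i]);
                mats[i].Dispose(); // release the native buffers once saved
            }
        }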
        public override void Apply(Mat input)
        {
            _start = DateTime.Now;
            Input  = input;
            if (IsActive)
            {
                Mat equ = new Mat();
                Cv2.CvtColor(input, equ, ColorConversionCodes.BGR2YCrCb);

                // Equalize only the luma (Y) plane so chroma, and hence color, is preserved.
                Cv2.Split(equ, out Mat[] channels);
                Cv2.EqualizeHist(channels[0], channels[0]);
                Cv2.Merge(channels, equ);
                Cv2.CvtColor(equ, Output, ColorConversionCodes.YCrCb2BGR);
            }
            else
            {
                Cv2.CopyTo(Input, Output);
            }

            base.Apply(input);
        }
Example #5
        private void button9_Click(object sender, EventArgs e)
        {
            Mat OriginalImage = new Mat(fileName, ImreadModes.Color);
            Mat hsvIm         = new Mat();

            Cv2.CvtColor(OriginalImage, hsvIm, ColorConversionCodes.BGR2HSV);

            // Split HSV and equalize only the V (brightness) channel
            Mat[] hsv = Cv2.Split(hsvIm);
            Cv2.EqualizeHist(hsv[2], hsv[2]);
            Mat res = new Mat();

            Cv2.Merge(hsv, res);


            Bitmap bp = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(hsv[2]);

            createHistV(bp);

            Cv2.CvtColor(res, res, ColorConversionCodes.HSV2BGR);
            showFilter(res, "brightness equalize");
        }
Example #6
        public static List <OpenCvSharp.Rect> DetectFaces(OpenCvSharp.Mat image)
        {
            List <OpenCvSharp.Rect> faces = new List <OpenCvSharp.Rect>();
            var facesCascade = HttpContext.Current.Server.MapPath("~/face.xml");

            using (OpenCvSharp.CascadeClassifier face = new OpenCvSharp.CascadeClassifier(facesCascade))
            {
                using (OpenCvSharp.Mat ugray = new OpenCvSharp.Mat())
                {
                    Cv2.CvtColor(image, ugray, ColorConversionCodes.BGR2GRAY); // assumes a 3-channel BGR input
                    Cv2.EqualizeHist(ugray, ugray);
                    var facesDetected = face.DetectMultiScale(
                        image: ugray,
                        scaleFactor: 1.1,
                        minNeighbors: 10,
                        flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                        minSize: new OpenCvSharp.Size(20, 20));
                    faces.AddRange(facesDetected);
                }
            }
            return(faces);
        }
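A caller might annotate the returned rectangles on the original image (a short usage sketch):

        // Hypothetical usage of DetectFaces: draw each detected face.
        var faces = DetectFaces(image);
        foreach (var rect in faces)
        {
            Cv2.Rectangle(image, rect, Scalar.Green, 2);
        }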
Example #7
        private Rect[] FaceDetect(Mat source_image)
        {
            // Convert to grayscale directly; this avoids the BMP encode/decode round-trip
            // of source_image.ToBytes(".bmp") + Mat.FromImageData(..., ImreadModes.Grayscale).
            Mat frame_img_gray = new Mat();
            Cv2.CvtColor(source_image, frame_img_gray, ColorConversionCodes.BGR2GRAY);

            Cv2.EqualizeHist(frame_img_gray, frame_img_gray);
            var cascade = new CascadeClassifier("C://Users//dev-yym//source//repos//opencv_003//FaceML_Data//haarcascade_frontalface_alt.xml");
            //var nestedCascade = new CascadeClassifier("C://Users//dev-yym//source//repos//opencv_002//FaceML_Data//haarcascade_eye_tree_eyeglasses.xml");

            var faces = cascade.DetectMultiScale(
                image: frame_img_gray,
                scaleFactor: 1.1,
                minNeighbors: 2,
                flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                minSize: new OpenCvSharp.Size(30, 30)
                );

            return(faces);
        }
        private void button1_Click(object sender, EventArgs e)
        {
            video = new VideoCapture("D:\\project\\c#\\Day021\\vtest.avi");
            Random r = new Random();
            int    cnt = 0;
            int    x = 0, y = 0, W = 0, H = 0;

            int               sleepTime = (int)Math.Round(1000 / video.Fps);
            String            filenameBodyCascade = "D:\\project\\c#\\Day021\\haarcascade_fullbody.xml"; // trained haar-cascade
            CascadeClassifier bodyCascade = new CascadeClassifier();

            if (!bodyCascade.Load(filenameBodyCascade))
            {
                Console.WriteLine("error");
                return;
            }

            inCvImage  = new Mat();
            outCvImage = new Mat();

            // Take the frame size from the capture; inCvImage is still empty at this point.
            int  oH  = video.FrameHeight;
            int  oW  = video.FrameWidth;
            bool csi = false;

            while (true)
            {
                cnt = 0;

                video.Read(inCvImage);

                if (inCvImage.Empty())
                {
                    break;
                }
                // detect
                Rect[] body = bodyCascade.DetectMultiScale(inCvImage);
                Console.WriteLine(body.Length);

                foreach (var item in body)
                {
                    Scalar c = new Scalar(r.Next(0, 255), r.Next(0, 255), r.Next(0, 255));
                    Cv2.Rectangle(inCvImage, item, c); // add rectangle to the image
                    Console.WriteLine("body : " + item);
                    cnt++;

                    x = item.X;
                    y = item.Y;
                    W = item.Width;
                    H = item.Height;
                }

                if (cnt > 4)
                {
                    csi = true;
                }

                if (csi)
                {
                    outCvImage = Mat.Ones(new OpenCvSharp.Size(oW, oH), MatType.CV_8UC1);
                    Cv2.CvtColor(inCvImage, outCvImage, ColorConversionCodes.BGR2GRAY);
                    Cv2.AdaptiveThreshold(outCvImage, outCvImage, 255,
                                          AdaptiveThresholdTypes.GaussianC, ThresholdTypes.Binary, 25, 5);
                }
                else
                {
                    outCvImage = inCvImage;
                }

                // display
                //Cv2.ImShow("CCTV", frame);

                picboxCow.ImageIpl = outCvImage;

                if (Cv2.WaitKey(sleepTime) == 27) // pace playback to the source FPS
                {
                    // Release the capture only; the Mats are still used after the loop
                    // and are disposed at the end of the method.
                    video.Release();
                    break;
                }

                if (cnt > 7)
                {
                    video.Release();
                    break;
                }
            }


            // grayscale
            Cv2.CvtColor(inCvImage, outCvImage, ColorConversionCodes.BGR2GRAY);
            picboxCow.ImageIpl = outCvImage;

            Delay(2000); // visualize each transformation

            // spread the pixel intensities evenly via histogram equalization
            Cv2.EqualizeHist(outCvImage, outCvImage);
            picboxCow.ImageIpl = outCvImage;

            Delay(2000);      // visualize each transformation

            dst1 = new Mat(); // initialization for the accumulated image work
            dst2 = new Mat();

            // crop out the last detection so a person in the crowd can be inspected separately
            // (dst1 is assigned directly from SubMat, so no intermediate resize is needed)
            Rect rect = new Rect(x, y, W, H);

            dst1 = outCvImage.SubMat(rect);

            // size picplCSi to match the image
            picplCSi.Size     = new System.Drawing.Size(W, H);
            picplCSi.ImageIpl = dst1;
            lbf.Visible       = true;

            Delay(2000); // visualize each transformation

            // enlarge for a clearer visualization
            Cv2.Resize(dst1, dst2, new OpenCvSharp.Size(800, 600), 0, 0, InterpolationFlags.Lanczos4);

            // size picplCSi to match the image
            this.Size     = new System.Drawing.Size();
            picplCSi.Size = new System.Drawing.Size(800, 600);

            Cv2.ImShow("dst2", dst2); // display the image

            Cv2.WaitKey(2000);

            // Save to file, then dispose the Mats that were used to avoid leaking native memory.
            // Note: '/' in a date format string becomes the culture's date separator and would
            // create extra path segments, so use '-' instead.
            string _saveName = "C:/images/" + DateTime.Now.ToString("yyyy-MM-dd_HH_mm_ss") + ".jpeg";

            Cv2.ImWrite(_saveName, dst2);
            inCvImage.Dispose();
            outCvImage.Dispose();
            dst1.Dispose();
            dst2.Dispose();

            //Check Message
            MessageBox.Show("업로드", "Checking", MessageBoxButtons.OK, MessageBoxIcon.Information);
            Close();
        }
Example #9
        static void Main(string[] args)
        {
            var grayImage = new Mat();
            var srcImage  = new Mat();
            var cascade       = new CascadeClassifier(@"..\..\Data\haarcascade_frontalface_alt.xml");
            var nestedCascade = new CascadeClassifier(@"..\..\Data\haarcascade_eye_tree_eyeglasses.xml");
            var smile         = new CascadeClassifier(@"..\..\Data\haarcascade_smile.xml");

            VideoCapture video = new VideoCapture(0);

            for (int i = 0; i < 1000; i++)
            {
                video.Read(srcImage);

                Cv2.CvtColor(srcImage, grayImage, ColorConversionCodes.BGR2GRAY); // camera frames are 3-channel BGR
                Cv2.EqualizeHist(grayImage, grayImage);

                var faces = cascade.DetectMultiScale(
                    image: grayImage,
                    scaleFactor: 1.1,
                    minNeighbors: 2,
                    flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                    minSize: new Size(30, 30)
                    );

                foreach (var faceRect in faces)
                {
                    var detectedFaceImage = new Mat(srcImage, faceRect);
                    Cv2.Rectangle(srcImage, faceRect, Scalar.Red, 2);

                    // Note: the nested cascades below scan the whole gray frame;
                    // see the ROI-based sketch after this example for a tighter search.
                    var nestedObjects = nestedCascade.DetectMultiScale(
                        image: grayImage,
                        scaleFactor: 1.1,
                        minNeighbors: 2,
                        flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                        minSize: new Size(30, 30)
                        );

                    foreach (var nestedObject in nestedObjects)
                    {
                        Cv2.Rectangle(srcImage, nestedObject, Scalar.YellowGreen, 2);
                    }

                    var nestedSmile = smile.DetectMultiScale(
                        image: grayImage,
                        scaleFactor: 3.1,
                        minNeighbors: 2,
                        flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                        minSize: new Size(30, 30)
                        );

                    foreach (var nestedObject in nestedSmile)
                    {
                        Cv2.Rectangle(srcImage, nestedObject, Scalar.Green, 2);
                    }
                }



                Cv2.ImShow("tela", srcImage);
                Cv2.WaitKey(1); // do events
            }

            Cv2.WaitKey(0);
            Cv2.DestroyAllWindows();
            srcImage.Dispose();
        }
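As a refinement, nested detectors are usually run on each face ROI rather than the whole frame; a sketch under that assumption:

                // Hypothetical refinement: search for eyes only inside each detected face,
                // then offset the ROI-relative rects back to frame coordinates.
                foreach (var faceRect in faces)
                {
                    using var faceRoi = new Mat(grayImage, faceRect);
                    var eyes = nestedCascade.DetectMultiScale(faceRoi, 1.1, 2);
                    foreach (var eye in eyes)
                    {
                        var r = new Rect(faceRect.X + eye.X, faceRect.Y + eye.Y, eye.Width, eye.Height);
                        Cv2.Rectangle(srcImage, r, Scalar.YellowGreen, 2);
                    }
                }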
Example #10
    // Canny edge pass plus Haar face and eye detection
    void ProcessImage(Mat _image)
    {
        Cv2.Flip(_image, _image, FlipMode.X);
        Cv2.Canny(_image, cannyImage, 100, 100);
        var grayImage = new Mat();

        Cv2.CvtColor(_image, grayImage, ColorConversionCodes.BGR2GRAY);
        Cv2.EqualizeHist(grayImage, grayImage);

        var face_cascade = new CascadeClassifier();

        face_cascade.Load(Application.dataPath + "/Plugins/Classifiers/haarcascade_frontalface_default.xml");
        var eye_cascade = new CascadeClassifier();

        eye_cascade.Load(Application.dataPath + "/Plugins/Classifiers/haarcascade_eye_tree_eyeglasses.xml");
        //var right_ear_cascade = new CascadeClassifier();
        //right_ear_cascade.Load(Application.dataPath + "/Plugins/Classifiers/haarcascade_mcs_leftear.xml");
        //var left_ear_cascade = new CascadeClassifier();
        //left_ear_cascade.Load(Application.dataPath + "/Plugins/Classifiers/haarcascade_mcs_rightear.xml");
        //var mouth_cascade = new CascadeClassifier();
        //mouth_cascade.Load(Application.dataPath + "/Plugins/Classifiers/haarcascade_mcs_mouth.xml");
        //Debug.Log(" ");

        var faces = face_cascade.DetectMultiScale(
            image: grayImage,
            scaleFactor: 1.3,
            minNeighbors: 5,
            flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
            minSize: new Size(100, 100)
            );

        Bounds  meshRendererBounds = GetComponentInChildren <MeshRenderer>().bounds;
        Vector3 meshRendererCenter = meshRendererBounds.center;
        Vector3 maxBound           = meshRendererBounds.max;
        Vector3 minBound           = meshRendererBounds.min;

        OpenCvSharp.Rect rect      = new OpenCvSharp.Rect((int)meshRendererCenter.x + 350, (int)meshRendererCenter.y + 50, 600, 600);
        var global_rectangle_color = Scalar.FromRgb(0, 0, 255);

        Cv2.Rectangle(_image, rect, global_rectangle_color, 3);
        //Console.WriteLine("Detected faces: {0}", faces.Length);
        //Debug.Log(faces.Length);

        //var rnd = new System.Random();

        var face_count = 0;
        var eye_count  = 0;

        //var ear_count = 0;
        //var mouth_count = 0;
        foreach (var faceRect in faces)
        {
            var detectedFaceImage = new Mat(_image, faceRect);
            //Cv2.ImShow(string.Format("Face {0}", face_count), detectedFaceImage);
            //Cv2.WaitKey(1); // do events

            var facec_rectangle_color = Scalar.FromRgb(255, 0, 0);
            Cv2.Rectangle(_image, faceRect, facec_rectangle_color, 3);


            var detectedFaceGrayImage = new Mat();
            Cv2.CvtColor(detectedFaceImage, detectedFaceGrayImage, ColorConversionCodes.BGRA2GRAY);


            var eyes = eye_cascade.DetectMultiScale(
                image: grayImage,
                scaleFactor: 1.3,
                minNeighbors: 5,
                flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                minSize: new Size(50, 50)
                );
            foreach (var eyeRect in eyes)
            {
                var detectedEyeImage = new Mat(_image, eyeRect);
                //Cv2.ImShow(string.Format("Face {0}", eye_count), detectedEyeImage);
                //Cv2.WaitKey(1); // do events

                var eye_rectangle_color = Scalar.FromRgb(0, 255, 0);
                Cv2.Rectangle(_image, eyeRect, eye_rectangle_color, 3);


                var detectedEyeGrayImage = new Mat();
                Cv2.CvtColor(detectedEyeImage, detectedEyeGrayImage, ColorConversionCodes.BGRA2GRAY);

                eye_count++;
            }


            //var left_ear = left_ear_cascade.DetectMultiScale(
            //    image: grayImage,
            //    scaleFactor: 1.3,
            //    minNeighbors: 5,
            //    flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
            //    minSize: new Size(50, 50)
            //);
            //foreach (var ear in left_ear)
            //{
            //    var detectedEarImage = new Mat(_image, ear);
            //    //Cv2.ImShow(string.Format("Face {0}", eye_count), detectedEyeImage);
            //    //Cv2.WaitKey(1); // do events

            //    var eye_rectangle_color = Scalar.FromRgb(0, 255, 0);
            //    Cv2.Rectangle(_image, ear, eye_rectangle_color, 3);


            //    var detectedEyeGrayImage = new Mat();
            //    Cv2.CvtColor(detectedEarImage, detectedEyeGrayImage, ColorConversionCodes.BGRA2GRAY);

            //    ear_count++;
            //}


            // var right_ear = right_ear_cascade.DetectMultiScale(
            //    image: grayImage,
            //    scaleFactor: 1.3,
            //    minNeighbors: 5,
            //    flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
            //    minSize: new Size(50, 50)
            //);
            //foreach (var ear in right_ear)
            //{
            //    var detectedEarImage = new Mat(_image, ear);
            //    //Cv2.ImShow(string.Format("Face {0}", eye_count), detectedEyeImage);
            //    //Cv2.WaitKey(1); // do events

            //    var eye_rectangle_color = Scalar.FromRgb(0, 255, 0);
            //    Cv2.Rectangle(_image, ear, eye_rectangle_color, 3);


            //    var detectedEyeGrayImage = new Mat();
            //    Cv2.CvtColor(detectedEarImage, detectedEyeGrayImage, ColorConversionCodes.BGRA2GRAY);

            //    ear_count++;
            //}


            // var mouth = mouth_cascade.DetectMultiScale(
            //    image: grayImage,
            //    scaleFactor: 1.3,
            //    minNeighbors: 5,
            //    flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
            //    minSize: new Size(50, 50)
            //);
            //foreach (var m in mouth)
            //{
            //    var detectedEarImage = new Mat(_image, m);
            //    //Cv2.ImShow(string.Format("Face {0}", eye_count), detectedEyeImage);
            //    //Cv2.WaitKey(1); // do events

            //    var eye_rectangle_color = Scalar.FromRgb(0, 255, 0);
            //    Cv2.Rectangle(_image, m, eye_rectangle_color, 3);


            //    var detectedEyeGrayImage = new Mat();
            //    Cv2.CvtColor(detectedEarImage, detectedEyeGrayImage, ColorConversionCodes.BGRA2GRAY);

            //    mouth_count++;
            //}
            face_count++;
        }
        //Debug.Log(face_count);
        if (face_count == 1 && eye_count == 2 && !waitSoundEffect)
        {
            //Debug.Log(faces[0]);
            //Debug.Log(meshRendererCenter.x);
            //Debug.Log((int)meshRendererCenter.y + 50);
            Point origin = faces[0].Location;
            float width  = faces[0].Width;
            float height = faces[0].Height;
            // Check that the face rect lies entirely inside the photo zone
            if (origin.X > (int)meshRendererCenter.x + 350 &&
                origin.X + width < (int)meshRendererCenter.x + 350 + 600 &&
                origin.Y > (int)meshRendererCenter.y + 50 &&
                origin.Y + height < (int)meshRendererCenter.y + 50 + 600 && // the 50 matches the zone's Y offset above
                width > 400 &&
                height > 400)
            {
                Debug.Log("Take photo !");
                TakePhoto();
                soundEffects.MakePhotoSound();
                waitSoundEffect = true;
            }
        }



        //Cv2.ImShow("Haar Detection", _image);
        //Cv2.WaitKey(1); // do events
    }
Example #11
 public static void EqualizeHistogram(this Mat self, Mat output)
 {
     Cv2.EqualizeHist(self, output);
 }
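Usage is then a one-liner (a sketch; the file names are placeholders, and EqualizeHist expects an 8-bit single-channel Mat):

 using var src = Cv2.ImRead("input.png", ImreadModes.Grayscale);
 using var dst = new Mat();
 src.EqualizeHistogram(dst);
 Cv2.ImWrite("equalized.png", dst);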
Example #12
        static void Main(string[] args)
        {
            Console.WriteLine("Trying to capture video");

            // Opens the default camera (index 0)
            using var capture = new VideoCapture(0);

            int sleepTime = (int)Math.Round(1000 / capture.Fps);

            using (var window = new Window("capture"))
            {
                // Frame image buffer
                var image = new Mat();

                // Load the classifiers once, outside the frame loop.
                var cascadeFrontalFace = new CascadeClassifier(@$".{Path.DirectorySeparatorChar}CascadeClassifiers{Path.DirectorySeparatorChar}haarcascade_frontalface_alt2.xml");
                var cascade            = new CascadeClassifier(@$".{Path.DirectorySeparatorChar}CascadeClassifiers{Path.DirectorySeparatorChar}haarcascade_profileface.xml");

                // When playback reaches the end, the frame comes back empty.
                while (true)
                {
                    capture.Read(image); // same as cvQueryFrame
                    if (image.Empty())
                    {
                        break;
                    }

                    var grayImage = new Mat();
                    Cv2.CvtColor(image, grayImage, ColorConversionCodes.BGR2GRAY); // frames are 3-channel BGR
                    Cv2.EqualizeHist(grayImage, grayImage);

                    var facesProfile = cascade.DetectMultiScale(
                        image: grayImage, // detect on the equalized grayscale frame
                        scaleFactor: 1.1,
                        minNeighbors: 2,
                        flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                        minSize: new Size(30, 30)
                        );

                    var faces = cascadeFrontalFace.DetectMultiScale(
                        image: grayImage,
                        scaleFactor: 1.1,
                        minNeighbors: 2,
                        flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                        minSize: new Size(30, 30)
                        );

                    Console.WriteLine("{0} Detected faces: {1}", DateTime.Now, faces.Length + facesProfile.Length);

                    // Rect is a struct, so mutating ToList() copies would be a no-op;
                    // grow the detected rectangles in the arrays themselves.
                    for (int i = 0; i < faces.Length; i++)
                    {
                        faces[i].Height += 10;
                    }
                    for (int i = 0; i < facesProfile.Length; i++)
                    {
                        facesProfile[i].Height += 10;
                    }
                    foreach (var faceRect in faces)
                    {
                        Cv2.Rectangle(image, faceRect, Scalar.Red, 2);
                    }

                    foreach (var faceRect in facesProfile)
                    {
                        Cv2.Rectangle(image, faceRect, Scalar.Blue, 2);
                    }

                    window.ShowImage(image);

                    Cv2.WaitKey(sleepTime);
                }
            }
        }
        /*
         * Method called to generate a question
         */
        void GenerateQuestion()
        {
            if (QnA.Count > 0)
            {
                currentQuestion   = Random.Range(0, QnA.Count);
                QuestionText.text = QnA[currentQuestion].Questiion;
                Image.texture     = QnA[currentQuestion].sample;

                Mat image = Unity.TextureToMat(QnA[currentQuestion].sample);

                // Detect faces
                var gray = image.CvtColor(ColorConversionCodes.BGR2GRAY);
                Cv2.EqualizeHist(gray, gray);
                // detect matching regions (face bounding boxes)

                FileStorage storageFaces = new FileStorage(Faces.text, FileStorage.Mode.Read | FileStorage.Mode.Memory);
                cascadeFaces = new CascadeClassifier();
                if (!cascadeFaces.Read(storageFaces.GetFirstTopLevelNode()))
                {
                    throw new System.Exception("FaceProcessor.Initialize: Failed to load Faces cascade classifier");
                }

                recognizer = FaceRecognizer.CreateFisherFaceRecognizer();
                recognizer.Load(new FileStorage(RecognizerXml.text, FileStorage.Mode.Read | FileStorage.Mode.Memory));
                // labels (class names)
                names = new string[] { "Cooper", "DeGeneres", "Nyongo", "Pitt", "Roberts", "Spacey" };

                OpenCvSharp.Rect[] rawFaces = cascadeFaces.DetectMultiScale(gray, 1.1, 6);

                foreach (var faceRect in rawFaces)
                {
                    var grayFace = new Mat(gray, faceRect);
                    if (requiredSize.Width > 0 && requiredSize.Height > 0)
                    {
                        grayFace = grayFace.Resize(requiredSize);
                    }

                    int label = -1;

                    /*
                     * Now try to recognize the face. "Confidence" here is a misnomer:
                     * it is actually the distance from the sample to the closest known
                     * face, with 0 being an "ideal match".
                     */

                    double confidence = 0.0;
                    recognizer.Predict(grayFace, out label, out confidence);
                    faceName = names[label];

                    int          baseline    = 0;
                    const int    textPadding = 2;
                    const double textScale   = 2.0;
                    string       message     = String.Format("{0} ({1})", names[label], (int)confidence);
                    var          textSize    = Cv2.GetTextSize(message, HersheyFonts.HersheyPlain, textScale, 1, out baseline);
                    // Label box centered under the face rect (computed but not drawn here)
                    var          textBox     = new OpenCvSharp.Rect(
                        faceRect.X + (faceRect.Width - textSize.Width) / 2 - textPadding,
                        faceRect.Bottom,
                        textSize.Width + textPadding * 2,
                        textSize.Height + textPadding * 2
                        );
                    Debug.Log(faceName);
                }
                // Assign the image to the texture on the scene's Image component
                var texture  = Unity.MatToTexture(image);
                var rawImage = Image;
                rawImage.texture = texture;

                var transform = Image.GetComponent <RectTransform>();
                transform.sizeDelta = new Vector2(image.Width, image.Height);

                for (int i = 0; i < Options.Length; i++)
                {
                    Options[i].transform.GetChild(0).GetComponent <Text>().text = QnA[currentQuestion].Answers[i];
                    if (faceName == Options[i].transform.GetChild(0).GetComponent <Text>().text)
                    {
                        Options[i].GetComponent <AnswerScript>().isCorrect = true;
                    }
                    else
                    {
                        Options[i].GetComponent <AnswerScript>().isCorrect = false;
                    }
                }
            }
            else
            {
                GameOver();
            }
        }
Example #14
        // Process the image and show each license-plate localization stage
        private void ProcessAndShowImage(Bitmap image, PlateLocator.ParameterList pl)
        {
            currentTabCount = 0;

            Mat matIn = image.ToMat();

            AddTag("原图", matIn);

            // Convert to HSV
            Mat matHsv = matIn.CvtColor(ColorConversionCodes.BGR2HSV);

            AddTag("HSV", matHsv);

            // Equalize the V channel, then merge the planes back together
            Cv2.Split(matHsv, out Mat[] matToHsv);
            Cv2.EqualizeHist(matToHsv[2], matToHsv[2]);
            Mat equalizeHistHsv = new Mat();

            Cv2.Merge(matToHsv, equalizeHistHsv);
            AddTag("均衡HSV", equalizeHistHsv);

            // Find the yellow and blue regions in the equalized HSV color space
            Mat    matYellow  = new Mat();
            Mat    matBlue    = new Mat();
            Scalar yellow_low = new Scalar(15, 70, 70);
            Scalar yellow_up  = new Scalar(40, 255, 255);
            Scalar blue_low   = new Scalar(100, 70, 70);
            Scalar blue_up    = new Scalar(140, 255, 255);

            Cv2.InRange(equalizeHistHsv, yellow_low, yellow_up, matYellow);
            Cv2.InRange(equalizeHistHsv, blue_low, blue_up, matBlue);
            Mat matAll = matBlue + matYellow;

            AddTag("黄区", matYellow);
            AddTag("蓝区", matBlue);
            AddTag("黄蓝区", matAll);

            // Clean up the selected color regions with morphological operations (dilate + erode = closing)
            Mat matAllDilate = new Mat();
            Mat matAllErode  = new Mat();
            Mat element      = Cv2.GetStructuringElement(MorphShapes.Rect, new OpenCvSharp.Size(7, 3));

            Cv2.Dilate(matAll, matAllDilate, element);
            Cv2.Erode(matAllDilate, matAllErode, element);
            AddTag("闭操作", matAllErode);


            // Find contours
            OpenCvSharp.Point[][] contours;         //vector<vector<Point>> contours;
            HierarchyIndex[]      hierarchyIndexes; //vector<Vec4i> hierarchy;
            Cv2.FindContours(
                matAllErode,
                out contours,
                out hierarchyIndexes,
                mode: RetrievalModes.Tree,
                method: ContourApproximationModes.ApproxSimple); // extract the contours

            Mat matContours = matIn.Clone();

            Cv2.DrawContours(matContours, contours, -1, new Scalar(0, 0, 255), 2); // draw the contour outlines
            AddTag("轮廓", matContours);

            // Compute the upright bounding rectangle of each contour
            Mat         matRects = matIn.Clone();
            List <Rect> rects    = new List <Rect>();

            foreach (OpenCvSharp.Point[] p in contours)
            {
                Rect rect = Cv2.BoundingRect(p);
                if ((double)rect.Height / rect.Width > pl.HeightDivideWidthLow && (double)rect.Height / rect.Width < pl.HeightDivedeWidthUp &&
                    rect.Height > pl.HeightLow && rect.Height < pl.HeightUp &&
                    rect.Width > pl.WidthLow && rect.Width < pl.WidthUp)
                {
                    rects.Add(rect);
                    Cv2.Rectangle(matRects, rect, new Scalar(0, 0, 255), 3);
                }
            }
            AddTag("外接矩形", matRects);


            ShowSpliteImage(rects, matIn);
        }
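Aside: the Dilate-then-Erode pair above is morphological closing; a minimal sketch of the single-call equivalent, assuming the same kernel as in the method:

            // Sketch: MorphologyEx with MorphTypes.Close is equivalent to
            // Dilate followed by Erode with the same structuring element.
            Mat element = Cv2.GetStructuringElement(MorphShapes.Rect, new OpenCvSharp.Size(7, 3));
            Mat closed  = new Mat();
            Cv2.MorphologyEx(matAll, closed, MorphTypes.Close, element);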
Example #15
        // Core logic: dispatch each queued image-processing operation to its OpenCV API call
        private Mat myOPENCV_run(Mat image_in, Mat image_out)
        {
            image_out = image_in;                          // pass the input image through to the output
            for (int i = 0; i < listBox2.Items.Count; i++) // run each method queued in list box 2
            {
                switch ((MyOPENCV)myOPENCV_runlist[i, 0])  // operation selected in list box 2
                {
                case MyOPENCV.cvt_color:                   // color conversion (input, output, conversion code)
                {
                    Cv2.CvtColor(image_out, image_out, (ColorConversionCodes)myOPENCV_runlist[i, 1], myOPENCV_runlist[i, 2]);
                    break;
                }

                case MyOPENCV.boxfilter:    // box filter
                {
                    OpenCvSharp.Size size;
                    size.Width  = myOPENCV_runlist[i, 2];
                    size.Height = myOPENCV_runlist[i, 3];
                    Cv2.BoxFilter(image_out, image_out, myOPENCV_runlist[i, 1], size);
                    break;
                }

                case MyOPENCV.blur:     // mean (averaging) filter
                {
                    OpenCvSharp.Size size;
                    size.Width  = myOPENCV_runlist[i, 1];
                    size.Height = myOPENCV_runlist[i, 2];
                    Cv2.Blur(image_out, image_out, size);
                    break;
                }

                case MyOPENCV.gaussianblur:      // Gaussian blur
                {
                    OpenCvSharp.Size size;
                    double           sigmaX, sigmaY;
                    size.Width  = myOPENCV_runlist[i, 1];
                    size.Height = myOPENCV_runlist[i, 2];
                    sigmaX      = (double)myOPENCV_runlist[i, 3];
                    sigmaY      = (double)myOPENCV_runlist[i, 4];

                    Cv2.GaussianBlur(image_out, image_out, size, sigmaX, sigmaY);
                    break;
                }

                case MyOPENCV.medianblur:    // median blur
                {
                    Cv2.MedianBlur(image_in, image_out, myOPENCV_runlist[i, 1]);
                    break;
                }

                case MyOPENCV.bilateralfilter:    // bilateral filter
                {
                    Mat    image_out2 = new Mat();
                    double sigmaColor, sigmaSpace;
                    sigmaColor = (double)myOPENCV_runlist[i, 2] * 2;
                    sigmaSpace = (double)myOPENCV_runlist[i, 3] / 2;
                    Cv2.BilateralFilter(image_out, image_out2, myOPENCV_runlist[i, 1], sigmaColor, sigmaSpace);
                    image_out = image_out2;
                    break;
                }

                case MyOPENCV.dilate:    // dilate
                {
                    Mat image_element = new Mat();
                    OpenCvSharp.Size size;
                    size.Width    = myOPENCV_runlist[i, 2];
                    size.Height   = myOPENCV_runlist[i, 3];
                    image_element = Cv2.GetStructuringElement((MorphShapes)myOPENCV_runlist[i, 1], size);
                    Cv2.Dilate(image_out, image_out, image_element);
                    break;
                }

                case MyOPENCV.erode:    // erode
                {
                    Mat image_element = new Mat();
                    OpenCvSharp.Size size;
                    size.Width    = myOPENCV_runlist[i, 2];
                    size.Height   = myOPENCV_runlist[i, 3];
                    image_element = Cv2.GetStructuringElement((MorphShapes)myOPENCV_runlist[i, 1], size);
                    Cv2.Erode(image_out, image_out, image_element);
                    break;
                }

                case MyOPENCV.morphologyex:    // advanced morphological transform
                {
                    Mat image_element = new Mat();
                    OpenCvSharp.Size size;
                    size.Width    = myOPENCV_runlist[i, 3];
                    size.Height   = myOPENCV_runlist[i, 4];
                    image_element = Cv2.GetStructuringElement((MorphShapes)myOPENCV_runlist[i, 2], size);
                    Cv2.MorphologyEx(image_out, image_out, (MorphTypes)myOPENCV_runlist[i, 1], image_element);
                    break;
                }

                case MyOPENCV.floodfill:    // flood fill
                {
                    OpenCvSharp.Point point;
                    point.X = myOPENCV_runlist[i, 1];
                    point.Y = myOPENCV_runlist[i, 2];
                    OpenCvSharp.Scalar scalar;
                    scalar = myOPENCV_runlist[i, 3];
                    Cv2.FloodFill(image_out, point, scalar);
                    break;
                }

                case MyOPENCV.pyrup:    // pyramid upscale (2x)
                {
                    OpenCvSharp.Size size;
                    size.Width  = image_out.Cols * 2;
                    size.Height = image_out.Rows * 2;
                    Cv2.PyrUp(image_out, image_out, size);
                    break;
                }

                case MyOPENCV.pyrdown:    // pyramid downscale (1/2)
                {
                    OpenCvSharp.Size size;
                    size.Width  = image_out.Cols / 2;
                    size.Height = image_out.Rows / 2;
                    Cv2.PyrDown(image_out, image_out, size);
                    break;
                }

                case MyOPENCV.resize:    // resize
                {
                    OpenCvSharp.Size   size;
                    InterpolationFlags interpolationFlags;
                    size.Width         = image_out.Cols * myOPENCV_runlist[i, 1] / 10;
                    size.Height        = image_out.Rows * myOPENCV_runlist[i, 2] / 10;
                    interpolationFlags = (InterpolationFlags)myOPENCV_runlist[i, 3];
                    Cv2.Resize(image_out, image_out, size, 0, 0, interpolationFlags);
                    break;
                }

                case MyOPENCV.threshold:    // fixed threshold
                {
                    Cv2.Threshold(image_out, image_out, myOPENCV_runlist[i, 1], myOPENCV_runlist[i, 2], (ThresholdTypes)myOPENCV_runlist[i, 3]);
                    break;
                }

                case MyOPENCV.canny:    // Canny edge detection
                {
                    Mat image_out2 = new Mat();
                    Cv2.Canny(image_out, image_out2, myOPENCV_runlist[i, 1], myOPENCV_runlist[i, 2], myOPENCV_runlist[i, 3]);
                    image_out = image_out2;
                    break;
                }

                case MyOPENCV.sobel:    // Sobel edge detection
                {
                    Cv2.Sobel(image_out, image_out, -1, myOPENCV_runlist[i, 1], myOPENCV_runlist[i, 2], myOPENCV_runlist[i, 3]);
                    break;
                }

                case MyOPENCV.laplacian:    // Laplacian edge detection
                {
                    myOPENCV_runlist[i, 1] = 0;
                    Cv2.Laplacian(image_out, image_out, 0, myOPENCV_runlist[i, 2], myOPENCV_runlist[i, 3]);
                    break;
                }

                case MyOPENCV.scharr:    // Scharr edge detection
                {
                    Cv2.Scharr(image_out, image_out, -1, myOPENCV_runlist[i, 1], myOPENCV_runlist[i, 2]);
                    break;
                }

                case MyOPENCV.convertscaleabs:    // fast brightness/contrast adjustment
                {
                    double alpha, beta;
                    alpha = (double)myOPENCV_runlist[i, 1] / 10;
                    beta  = (double)myOPENCV_runlist[i, 2] / 10;
                    Cv2.ConvertScaleAbs(image_out, image_out, alpha, beta);
                    break;
                }

                case MyOPENCV.addweighted:    // image blending
                {
                    Mat    image_in2 = new Mat(my_imagesource2);
                    double alpha, beta, gamma;
                    alpha = (double)myOPENCV_runlist[i, 1] / 10;
                    beta  = (double)myOPENCV_runlist[i, 2] / 10;
                    gamma = (double)myOPENCV_runlist[i, 3] / 10;
                    Cv2.AddWeighted(image_out, alpha, image_in2, beta, gamma, image_out);
                    break;
                }

                case MyOPENCV.houghlines:                                     // standard Hough line transform
                {
                    Scalar             scalar = new Scalar(0x00, 0xFF, 0x00); // green
                    LineSegmentPolar[] lines;
                    OpenCvSharp.Size   size = new OpenCvSharp.Size(image_out.Width, image_out.Height);
                    Mat image_out3          = new Mat(size, MatType.CV_8UC3);
                    lines = Cv2.HoughLines(image_out, 1, Cv2.PI / 180, myOPENCV_runlist[i, 1]);
                    for (int ii = 0; ii < lines.Length; ii++)
                    {
                        //double rho, theta;
                        OpenCvSharp.Point pt1, pt2;
                        double            a = Math.Cos(lines[ii].Theta), b = Math.Sin(lines[ii].Theta);
                        double            x0 = a * lines[ii].Rho, y0 = b * lines[ii].Rho;
                        pt1.X = (int)Math.Round(x0 + 1000 * (-b));
                        pt1.Y = (int)Math.Round(y0 + 1000 * (a));
                        pt2.X = (int)Math.Round(x0 - 1000 * (-b));
                        pt2.Y = (int)Math.Round(y0 - 1000 * (a));
                        Cv2.Line(image_out3, pt1, pt2, scalar, 1, LineTypes.AntiAlias);
                    }
                    if (myOPENCV_runlist[i, 2] == 0)
                    {
                        Cv2.AddWeighted(image_out3, (double)myOPENCV_runlist[i, 3] / 10, image_in, (double)myOPENCV_runlist[i, 4] / 10, 0, image_out);
                    }
                    else
                    {
                        image_out = image_out3;
                    }
                    break;
                }

                case MyOPENCV.houghlinep:                                     // probabilistic Hough line transform
                {
                    Scalar             scalar = new Scalar(0x00, 0xFF, 0x00); // green
                    LineSegmentPoint[] lines;                                 // detected line segments
                    OpenCvSharp.Size   size = new OpenCvSharp.Size(image_out.Width, image_out.Height);
                    Mat image_out3          = new Mat(size, MatType.CV_8UC3);
                    lines = Cv2.HoughLinesP(image_out, 1, Cv2.PI / 180, myOPENCV_runlist[i, 1], myOPENCV_runlist[i, 3], myOPENCV_runlist[i, 4]);
                    for (int ii = 0; ii < lines.Length; ii++)
                    {
                        OpenCvSharp.Point point1, point2;
                        // index with the inner loop variable ii, not the outer i
                        point1.X = lines[ii].P1.X;
                        point1.Y = lines[ii].P1.Y;
                        point2.X = lines[ii].P2.X;
                        point2.Y = lines[ii].P2.Y;
                        Cv2.Line(image_out3, point1, point2, scalar, 1, LineTypes.AntiAlias);
                    }
                    if (myOPENCV_runlist[i, 2] == 0)
                    {
                        Cv2.AddWeighted(image_out3, 1, image_in, 0.8, 0, image_out);
                    }
                    else
                    {
                        image_out = image_out3;
                    }
                    break;
                }

                case MyOPENCV.houghcircles:                                 // Hough circle transform
                {
                    Scalar           scalar = new Scalar(0x00, 0xFF, 0x00); // green
                    CircleSegment[]  circles;
                    OpenCvSharp.Size size = new OpenCvSharp.Size(image_out.Width, image_out.Height);
                    Mat image_out3        = new Mat(size, MatType.CV_8UC3);
                    circles = Cv2.HoughCircles(image_out, HoughMethods.Gradient, 1, myOPENCV_runlist[i, 1], myOPENCV_runlist[i, 2], myOPENCV_runlist[i, 3], 0, myOPENCV_runlist[i, 4]);
                    for (int ii = 0; ii < circles.Length; ii++)
                    {
                        OpenCvSharp.Point center;
                        center.X = (int)Math.Round(circles[ii].Center.X);
                        center.Y = (int)Math.Round(circles[ii].Center.Y);
                        int radius = (int)Math.Round(circles[ii].Radius);
                        Cv2.Circle(image_out3, center, radius, scalar); // draw each detected circle once
                    }
                    Cv2.AddWeighted(image_out3, 1, image_in, 0.6, 0, image_out);

                    break;
                }

                case MyOPENCV.remap:    // remap
                {
                    OpenCvSharp.Size size = new OpenCvSharp.Size(image_out.Width, image_out.Height);

                    Mat map_x = new Mat(size, MatType.CV_32FC1), map_y = new Mat(size, MatType.CV_32FC1);
                    for (int ii = 0; ii < image_out.Rows; ii++)
                    {
                        for (int jj = 0; jj < image_out.Cols; jj++)
                        {
                            if (myOPENCV_runlist[i, 1] == 0)
                            {
                                map_x.Set <float>(ii, jj, jj);                  // vertical flip
                                map_y.Set <float>(ii, jj, image_out.Rows - ii); // vertical flip
                            }
                            else if (myOPENCV_runlist[i, 1] == 1)
                            {
                                map_x.Set <float>(ii, jj, image_out.Cols - jj); // horizontal flip
                                map_y.Set <float>(ii, jj, ii);                  // horizontal flip
                            }
                            else if (myOPENCV_runlist[i, 1] == 2)
                            {
                                map_x.Set <float>(ii, jj, image_out.Cols - jj);       // flip both vertically and horizontally
                                map_y.Set <float>(ii, jj, image_out.Rows - ii);       // flip both vertically and horizontally
                            }
                            else if (myOPENCV_runlist[i, 1] == 3)
                            {
                                map_x.Set <float>(ii, jj, (float)myOPENCV_runlist[i, 2] / 10 * jj);       // scale up/down
                                map_y.Set <float>(ii, jj, (float)myOPENCV_runlist[i, 2] / 10 * ii);       // scale up/down
                            }
                        }
                    }
                    Cv2.Remap(image_out, image_out, map_x, map_y);
                    break;
                }

                case MyOPENCV.warpaffine:    // affine transform
                {
                    if (0 == myOPENCV_runlist[i, 1])
                    {
                        Mat rot_mat = new Mat(2, 3, MatType.CV_32FC1);
                        OpenCvSharp.Point center = new OpenCvSharp.Point(image_out.Cols / 2, image_out.Rows / 2);
                        double            angle  = myOPENCV_runlist[i, 2];
                        double            scale  = (double)myOPENCV_runlist[i, 3] / 10;
                        // compute the rotation matrix from the parameters above
                        rot_mat = Cv2.GetRotationMatrix2D(center, angle, scale);
                        // rotate the image
                        Cv2.WarpAffine(image_out, image_out, rot_mat, image_out.Size());
                    }
                    else
                    {
                        Point2f[] srcTri   = new Point2f[3];
                        Point2f[] dstTri   = new Point2f[3];
                        Mat       warp_mat = new Mat(2, 3, MatType.CV_32FC1);
                        Mat       warp_dst;
                        warp_dst  = Mat.Zeros(image_out.Rows, image_out.Cols, image_out.Type());
                        srcTri[0] = new Point2f(0, 0);
                        srcTri[1] = new Point2f(image_out.Cols, 0);
                        srcTri[2] = new Point2f(0, image_out.Rows);
                        dstTri[0] = new Point2f((float)(image_out.Cols * myOPENCV_runlist[i, 2] / 100), (float)(image_out.Rows * myOPENCV_runlist[i, 2] / 100));
                        dstTri[1] = new Point2f((float)(image_out.Cols * (1 - (float)myOPENCV_runlist[i, 3] / 100)), (float)(image_out.Rows * myOPENCV_runlist[i, 3] / 100));
                        dstTri[2] = new Point2f((float)(image_out.Cols * myOPENCV_runlist[i, 4] / 100), (float)(image_out.Rows * (1 - (float)myOPENCV_runlist[i, 4] / 100)));
                        warp_mat  = Cv2.GetAffineTransform(srcTri, dstTri);
                        Cv2.WarpAffine(image_out, image_out, warp_mat, image_out.Size());
                    }
                    break;
                }

                case MyOPENCV.equalizehist:    // histogram equalization
                {
                    Cv2.EqualizeHist(image_out, image_out);
                    break;
                }

                case MyOPENCV.facedetection:         // face detection
                {
                    if (0 == myOPENCV_runlist[i, 1]) // parameter 1 == 0 selects the Haar cascade; any other value selects LBP
                    {
                        var haarCascade = new CascadeClassifier(@"haarcascade_frontalface_alt.xml");
                        Mat haarResult  = DetectFace(image_out, haarCascade);
                        image_out = haarResult;
                    }
                    else
                    {
                        var lbpCascade = new CascadeClassifier(@"lbpcascade_frontalface.xml");
                        Mat lbpResult  = DetectFace(image_out, lbpCascade);
                        image_out = lbpResult;
                    }

                    break;
                }

                case MyOPENCV.matchtemplate:                                             // template matching
                {
                    Mat originalMat = Cv2.ImRead(my_imagesource, ImreadModes.AnyColor);  // source image
                    Mat modelMat    = Cv2.ImRead(my_imagesource2, ImreadModes.AnyColor); // template
                    Mat resultMat   = new Mat();                                         // match result

                    //resultMat.Create(mat1.Cols - modelMat.Cols + 1, mat1.Rows - modelMat.Cols + 1, MatType.CV_32FC1); // pre-size the result, i.e. MatchTemplate's third argument
                    if (0 == myOPENCV_runlist[i, 1])
                    {
                        Cv2.MatchTemplate(originalMat, modelMat, resultMat, TemplateMatchModes.SqDiff);        // match (1: source, 2: template, 3: result, 4: match mode)
                    }
                    else if (1 == myOPENCV_runlist[i, 1])
                    {
                        Cv2.MatchTemplate(originalMat, modelMat, resultMat, TemplateMatchModes.SqDiffNormed);
                    }
                    else if (2 == myOPENCV_runlist[i, 1])
                    {
                        Cv2.MatchTemplate(originalMat, modelMat, resultMat, TemplateMatchModes.CCorr);
                    }
                    else if (3 == myOPENCV_runlist[i, 1])
                    {
                        Cv2.MatchTemplate(originalMat, modelMat, resultMat, TemplateMatchModes.CCorrNormed);
                    }
                    else if (4 == myOPENCV_runlist[i, 1])
                    {
                        Cv2.MatchTemplate(originalMat, modelMat, resultMat, TemplateMatchModes.CCoeff);
                    }
                    else if (5 == myOPENCV_runlist[i, 1])
                    {
                        Cv2.MatchTemplate(originalMat, modelMat, resultMat, TemplateMatchModes.CCoeffNormed);
                    }
                    OpenCvSharp.Point minLocation, maxLocation;
                    Cv2.MinMaxLoc(resultMat, out minLocation, out maxLocation);
                    // For the SqDiff modes the best match is the minimum; for the others it is the maximum.
                    OpenCvSharp.Point matchLocation = myOPENCV_runlist[i, 1] <= 1 ? minLocation : maxLocation;
                    Mat mask = originalMat.Clone();

                    Cv2.Rectangle(mask, matchLocation, new OpenCvSharp.Point(matchLocation.X + modelMat.Cols, matchLocation.Y + modelMat.Rows), Scalar.Green, 2);         // draw the matched rectangle (image, top-left, bottom-right, color, thickness)

                    image_out = mask;
                    break;
                }

                case MyOPENCV.find_draw_contours:                                      // find and draw contours
                {
                    Cv2.CvtColor(image_out, image_out, ColorConversionCodes.RGB2GRAY); // convert to grayscale
                    //Cv2.Blur(image_out, image_out, new OpenCvSharp.Size(2, 2));  // smoothing

                    Cv2.Canny(image_out, image_out, 100, 200);              // Canny edge detection

                    OpenCvSharp.Point[][] contours;
                    HierarchyIndex[]      hierarchly;
                    Cv2.FindContours(image_out, out contours, out hierarchly, RetrievalModes.Tree, ContourApproximationModes.ApproxSimple, new OpenCvSharp.Point(0, 0)); // get the contours

                    Mat    dst_Image = Mat.Zeros(image_out.Size(), image_out.Type());                                                                                    // blank canvas, all pixels zero
                    Random rnd       = new Random();
                    for (int j = 0; j < contours.Length; j++)
                    {
                        Scalar color = new Scalar(myOPENCV_runlist[i, 1], myOPENCV_runlist[i, 2], myOPENCV_runlist[i, 3]);
                        //Scalar color = new Scalar(rnd.Next(0, 255), rnd.Next(0, 255), rnd.Next(0, 255));
                        Cv2.DrawContours(dst_Image, contours, j, color, myOPENCV_runlist[i, 4], LineTypes.Link8, hierarchly);             // draw the contour
                    }
                    image_out = dst_Image;
                    break;
                }

                case MyOPENCV.componentdefectdetecting:                                // component defect detection
                {
                    Cv2.CvtColor(image_out, image_out, ColorConversionCodes.RGB2GRAY); // convert to grayscale
                    //Cv2.Blur(image_out, image_out, new OpenCvSharp.Size(2, 2));  // smoothing

                    Cv2.Canny(image_out, image_out, 100, 200);              // Canny edge detection

                    OpenCvSharp.Point[][] contours;
                    HierarchyIndex[]      hierarchly;
                    Cv2.FindContours(image_out, out contours, out hierarchly, RetrievalModes.Tree, ContourApproximationModes.ApproxSimple, new OpenCvSharp.Point(0, 0)); // get the contours

                    Mat    dst_Image = Mat.Zeros(image_out.Size(), image_out.Type());                                                                                    // blank canvas, all pixels zero
                    Random rnd       = new Random();
                    for (int j = 0; j < contours.Length; j++)
                    {
                        Scalar color = new Scalar(myOPENCV_runlist[i, 1], myOPENCV_runlist[i, 2], myOPENCV_runlist[i, 3]);
                        //Scalar color = new Scalar(rnd.Next(0, 255), rnd.Next(0, 255), rnd.Next(0, 255));
                        Cv2.DrawContours(dst_Image, contours, j, color, myOPENCV_runlist[i, 4], LineTypes.Link8, hierarchly);               // draw the contour
                    }


                    // ConvexHull operates on point sets, not images; take the hull of each contour.
                    // (The defect-detection step itself is left unfinished in this sample.)
                    foreach (OpenCvSharp.Point[] contour in contours)
                    {
                        OpenCvSharp.Point[] hull = Cv2.ConvexHull(contour);
                    }



                    break;
                }

                default: break;
                }
            }
            return(image_out);
        }
Example #16
    // Haar face, eye and mouth detection (plus a Canny edge pass)
    void ProcessImage(Mat _image)
    {
        Cv2.Flip(_image, _image, FlipMode.X);
        Cv2.Canny(_image, cannyImage, 100, 100);
        var grayImage = new Mat();

        Cv2.CvtColor(_image, grayImage, ColorConversionCodes.BGR2GRAY);
        Cv2.EqualizeHist(grayImage, grayImage);

        var face_cascade = new CascadeClassifier();

        face_cascade.Load(Application.dataPath + "/Plugins/Classifiers/haarcascade_frontalface_default.xml");
        var eye_cascade = new CascadeClassifier();

        eye_cascade.Load(Application.dataPath + "/Plugins/Classifiers/haarcascade_eye_tree_eyeglasses.xml");
        var mouth_cascade = new CascadeClassifier();

        mouth_cascade.Load(Application.dataPath + "/Plugins/Classifiers/haarcascade_mcs_mouth.xml");
        //Debug.Log(" ");

        var faces = face_cascade.DetectMultiScale(
            image: grayImage,
            scaleFactor: 1.3,
            minNeighbors: 5,
            flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
            minSize: new Size(100, 100)
            );

        //Bounds meshRendererBounds = GetComponentInChildren<MeshRenderer>().bounds;
        //Vector3 meshRendererCenter = meshRendererBounds.center;
        //Vector3 maxBound = meshRendererBounds.max;
        //Vector3 minBound = meshRendererBounds.min;
        //OpenCvSharp.Rect rect = new OpenCvSharp.Rect((int)meshRendererCenter.x + 350,(int)meshRendererCenter.y + 50, 600,600);
        var global_rectangle_color = Scalar.FromRgb(0, 0, 255);
        //Cv2.Rectangle(_image, rect, global_rectangle_color, 3);
        //Console.WriteLine("Detected faces: {0}", faces.Length);
        //Debug.Log(faces.Length);

        //var rnd = new System.Random();

        var face_count  = 0;
        var mouth_count = 0;
        var eye_count   = 0;

        foreach (var faceRect in faces)
        {
            var detectedFaceImage = new Mat(_image, faceRect);
            //Cv2.ImShow(string.Format("Face {0}", face_count), detectedFaceImage);
            //Cv2.WaitKey(1); // do events

            var facec_rectangle_color = Scalar.FromRgb(255, 0, 0);
            face = faceRect;
            Cv2.Rectangle(_image, faceRect, facec_rectangle_color, 3);


            rectFront = new OpenCvSharp.Rect(faceRect.X + faceRect.Width / 2 - 50, faceRect.Y + 50, 100, 50);
            //Cv2.Rectangle(_image, rectFront, global_rectangle_color, 3);



            var detectedFaceGrayImage = new Mat();
            Cv2.CvtColor(detectedFaceImage, detectedFaceGrayImage, ColorConversionCodes.BGRA2GRAY);

            var eyes = eye_cascade.DetectMultiScale(
                image: grayImage,
                scaleFactor: 1.3,
                minNeighbors: 5,
                flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                minSize: new Size(50, 50)
                );


            foreach (var eyeRect in eyes)
            {
                var detectedEyeImage = new Mat(_image, eyeRect);
                //Cv2.ImShow(string.Format("Face {0}", eye_count), detectedEyeImage);
                //Cv2.WaitKey(1); // do events

                var eye_rectangle_color = Scalar.FromRgb(0, 255, 0);
                //Cv2.Rectangle(_image, eyeRect, eye_rectangle_color, 3);

                if (eye_count == 1)
                {
                    // Relative to the position of the left eye
                    rectEyeLeft = new OpenCvSharp.Rect(eyeRect.X + 75, eyeRect.Y + 100, 25, 25);
                    //Cv2.Rectangle(_image, rectEyeLeft, global_rectangle_color, 3);
                }
                else
                {
                    // Relative to the position of the right eye
                    rectEyeRight = new OpenCvSharp.Rect(eyeRect.X, eyeRect.Y + 100, 25, 25);
                    //Cv2.Rectangle(_image, rectEyeRight, global_rectangle_color, 3);
                }



                var detectedEyeGrayImage = new Mat();
                Cv2.CvtColor(detectedEyeImage, detectedEyeGrayImage, ColorConversionCodes.BGRA2GRAY);

                eye_count++;
            }


            var mouth = mouth_cascade.DetectMultiScale(
                image: grayImage,
                scaleFactor: 1.3,
                minNeighbors: 5,
                flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                minSize: new Size(50, 50)
                );
            foreach (var m in mouth)
            {
                var detectedEarImage = new Mat(_image, m);
                //Cv2.ImShow(string.Format("Face {0}", eye_count), detectedEyeImage);
                //Cv2.WaitKey(1); // do events

                if (eyes.Length > 0 && m.Y > eyes[0].Y && Mathf.Abs(m.Y - eyes[0].Y) > 100) // guard: eyes may be empty
                {
                    //Debug.Log("mouth height :");
                    //Debug.Log(m.Height);
                    var eye_rectangle_color = Scalar.FromRgb(0, 255, 0);
                    Cv2.Rectangle(_image, m, eye_rectangle_color, 3);
                    lipHeight = (float)m.Height / (float)face.Height;
                }

                var detectedEyeGrayImage = new Mat();
                Cv2.CvtColor(detectedEarImage, detectedEyeGrayImage, ColorConversionCodes.BGRA2GRAY);

                mouth_count++;
            }

            face_count++;
        }
    }
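The three CascadeClassifier instances above are constructed and their XML files reloaded on every frame, which is expensive. A minimal sketch of loading them once instead (field placement is illustrative):

    // Hedged sketch: cache a classifier in a field and load it a single time
    private CascadeClassifier face_cascade;

    void Start()
    {
        face_cascade = new CascadeClassifier();
        face_cascade.Load(Application.dataPath + "/Plugins/Classifiers/haarcascade_frontalface_default.xml");
    }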
Exemple #17
0
    private static void SegmentationCannyFilledPolygons(Camera camera, out List <Point2f> v_center, out List <float> v_radius)
    {
        RenderTexture activeRenderTexture = RenderTexture.active;

        RenderTexture.active = camera.targetTexture;
        camera.Render();
        Texture2D currentFrame = new Texture2D(camera.targetTexture.width, camera.targetTexture.height);

        currentFrame.ReadPixels(new UnityEngine.Rect(0, 0, camera.targetTexture.width, camera.targetTexture.height), 0, 0);
        currentFrame.Apply();
        RenderTexture.active = activeRenderTexture;
        Mat image = OpenCvSharp.Unity.TextureToMat(currentFrame);

        UnityEngine.Object.Destroy(currentFrame);
        Mat grayImage = new Mat();

        Cv2.CvtColor(image, grayImage, ColorConversionCodes.BGR2GRAY);
        Cv2.EqualizeHist(grayImage, grayImage);
        Cv2.GaussianBlur(grayImage, grayImage, new Size(9, 9), 2, 2);

        Mat edgesImage = new Mat();

        Cv2.Canny(grayImage, edgesImage, 20, 40); // low and high thresholds
        Point[][]        contours_canny;
        HierarchyIndex[] hierarchy_canny;

        Cv2.FindContours(edgesImage, out contours_canny, out hierarchy_canny, RetrievalModes.List, ContourApproximationModes.ApproxSimple, null);

        Mat img_all_contours_and_filled = Mat.Zeros(edgesImage.Height, edgesImage.Width, MatType.CV_8UC1);
        Mat img_all_contours            = Mat.Zeros(edgesImage.Height, edgesImage.Width, MatType.CV_8UC1);

        for (int j = 0; j < contours_canny.Length; j++)
        {
            Scalar color = new Scalar(255, 255, 255);
            Cv2.DrawContours(img_all_contours_and_filled, contours_canny, j, color, -1, LineTypes.Link8, hierarchy_canny);
            Cv2.DrawContours(img_all_contours, contours_canny, j, color, 1, LineTypes.Link8, hierarchy_canny);
        }
        Mat img_only_closed_contours = new Mat();

        Cv2.Absdiff(img_all_contours_and_filled, img_all_contours, img_only_closed_contours);

        Point[][]        contours;
        HierarchyIndex[] hierarchy;

        Cv2.FindContours(img_only_closed_contours, out contours, out hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxTC89L1, null);

        Point[][]          contours_poly   = new Point[contours.Length][];
        OpenCvSharp.Rect[] boundRect       = new OpenCvSharp.Rect[contours.Length];
        List <Point2f>     contours_center = new List <Point2f>();
        List <float>       contours_radius = new List <float>();

        int i_contour = 0;

        foreach (Point[] contour in contours)
        {
            Point2f contour_center;
            float   contour_radius;

            contours_poly[i_contour] = Cv2.ApproxPolyDP(contour, 3, true);
            Cv2.MinEnclosingCircle(contours_poly[i_contour], out contour_center, out contour_radius);
            //currentFrame = DrawCircle(currentFrame, (int)contour_center.X, (int)contour_center.Y, (int)contour_radius);

            contours_center.Add(contour_center);
            contours_radius.Add(contour_radius);
            i_contour++;
        }

        v_center = contours_center;
        v_radius = contours_radius;

        //TextureToPNG(currentFrame);
    }
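A minimal usage sketch for the method above (assumes Camera.main has a targetTexture assigned):

        // Hedged usage sketch
        List<Point2f> centers;
        List<float> radii;
        SegmentationCannyFilledPolygons(Camera.main, out centers, out radii);
        Debug.Log("Found " + centers.Count + " closed contours");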
    void Update()
    {
        // If the webcam has been updated, update the frame.
        if (capture.Read(frame))
        {
            // Convert the frame into grayscale.
            Cv2.CvtColor(frame, gray, ColorConversionCodes.RGB2GRAY);

            // Equalise lighting to make landmark detection more accurate.
            Cv2.EqualizeHist(gray, gray);
        }


        //// As faces are the easiest to detect, check for these first.
        OpenCvSharp.Rect[] faces = new OpenCvSharp.Rect[0];
        faces = haarCascade.DetectMultiScale(
            gray, 1.15, 5, HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage, new Size(60, 60));

        //// Detect if any eyes are in the image.
        //OpenCvSharp.Rect[] eyes = new OpenCvSharp.Rect[2];
        //eyes = eyeCascade.DetectMultiScale(
        //           gray, 1.15, 6, HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage, new Size(40, 40));


        //// If exactly two eyes are detected, there must be a face - the assumption is that the two eyes belong to the same person.
        //if (eyes.Length == 2 && faces.Length == 0)
        //{
        //    Point averagePoint = new Point();
        //    averagePoint.X = (eyes[0].X + eyes[1].X) / 2;
        //    averagePoint.Y = (eyes[0].Y + eyes[1].Y) / 2;
        //    faces = new OpenCvSharp.Rect[1];
        //    faces[0] = new OpenCvSharp.Rect(averagePoint, new Size(150, 150));
        //}
        //// If there is a single eye, there must be a face. Less accurate, as there is no other landmark to triangulate from.
        //else if (eyes.Length == 1 && faces.Length == 0)
        //{
        //    Point averagePoint = new Point();
        //    averagePoint.X = eyes[0].X;
        //    averagePoint.Y = eyes[0].Y - 2;
        //    faces = new OpenCvSharp.Rect[1];
        //    faces[0] = new OpenCvSharp.Rect(averagePoint, new Size(80, 80));
        //}


        //// Because the cascade method is not rotation invariant, rotate the frame and check whether a face is there.
        //Mat clockRot = new Mat();
        //// If no faces were detected, rotate image.
        //if (faces.Length == 0)
        //{
        //    clockRot = RotateFrame(gray, 40);
        //    faces = haarCascade.DetectMultiScale(
        //                    clockRot, 1.15, 5, HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage, new Size(60, 60));
        //}
        //if (faces.Length == 0)
        //{
        //    clockRot = RotateFrame(gray, -40);
        //    faces = haarCascade.DetectMultiScale(
        //                clockRot, 1.15, 5, HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage, new Size(60, 60));

        //}

        //// For each face detected, add square around feature, and check for eyes.
        foreach (var faceRect in faces)
        {
            // Obtain the area containing the face.
            var faceMat = new Mat(frame, faceRect);
            // Set color of rectangle for face.
            var color = Scalar.FromRgb(0, 0, 255);
            // Draw rectangle around face. - remove once edge is found.
            Cv2.Rectangle(frame, faceRect, color, 3);
        }

        //foreach (var eyeRect in eyes)
        //{
        //    var eyeMat = new Mat(gray, eyeRect);
        //    var color = Scalar.FromRgb(0, 255, 0);
        //    Cv2.Rectangle(frame, eyeRect, color, 3);
        //}

        // This segment is to draw contours around an image and display it on the OpenCv window.

        // Blur the image.
        Cv2.Blur(gray, gray, new Size(3, 3));
        // Detect edges using canny - Lower number = more contours.
        Cv2.Canny(gray, cannyOutput, 100, 255);
        // Cv2.Threshold(gray, cannyOutput,100,255,ThresholdTypes.BinaryInv);
        // Find contours

        Cv2.FindContours(cannyOutput, out contours, out hierarchy, RetrievalModes.Tree, ContourApproximationModes.ApproxTC89L1, new Point(0, 0));
        // Draw contours

        //Parallel.For(0, contours.Length, i =>
        //{
        // Cv2.DrawContours(frame, contours, i, Scalar.White, 2, LineTypes.Filled, hierarchy);

        //});
        if (contours != null)
        {
            for (int i = 0; i < contours.Length; i++)
            {
                Cv2.DrawContours(frame, contours, i, Scalar.White, 2, LineTypes.Link8, hierarchy);
                if (Input.GetKeyDown("space"))
                {
                    Application.Quit();
                }
            }
        }
        //Debug.Log("Drawn contour.");
        // Update video.
        Cv2.ImShow("Contours", frame);



        //    frame.SaveImage("image",);
        //     Cv2.ImWrite("image.jpg", frame);

        //    snapFrame =  Resources.Load("image.jpg") as Texture2D;

        //Debug.Log(snapFrame.dimension);
    }
Exemple #19
0
        public static void RunFacialDetection(CancellationTokenSource cts, FaceDetectionService service)
        {
            string haarCascadeFile = RuntimeInformation.IsOSPlatform(OSPlatform.Linux) ? _piSettings.LinuxPathToFaceHaarCascade
                                                                            : _piSettings.DevPathToFaceHaarCascade;

            Console.WriteLine("Path to cascade classifier: " + haarCascadeFile);
            if (!File.Exists(haarCascadeFile))
            {
                Console.WriteLine("NO HAAR FILE FOUND");
                return;
            }

            Mat      sourceImg           = new Mat();
            DateTime lastFaceTime        = DateTime.Now;
            bool     wasSearchingForFace = true;

            VideoCapture captureInstance = new VideoCapture(0);

            while (!captureInstance.IsOpened())
            {
                Console.WriteLine("Video Capture being reopened.");
                captureInstance.Open(0);
                Thread.Sleep(500);
            }
            using (CascadeClassifier cascade = new CascadeClassifier(haarCascadeFile))
            //using (Window webCamWindow = new Window("webCamWindow"))
            {
                while (!cts.IsCancellationRequested)
                {
                    captureInstance.Read(sourceImg);
                    if (sourceImg.Empty())
                    {
                        break;
                    }

                    var grayImage = new Mat();
                    Cv2.CvtColor(sourceImg, grayImage, ColorConversionCodes.BGRA2GRAY);
                    Cv2.EqualizeHist(grayImage, grayImage);

                    var faces = cascade.DetectMultiScale(
                        image: grayImage,
                        scaleFactor: _faceDetectionConfiguration.ScaleFactor,
                        minNeighbors: _faceDetectionConfiguration.MinimumNeighbors,
                        flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                        minSize: new Size(_faceDetectionConfiguration.MinimumFaceWidth, _faceDetectionConfiguration.MinimumFaceHeight)
                        );

                    if (faces.Length > 0)
                    {
                        lastFaceTime = DateTime.Now;
                        if (wasSearchingForFace)
                        {
                            service.PublishFaceDetectionStats(new FaceDetectionStats
                            {
                                FaceRectangles = faces.Select(face => new Shared.FaceRect
                                {
                                    X      = face.X,
                                    Y      = face.Y,
                                    Width  = face.Width,
                                    Height = face.Height
                                }).ToList()
                            });
                            wasSearchingForFace = false;
                        }
                    }
                    else if (DateTime.Now - lastFaceTime >= TimeSpan.FromSeconds(_faceDetectionConfiguration.FaceTimeoutInSeconds))
                    {
                        if (!wasSearchingForFace)
                        {
                            service.PublishFaceDetectionStats(new FaceDetectionStats
                            {
                                IsZeroFaceAlert = true
                            });
                        }
                        wasSearchingForFace = true;
                    }
                }
            }
        }
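A minimal call sketch for the method above (constructing FaceDetectionService directly is an assumption; the real service may require dependencies):

            // Hedged usage sketch (needs System.Threading.Tasks)
            var cts = new CancellationTokenSource();
            Task.Run(() => RunFacialDetection(cts, new FaceDetectionService()));
            // ... later, to stop the capture loop:
            cts.Cancel();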
Exemple #20
0
        static void Main(string[] args)
        {
            var srcImage = new Mat(@"..\..\Images\Test.jpg");

            Cv2.ImShow("Source", srcImage);
            Cv2.WaitKey(1); // do events

            var grayImage = new Mat();

            Cv2.CvtColor(srcImage, grayImage, ColorConversion.BgrToGray);
            Cv2.EqualizeHist(grayImage, grayImage);

            var cascade       = new CascadeClassifier(@"..\..\Data\haarcascade_frontalface_alt.xml");
            var nestedCascade = new CascadeClassifier(@"..\..\Data\haarcascade_eye_tree_eyeglasses.xml");

            var faces = cascade.DetectMultiScale(
                image: grayImage,
                scaleFactor: 1.1,
                minNeighbors: 2,
                flags: HaarDetectionType.Zero | HaarDetectionType.ScaleImage,
                minSize: new Size(30, 30)
                );

            Console.WriteLine("Detected faces: {0}", faces.Length);

            var rnd   = new Random();
            var count = 1;

            foreach (var faceRect in faces)
            {
                var detectedFaceImage = new Mat(srcImage, faceRect);
                Cv2.ImShow(string.Format("Face {0}", count), detectedFaceImage);
                Cv2.WaitKey(1); // do events

                var color = Scalar.FromRgb(rnd.Next(0, 255), rnd.Next(0, 255), rnd.Next(0, 255));
                Cv2.Rectangle(srcImage, faceRect, color, 3);


                var detectedFaceGrayImage = new Mat();
                Cv2.CvtColor(detectedFaceImage, detectedFaceGrayImage, ColorConversion.BgrToGray);
                var nestedObjects = nestedCascade.DetectMultiScale(
                    image: detectedFaceGrayImage,
                    scaleFactor: 1.1,
                    minNeighbors: 2,
                    flags: HaarDetectionType.Zero | HaarDetectionType.ScaleImage,
                    minSize: new Size(30, 30)
                    );

                Console.WriteLine("Nested Objects[{0}]: {1}", count, nestedObjects.Length);

                foreach (var nestedObject in nestedObjects)
                {
                    var center = new Point
                    {
                        X = Cv.Round(nestedObject.X + nestedObject.Width * 0.5) + faceRect.Left,
                        Y = Cv.Round(nestedObject.Y + nestedObject.Height * 0.5) + faceRect.Top
                    };
                    var radius = Cv.Round((nestedObject.Width + nestedObject.Height) * 0.25);
                    Cv2.Circle(srcImage, center, radius, color, thickness: 3);
                }

                count++;
            }

            Cv2.ImShow("Haar Detection", srcImage);
            Cv2.WaitKey(1); // do events


            Cv2.WaitKey(0);
            Cv2.DestroyAllWindows();
            srcImage.Dispose();
        }
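Note that the example above targets the legacy OpenCvSharp 2.x API (ColorConversion.BgrToGray, Cv.Round). A minimal sketch of the OpenCvSharp 3/4 equivalents, matching the calls used in the other examples on this page:

            // Hedged sketch: modern equivalents of the legacy calls above
            Cv2.CvtColor(srcImage, grayImage, ColorConversionCodes.BGR2GRAY);
            var radius = (int)Math.Round((nestedObject.Width + nestedObject.Height) * 0.25, MidpointRounding.ToEven);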
        /// <summary>
        /// Detector
        /// </summary>
        /// <param name="inputTexture">Input Unity texture</param>
        /// <param name="texParams">Texture parameters (flipped, rotated etc.)</param>
        /// <param name="detect">Flag signalling whether we need detection on this frame</param>
        public virtual void ProcessTexture(T texture, Unity.TextureConversionParams texParams, bool detect = true)
        {
            // convert Unity texture to OpenCv::Mat
            ImportTexture(texture, texParams);

            // detect
            if (detect)
            {
                double invF = 1.0 / appliedFactor;
                DataStabilizer.ThresholdFactor = invF;

                // convert to grayscale and normalize
                Mat gray = new Mat();
                Cv2.CvtColor(processingImage, gray, ColorConversionCodes.BGR2GRAY);

                // fix shadows
                Cv2.EqualizeHist(gray, gray);

                /*Mat normalized = new Mat();
                 * CLAHE clahe = CLAHE.Create();
                 * clahe.TilesGridSize = new Size(8, 8);
                 * clahe.Apply(gray, normalized);
                 * gray = normalized;*/

                // detect matching regions (faces bounding)
                Rect[] rawFaces = cascadeFaces.DetectMultiScale(gray, 1.2, 6);
                if (Faces.Count != rawFaces.Length)
                {
                    Faces.Clear();
                }

                // now per each detected face draw a marker and detect eyes inside the face rect
                int facesCount = 0;
                for (int i = 0; i < rawFaces.Length; ++i)
                {
                    Rect faceRect       = rawFaces[i];
                    Rect faceRectScaled = faceRect * invF;
                    using (Mat grayFace = new Mat(gray, faceRect))
                    {
                        // another trick: confirm the face with eye detector, will cut some false positives
                        if (cutFalsePositivesWithEyesSearch && null != cascadeEyes)
                        {
                            Rect[] eyes = cascadeEyes.DetectMultiScale(grayFace);
                            if (eyes.Length == 0 || eyes.Length > 2)
                            {
                                continue;
                            }
                        }

                        // get face object
                        DetectedFace face = null;
                        if (Faces.Count < i + 1)
                        {
                            face = new DetectedFace(DataStabilizer, faceRectScaled);
                            Faces.Add(face);
                        }
                        else
                        {
                            face = Faces[i];
                            face.SetRegion(faceRectScaled);
                        }

                        // shape
                        facesCount++;
                        if (null != shapeFaces)
                        {
                            Point[] marks = shapeFaces.DetectLandmarks(gray, faceRect);

                            // we have 68-point predictor
                            if (marks.Length == 68)
                            {
                                // transform landmarks to the original image space
                                List <Point> converted = new List <Point>();
                                foreach (Point pt in marks)
                                {
                                    converted.Add(pt * invF);
                                }

                                // save and parse landmarks
                                face.SetLandmarks(converted.ToArray());
                            }
                        }
                    }
                }

                // log
                //UnityEngine.Debug.Log(String.Format("Found {0} faces", Faces.Count));
            }
        }
Exemple #22
0
        // License plate localization by color
        public static List <Mat> PlateLocateByColor(Mat matIn, ParameterList pl)
        {
            List <Mat> matPlates = new List <Mat>();


            Mat matHsv = matIn.CvtColor(ColorConversionCodes.BGR2HSV);

            // Equalize the V channel, then merge the channels back
            Mat[] matToHsv = new Mat[3];
            Cv2.Split(matHsv, out matToHsv);
            Cv2.EqualizeHist(matToHsv[2], matToHsv[2]);
            Mat equalizeHistHsv = new Mat();

            Cv2.Merge(matToHsv, equalizeHistHsv);

            // Search the equalized HSV color space for the yellow and blue regions
            Mat    matYellow  = new Mat();
            Mat    matBlue    = new Mat();
            Scalar yellow_low = new Scalar(15, 70, 70);
            Scalar yellow_up  = new Scalar(40, 255, 255);
            Scalar blue_low   = new Scalar(100, 70, 70);
            Scalar blue_up    = new Scalar(140, 255, 255);

            Cv2.InRange(equalizeHistHsv, yellow_low, yellow_up, matYellow);
            Cv2.InRange(equalizeHistHsv, blue_low, blue_up, matBlue);
            Mat matAll = matBlue + matYellow;

            // Clean up the selected color regions with morphological operations (dilate, then erode)
            Mat matAllDilate = new Mat();
            Mat matAllErode  = new Mat();
            Mat element      = Cv2.GetStructuringElement(MorphShapes.Rect, new OpenCvSharp.Size(7, 3));

            Cv2.Dilate(matAll, matAllDilate, element);
            Cv2.Erode(matAllDilate, matAllErode, element);


            // Find contours
            OpenCvSharp.Point[][] contours;         //vector<vector<Point>> contours;
            HierarchyIndex[]      hierarchyIndexes; //vector<Vec4i> hierarchy;
            Cv2.FindContours(
                matAllErode,
                out contours,
                out hierarchyIndexes,
                mode: RetrievalModes.Tree,
                method: ContourApproximationModes.ApproxSimple); // find the contours

            // Compute each contour's bounding rectangle and filter by plate geometry
            List <Rect> rects = new List <Rect>();

            foreach (OpenCvSharp.Point[] p in contours)
            {
                Rect rect = Cv2.BoundingRect(p);
                if ((double)rect.Height / rect.Width > pl.HeightDivideWidthLow && (double)rect.Height / rect.Width < pl.HeightDivedeWidthUp &&
                    rect.Height > pl.HeightLow && rect.Height < pl.HeightUp &&
                    rect.Width > pl.WidthLow && rect.Width < pl.WidthUp)
                {
                    rects.Add(rect);
                }
            }

            if (rects.Count == 0)
            {
                return(matPlates);
            }

            for (int index = 0; index < rects.Count; index++)
            {
                Mat roi = new Mat(matIn, rects[index]);
                matPlates.Add(roi);
            }
            return(matPlates);
        }
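A minimal usage sketch (the ParameterList thresholds are assumptions inferred from the geometry checks above):

            // Hedged usage sketch
            Mat car = Cv2.ImRead("car.jpg");
            List<Mat> plates = PlateLocateByColor(car, new ParameterList());
            Console.WriteLine("Candidate plate regions: {0}", plates.Count);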
Exemple #23
0
        public void Execute(TextDlg text)
        {
            Output = Input.Clone();

            InputBuffer.Add(Input);
            BufferState++;
            BufferState %= BufferSize;
            TimesBuffer.Add(DateTime.Now - StartTime);

            if (!LockFace)
            {
                var input_g = new Mat();
                Cv2.CvtColor(Input, input_g, ColorConversionCodes.BGR2GRAY);
                Cv2.EqualizeHist(input_g, input_g);

                var detect = Classifier.DetectMultiScale(input_g, minNeighbors: 4, minSize: new Size(50, 50),
                                                         flags: HaarDetectionType.ScaleImage);

                if (detect.Length > 0)
                {
                    var biggest = detect.OrderByDescending(r => r.Width * r.Height).First();

                    if (CalcShift(biggest) > SHIFT_THRESHOLD)
                    {
                        Face = biggest;
                        InputBuffer.Clear();
                    }
                }
            }

            if (Face == OpenCvSharp.Rect.Empty)
            {
                AverageColorBuffer.Add(0);
                return;
            }

            var forehead = GetSlice(0.5, 0.15, 0.38, 0.14);

            Rect(Face, BLUE);
            Rect(forehead, GREEN);

            if (Colorify)
            {
                // todo
            }

            var fh_average = CalcMeanColor(forehead);

            AverageColorBuffer.Add(fh_average);

            var num_samples = TimesBuffer.Count;

            if (num_samples > MIN_SAMPLE_COUNT)
            {
                var time_start = TimesBuffer[0];
                var time_end   = TimesBuffer[TimesBuffer.Count - 1];

                LastFPS = FPS;
                var dt = time_end - time_start;
                if (dt != TimeSpan.Zero)
                {
                    var fps = num_samples / dt.TotalSeconds;
                    FPS = (int)(fps * FPS_SMOOTHING + LastFPS * (1 - FPS_SMOOTHING));
                }

                var linear = MathNet.Numerics.Generate.LinearSpaced(num_samples, time_start.TotalSeconds, time_end.TotalSeconds);

                var window = MathNet.Numerics.Window.Hamming(num_samples);

                var interp =
                    window.Zip(linear.Select(i => MathNet.Numerics.Interpolate.Linear(TimesBuffer.Select(x => x.TotalSeconds), AverageColorBuffer).Interpolate(i)), (a, b) => a * b).ToList(); // apply the Hamming window to the resampled signal

                Deviation = interp.Select(x => x - ProgressiveMean(interp)).ToList();

                var fourier_raw = Deviation.Select(x => new Complex32((float)x, 0)).ToArray();

                MathNet.Numerics.IntegralTransforms.Fourier.Forward(fourier_raw);

                Fourier = fourier_raw.Select(x => Math.Abs(x.Real)).ToArray();

                Frequencies = Enumerable.Range(0, (int)Math.Ceiling(num_samples / 2d + 1)).Select(x => x * 60.0 * FPS / num_samples).ToArray();

                try
                {
                    var zip = Frequencies.Zip(Fourier, (a, b) => new { a, b })
                              .Where(x => x.a > BPM_LOW && x.a < BPM_HIGH).ToList(); // keep only frequencies inside the BPM band

                    Frequencies = zip.Select(x => x.a).ToArray();
                    Fourier     = zip.Select(x => x.b).ToArray();

                    BPMPosition = Array.IndexOf(Fourier, Fourier.Max());
                    BPM         = Frequencies[BPMPosition];

                    BPMBuffer.Add(BPM);

                    CorrectBPM = ProgressiveMean(BPMBuffer);
                    text($"d={Deviation.Last():F3}", 20, 120);
                }
                catch
                {
                    // ignore frames where the band is empty or indexing fails
                }

                if (FPS != 0)
                {
                    Gap = (BufferSize - num_samples) / FPS;
                }
            }
        }
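For reference, the Frequencies line above follows directly from DFT bin spacing: bin k of an N-sample window captured at FPS frames per second corresponds to k * FPS / N Hz, i.e. k * 60 * FPS / N beats per minute. For example, with FPS = 30 and N = 256, bin 6 maps to 6 * 60 * 30 / 256 ≈ 42 BPM.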
        private void GrabFace(Mat mat)
        {
            var srcImage = mat;

            _("Loading image...");
            Cv2.WaitKey(1);

            var grayImage = new Mat();

            // _("Generating greyscale");
            Cv2.CvtColor(srcImage, grayImage, ColorConversionCodes.BGRA2GRAY);
            // _("Creating histogram...");
            Cv2.EqualizeHist(grayImage, grayImage);
            // _("Loading Haar cascade...");
            var cascade = new CascadeClassifier("D:\\cascades\\haarcascade_frontalface_default.xml");
            // var nestedCascade = new CascadeClassifier("D:\\cascades\\haarcascade_eye_tree_eyeglasses.xml");
            // _("Detecting faces...");

            var _faces = cascade.DetectMultiScale(
                image: grayImage,
                scaleFactor: 1.1,
                minNeighbors: 3,
                flags: HaarDetectionType.DoCannyPruning | HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                minSize: new OpenCvSharp.Size(30, 30)
                );

            // _(String.Format("Faces detected: {0}", _faces.Length));

            if (_faces.Length > 0)
            {
                _("Face found! Extracting...");
                foreach (var faceRect in _faces)
                {
                    var faceBitmap = new Bitmap(mat.ToBitmap());
                    using (var g = Graphics.FromImage(faceBitmap))
                    {
                        g.FillRectangle(new SolidBrush(System.Drawing.Color.FromArgb(200, 0, 0, 0)), g.ClipBounds);

                        var br = new SolidBrush(System.Drawing.Color.FromArgb(200, System.Drawing.Color.LightGreen));

                        var box = new System.Drawing.Rectangle(faceRect.X, faceRect.Y, faceRect.Width, faceRect.Height);

                        g.DrawRectangle(Pens.LightGreen, box);

                        int rTop = faceRect.Top + faceRect.Height;

                        g.FillRectangle(br, faceRect.Left, rTop, faceRect.Width, faceRect.Height);

                        ImgPreviewBox.Source = faceBitmap.ToBitmapSource();
                    }



                    count++;
                    var detectedFaceImage = new Mat(srcImage, faceRect);

                    if (groupName != "" && personName != "")
                    {
                        String iPath = String.Format("{0}{1}\\{2}\\{3}.png", baseDir, groupName, personName, count);
                        _(String.Format("Saving {0}...", iPath));
                        imagePaths.Add(iPath);
                        detectedFaceImage.SaveImage(iPath);
                        GeneratePreview(iPath);
                    }
                }
            }
        }
Exemple #25
0
        public CameraThread(int id = 0) : base($"camera-thread_{id}")
        {
            UpdateInterval = 0;
            SerialPort.Open();
            ThreadStarted += new EventHandler <ThreadService>((obj, service) =>
            {
                try
                {
                    _capture = new VideoCapture(0);
                    if (_capture.Read(_mat))
                    {
                        _capture.Read(_mat);
                        if (_mat?.Width == 0)
                        {
                            Stop();
                            MessageBox.Show("Camera Unavailable");
                        }
                    }
                }
                catch
                {
                    Stop();
                    MessageBox.Show("Camera Unavailable");
                }
            });

            Update += new EventHandler <ThreadService>((obj, service) =>
            {
                if (_capture.Read(_mat))
                {
                    var grayImage = new Mat();
                    _mat          = _mat.Flip(FlipMode.Y);
                    Cv2.CvtColor(_mat, grayImage, ColorConversionCodes.BGRA2GRAY);
                    Cv2.EqualizeHist(grayImage, grayImage);

                    var faces = _cascade.DetectMultiScale(
                        image: grayImage,
                        scaleFactor: 1.6,
                        minNeighbors: 3,
                        flags: HaarDetectionType.DoCannyPruning | HaarDetectionType.ScaleImage,
                        minSize: new OpenCvSharp.Size(30, 30)
                        );

                    var biggestFace = faces.FirstOrDefault();
                    var faceSize    = biggestFace.Size.Width * biggestFace.Size.Height;

                    foreach (var faceRect in faces)
                    {
                        if (faceRect.Width * faceRect.Height > biggestFace.Width * biggestFace.Height)
                        {
                            biggestFace = faceRect;
                            faceSize    = biggestFace.Width * biggestFace.Height;
                        }
                    }
                    if (faceSize > 0)
                    {
                        if (_previousFaceSize == faceSize)
                        {
                            _count++;
                        }
                        else
                        {
                            _count = 0;
                        }
                        _previousFaceSize = faceSize;
                    }
                    else
                    {
                        _count = 0;
                    }
                    if (_count > 3)
                    {
                        int x  = biggestFace.X + biggestFace.Width / 2;
                        int y  = biggestFace.Y + biggestFace.Height / 2 - biggestFace.Height / 4;
                        int dx = x - _lastX;
                        int dy = y - _lastY;
                        _lastX = x;
                        _lastY = y;

                        if (Math.Abs(dx) < 50)
                        {
                            x += 40 * Math.Sign(dx);
                        }
                        if (Math.Abs(dy) < 50)
                        {
                            y += 40 * Math.Sign(dy);
                        }

                        Cv2.Rectangle(_mat, biggestFace, Scalar.Red, 3);
                        SerialPort.Write(
                            $"{getH(x, _previousFaceSize)};" +
                            $"{getV(y, _previousFaceSize)}");
                    }

                    // Turns the processed camera image into a bitmap and passes it to the form.
                    if (!_doCancel)
                    {
                        ImageProcessed?.Invoke(this, _mat.ToBitmap());
                    }
                }

                // Horizontal linear equation.
                // h(x) = ax + c
                int getH(int x, int size)
                {
                    double distance = Math.Sqrt(size) - 65;
                    double a        = (0.0726 - (37.899 / 1000_000) * distance);
                    double c        = 75.502 - (12.651 / 1000) * distance;
                    return((int)(a * x + c));
                }

                // Vertical linear equation.
                // v(y) = ay + c
                int getV(int y, int size)
                {
                    double distance = Math.Sqrt(size) - 65;
                    double a        = (0.0714 - (39.343 / 1000_000) * distance);
                    double c        = 61.553 + (81.848 / 1000) * distance;
                    return((int)(a * y + c));
                }
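                // Worked example with illustrative numbers: for size = 10_000, distance = sqrt(10_000) - 65 = 35,
                // so getH(320, 10_000) = (0.0726 - 37.899e-6 * 35) * 320 + (75.502 - 12.651e-3 * 35) ≈ 97.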


                // Useful for calibration to avoid servo sliding
                //if (MoveServos)
                //{
                //    SerialPort.Write($"{H - 40};{V - 40}");
                //    Thread.Sleep(400);
                //    SerialPort.Write($"{H};{V}");
                //    MoveServos = false;
                //}
            });
        }
Exemple #26
0
        static void Main(string[] args)
        {
            Mat mat1 = null, mat2 = null, mat3 = null;

            var frame   = new Mat();
            var capture = new VideoCapture(0);

            capture.Open(0);

            if (capture.IsOpened())
            {
                while (true)
                {
                    capture.Read(frame);
                    Console.WriteLine("read");

                    mat3 = frame.Clone(); // clone: capture.Read reuses the frame buffer, so plain assignment would alias all three mats

                    if (mat1 != null)
                    {
                        //var image = DiffImage(mat3, mat2, mat1).ToBitmap();
                        Mat d1 = new Mat();
                        Cv2.Absdiff(mat3, mat2, d1);

                        var image = mat3.ToBitmap();

                        //image.Save("1.jpg", ImageFormat.Jpeg);
                        //Console.WriteLine("save");

                        var grayImage = new Mat();
                        Cv2.CvtColor(mat3, grayImage, ColorConversionCodes.BGRA2GRAY);
                        Cv2.EqualizeHist(grayImage, grayImage);

                        var cascade       = new CascadeClassifier(@".\CascadeClassifiers\haarcascade_frontalface_alt2.xml");
                        var nestedCascade = new CascadeClassifier(@".\CascadeClassifiers\haarcascade_eye_tree_eyeglasses.xml");

                        var faces = cascade.DetectMultiScale(
                            image: grayImage,
                            scaleFactor: 1.1,
                            minNeighbors: 2,
                            flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                            minSize: new Size(30, 30)
                            );

                        Console.WriteLine("Detected faces: {0}", faces.Length);

                        var srcImage = mat3;

                        //new BodyDetector().Detect(mat3);
                        // Enlarge each face rect slightly; Rect is a struct, so mutate the array elements
                        // in place (List.ForEach over struct copies would be a no-op)
                        for (int i = 0; i < faces.Length; i++)
                        {
                            faces[i].Height += 10;
                        }
                        foreach (var faceRect in faces)
                        {
                            Cv2.Rectangle(frame, faceRect, Scalar.Red, 2);
                            //var a = new Mat(srcImage, faceRect);
                            //var eigenValues = OutputArray.Create(a);
                            //var eigenVectors = OutputArray.Create(a);
                            //Cv2.Eigen(a, eigenValues, eigenVectors);
                        }

                        Cv2.ImShow("Source", mat3);
                        Cv2.WaitKey(1); // do events

                        //var count = 1;
                        //foreach (var faceRect in faces)
                        //{
                        //    var detectedFaceImage = new Mat(srcImage, faceRect);
                        //    Cv2.ImShow(string.Format("Face {0}", count), detectedFaceImage);
                        //    Cv2.WaitKey(1); // do events

                        //    //var color = Scalar.FromRgb(rnd.Next(0, 255), rnd.Next(0, 255), rnd.Next(0, 255));
                        //    Cv2.Rectangle(srcImage, faceRect, Scalar.Red, 3);

                        //    var detectedFaceGrayImage = new Mat();
                        //    Cv2.CvtColor(detectedFaceImage, detectedFaceGrayImage, ColorConversionCodes.BGRA2GRAY);
                        //    var nestedObjects = nestedCascade.DetectMultiScale(
                        //        image: detectedFaceGrayImage,
                        //        scaleFactor: 1.1,
                        //        minNeighbors: 2,
                        //        flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                        //        minSize: new Size(30, 30)
                        //    );

                        //    Console.WriteLine("Nested Objects[{0}]: {1}", count, nestedObjects.Length);

                        //    foreach (var nestedObject in nestedObjects)
                        //    {
                        //        var center = new Point
                        //        {
                        //            X = (int)(Math.Round(nestedObject.X + nestedObject.Width * 0.5, MidpointRounding.ToEven) + faceRect.Left),
                        //            Y = (int)(Math.Round(nestedObject.Y + nestedObject.Height * 0.5, MidpointRounding.ToEven) + faceRect.Top)
                        //        };
                        //        var radius = Math.Round((nestedObject.Width + nestedObject.Height) * 0.25, MidpointRounding.ToEven);
                        //        Cv2.Circle(srcImage, center, (int)radius, Scalar.Red, thickness: 3);
                        //    }

                        //    count++;
                        //}

                        //Cv2.ImShow("Haar Detection", srcImage);
                        //Cv2.WaitKey(1); // do events
                    }

                    mat1 = mat2;
                    mat2 = mat3;
                }
            }
        }
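The Absdiff result d1 above is computed but never consumed; a minimal sketch of turning it into a motion mask (the threshold value is illustrative):

                        // Hedged sketch: binary motion mask from the frame difference
                        Mat diffGray = new Mat();
                        Cv2.CvtColor(d1, diffGray, ColorConversionCodes.BGR2GRAY);
                        Cv2.Threshold(diffGray, diffGray, 25, 255, ThresholdTypes.Binary);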
    private void DemoFaceTrack()
    {
        int ColorWidth  = kinectManager.ColorWidth;
        int ColorHeight = kinectManager.ColorHeight;

        Mat colorImage = new Mat(kinectManager.ColorHeight, ColorWidth, MatType.CV_8UC4, kinectManager.ColorRawData);              //rows=height, cols=width
        Mat grayImage  = new Mat();

        Cv2.CvtColor(colorImage, grayImage, ColorConversionCodes.RGBA2GRAY);
        Cv2.EqualizeHist(grayImage, grayImage);

        OpenCvSharp.Rect[] faces = cascade.DetectMultiScale(
            image: grayImage,
            scaleFactor: 1.1,
            minNeighbors: 2,
            flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
            minSize: new Size(30, 30)
            );


        for (int i = 0; i < faces.Length; i++)
        {
            OpenCvSharp.Rect faceRect = faces[i];

            //outline overall face in image
            var rndColor = Scalar.FromRgb(
                UnityEngine.Random.Range(0, 255),
                UnityEngine.Random.Range(0, 255),
                UnityEngine.Random.Range(0, 255)
                );
            Cv2.Rectangle(colorImage, faceRect, rndColor, 3);

            //now do nested features like the eyes
            Mat subFaceImage          = new Mat(colorImage, faceRect);
            Mat detectedFaceGrayImage = new Mat();
            Cv2.CvtColor(subFaceImage, detectedFaceGrayImage, ColorConversionCodes.RGBA2GRAY);
            var nestedObjects = nestedCascade.DetectMultiScale(
                image: detectedFaceGrayImage,
                scaleFactor: 1.1,
                minNeighbors: 2,
                flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                minSize: new Size(30, 30)
                );

            //display each nested face feature
            foreach (var nestedObject in nestedObjects)
            {
                var center = new Point
                {
                    X = (int)(Math.Round(nestedObject.X + nestedObject.Width * 0.5, MidpointRounding.ToEven) + faceRect.Left),
                    Y = (int)(Math.Round(nestedObject.Y + nestedObject.Height * 0.5, MidpointRounding.ToEven) + faceRect.Top)
                };
                var radius = Math.Round((nestedObject.Width + nestedObject.Height) * 0.25, MidpointRounding.ToEven);
                Cv2.Circle(colorImage, center, (int)radius, rndColor, thickness: 3);
            }
        }


        //Convert back to RGBA32
        //Mat faceImageOut = new Mat(IRWidth, IRHeight, MatType.CV_8UC4);
        //Cv2.CvtColor(colorImage, faceImageOut, ColorConversionCodes.RGBA2RGBA);

        //load onto texture
        byte[] rawTextureBytes = KinectCVUtilities.ConvertMatToBytes(colorImage);
        kinectManager.ColorTexture.LoadRawTextureData(rawTextureBytes);
        kinectManager.ColorTexture.Apply();
    }
        /// <summary>
        /// Process the sample
        /// </summary>
        void Start()
        {
            // convert texture to cv image
            Mat image = Unity.TextureToMat(this.sample);

            // Detect faces
            var gray = image.CvtColor(ColorConversionCodes.BGR2GRAY);

            Cv2.EqualizeHist(gray, gray);

            // detect matching regions (faces bounding)
            Rect[] rawFaces = cascadeFaces.DetectMultiScale(gray, 1.1, 6);

            // now per each detected face draw a marker and detect eyes inside the face rect
            foreach (var faceRect in rawFaces)
            {
                var grayFace = new Mat(gray, faceRect);
                if (requiredSize.Width > 0 && requiredSize.Height > 0)
                {
                    grayFace = grayFace.Resize(requiredSize);
                }

                // now try to recognize the face:
                // "confidence" here is actually misleading: it is really the distance from the sample to the closest known face,
                // where the exact metric is not disclosed in the docs. Checking returned values, I found "confidence" to be around
                // 70-100 for a positive match with the LBPH algo and more like 700-1200 for a positive match with EigenFaces/FisherFaces.
                // Unfortunately, none of that data is much help in real life, as you don't get an adequate confidence percentage;
                // the only thing you actually know is "less is better", with 0 being some "ideal match".
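                // (the confidence < 1200 cutoff below follows from the EigenFaces/FisherFaces range noted above)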
                int    label      = -1;
                double confidence = 0.0;
                recognizer.Predict(grayFace, out label, out confidence);

                bool   found      = confidence < 1200;
                Scalar frameColor = found ? Scalar.LightGreen : Scalar.Red;
                Cv2.Rectangle((InputOutputArray)image, faceRect, frameColor, 2);

                int          line        = 0;
                const int    textPadding = 2;
                const double textScale   = 2.0;
                string       message     = String.Format("{0} ({1})", names[label], (int)confidence);
                var          textSize    = Cv2.GetTextSize(message, HersheyFonts.HersheyPlain, textScale, 1, out line);
                var          textBox     = new Rect(
                    faceRect.X + (faceRect.Width - textSize.Width) / 2 - textPadding,
                    faceRect.Bottom,
                    textSize.Width + textPadding * 2,
                    textSize.Height + textPadding * 2
                    );

                Cv2.Rectangle((InputOutputArray)image, textBox, frameColor, -1);
                image.PutText(message, textBox.TopLeft + new Point(textPadding, textPadding + textSize.Height), HersheyFonts.HersheyPlain, textScale, Scalar.Black, 2);
            }

            // Render texture
            var texture  = Unity.MatToTexture(image);
            var rawImage = gameObject.GetComponent <UnityEngine.UI.RawImage>();

            rawImage.texture = texture;

            var transform = gameObject.GetComponent <UnityEngine.RectTransform>();

            transform.sizeDelta = new UnityEngine.Vector2(image.Width, image.Height);
        }
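The recognizer above is assumed to be trained before Start() runs; a minimal training sketch using the LBPH implementation from OpenCvSharp's face module (the training-data variables are assumptions):

            // Hedged sketch: create and train an LBPH recognizer on grayscale face crops
            var recognizer = OpenCvSharp.Face.LBPHFaceRecognizer.Create();
            recognizer.Train(trainingFaces, trainingLabels); // IEnumerable<Mat> crops plus matching int labels, assumed prepared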
Exemple #29
0
        public static double?GetRotationAngle(string inputFileFolderPath, string inputFileName, bool showMessageBoxes, ImageBox iboxRaw, ImageBox iboxProcessed, out double msecElapsed)
        {
            double? rotationAngle = null;

            msecElapsed = 0;

            // Hough algo does a bad job detecting horizontal lines. So we rotate the image by a set amount before running the Hough.
            double houghRotationOffsetAngle = 25.0;

            try
            {
                Stopwatch stopWatch = new Stopwatch();
                stopWatch.Start();

                if (iboxProcessed != null)
                {
                    iboxProcessed.Image = null;
                    iboxProcessed.Refresh();
                }

                Mat rotated = new Mat();

                Mat src = new Mat(inputFileFolderPath + inputFileName, ImreadModes.Grayscale);

                if (showMessageBoxes && iboxRaw != null)
                {
                    iboxRaw.Image = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(src);
                }


                // Not needed if we read as grayscale to start with.
                //Mat src8UC1 = new Mat();
                //src.ConvertTo(src8UC1, MatType.CV_8UC1);

                // I'm not sure why we do the gauss - It seems like everyone does it, it's cheap, so we do it. ~Ed
                Mat gauss = new Mat();
                Cv2.GaussianBlur(src, gauss, new OpenCvSharp.Size(3, 3), 2, 2);
                LogEvent("gauss", showMessageBoxes, gauss, iboxProcessed);

                // An attempt to get the contrast across the image to be somewhat uniform.
                CLAHE claheFilter = Cv2.CreateCLAHE(4, new OpenCvSharp.Size(10, 10));
                Mat   clahe       = new Mat();
                claheFilter.Apply(gauss, clahe);
                LogEvent("clahe", showMessageBoxes, clahe, iboxProcessed);

                // An attempt to get the contrast across the image to be somewhat uniform.
                Mat hist = new Mat();
                Cv2.EqualizeHist(gauss, hist);
                LogEvent("hist", showMessageBoxes, hist, iboxProcessed);



                // Grab a template from some middle part of the image. Eventually, the size and location of this
                // template will be specified. It is very possible we'll have to grab multiple templates, as the
                // location of the template may impact the accuracy of the rotation.
                // e.g. - if the template is an image of a damaged device (which may happen at any location), the calculated
                // rotation may be wrong. Testing is required.
                // The locations where the template matches will create an image with lines that are offset from 0/90 degrees.
                // This is because we can assume that the devices are orthogonal to one another, even if the image itself is
                // offset rotationally.
                Rect r1       = new Rect(new OpenCvSharp.Point(1000, 1000), new OpenCvSharp.Size(500, 300));
                var  roi      = new Mat(clahe, r1);
                Mat  template = new Mat(new OpenCvSharp.Size(500, 300), MatType.CV_8UC1);
                roi.CopyTo(template);

                LogEvent("template", showMessageBoxes, template, iboxProcessed);


                Mat templateMatch = new Mat();

                Cv2.MatchTemplate(clahe, template, templateMatch, TemplateMatchModes.CCoeffNormed);
                LogEvent("templatematch", showMessageBoxes, templateMatch, iboxProcessed);

                Mat normalized = new Mat();
                normalized = templateMatch.Normalize(0, 255, NormTypes.MinMax);
                normalized.ConvertTo(normalized, MatType.CV_8UC1);
                LogEvent("normalized template match", showMessageBoxes, normalized, iboxProcessed);


                // This winnows down the number of matches.
                Mat thresh = new Mat();
                Cv2.Threshold(normalized, thresh, 200, 255, ThresholdTypes.Binary);
                LogEvent("threshold template match", showMessageBoxes, thresh, iboxProcessed);

                // rotate the image because hough doesn't work very well to find horizontal lines.
                Mat rotatedThresh = new Mat();
                Cv2E.RotateDegrees(thresh, rotatedThresh, houghRotationOffsetAngle);
                LogEvent("rotatedThresh", showMessageBoxes, rotatedThresh, iboxProcessed);

                Mat erode = new Mat();
                Cv2.Erode(rotatedThresh, erode, new Mat());
                LogEvent("erode", showMessageBoxes, erode, iboxProcessed);


                LineSegmentPoint[] segHoughP = Cv2.HoughLinesP(rotatedThresh, 1, Math.PI / 1800, 2, 10, 600);


                Mat imageOutP = new Mat(src.Size(), MatType.CV_8UC3);

                // We're limiting the rotation correction to +/- 10 degrees, so we only care about Hough lines near 90 (80 to 100) or near 0 (-10 to 10, after the 180-degree shift applied below)
                List <double> anglesNear90 = new List <double>();
                List <double> anglesNear0  = new List <double>();

                foreach (LineSegmentPoint s in segHoughP)
                {
                    try
                    {
                        // Add lines to the image, if we're going to look at it.
                        if (showMessageBoxes)
                        {
                            imageOutP.Line(s.P1, s.P2, Scalar.White, 1, LineTypes.AntiAlias, 0);
                        }

                        var radian = Math.Atan2((s.P1.Y - s.P2.Y), (s.P1.X - s.P2.X));
                        var angle  = ((radian * (180 / Math.PI) + 360) % 360);

                        // We rotated the image because the hough algo does a bad job with small horizontal lines. So we take that rotation back out here.
                        angle += houghRotationOffsetAngle;
                        angle -= 180;

                        if (angle > 80 && angle < 100)
                        {
                            anglesNear90.Add(angle);
                            if (showMessageBoxes)
                            {
                                imageOutP.Line(s.P1, s.P2, Scalar.Red, 1, LineTypes.AntiAlias, 0);
                            }
                        }

                        if (angle > -10 && angle < 10)
                        {
                            anglesNear0.Add(angle);
                            if (showMessageBoxes)
                            {
                                imageOutP.Line(s.P1, s.P2, Scalar.Orange, 1, LineTypes.AntiAlias, 0);
                            }
                        }
                    }
                    catch (Exception ex)
                    {
                        // there's always some infinity risk with atan, yes? Maybe. I don't want to fail on horizontal or vertical line edge cases.
                    }
                }

                double meanAngleNear0 = 0;
                if (anglesNear0.Count > 0)
                {
                    meanAngleNear0 = anglesNear0.Mean();
                }

                double meanAngleNear90 = 90;
                if (anglesNear90.Count > 0)
                {
                    meanAngleNear90 = anglesNear90.Mean();
                }



                // Use both the vertical and horizontal to calculate the image angle with a weighted average. It might be more accurate to use median instead of mean here.
                rotationAngle = ((meanAngleNear0) * anglesNear0.Count + (meanAngleNear90 - 90) * anglesNear90.Count) / (anglesNear0.Count + anglesNear90.Count);

                LogEvent("hough lines", showMessageBoxes, imageOutP, iboxProcessed);

                stopWatch.Stop();
                // Get the elapsed time as a TimeSpan value. Less than 400msec in debug mode via IDE.
                TimeSpan ts = stopWatch.Elapsed;
                msecElapsed = ts.TotalMilliseconds;
            }
            catch (Exception ex)
            {
            }

            return(rotationAngle);
        }
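Cv2E.RotateDegrees above is a project-specific extension rather than part of OpenCvSharp; a minimal sketch of such a helper, assuming the (src, dst, angle) signature used above:

        // Hedged sketch of a RotateDegrees-style helper built from standard OpenCvSharp calls
        static void RotateDegrees(Mat src, Mat dst, double angleDegrees)
        {
            Point2f center = new Point2f(src.Width / 2f, src.Height / 2f);
            using (Mat rotation = Cv2.GetRotationMatrix2D(center, angleDegrees, 1.0))
            {
                Cv2.WarpAffine(src, dst, rotation, src.Size());
            }
        }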
        public static void DetectFacesOnImage(string sourceImagePath, string destImagePath)
        {
            var srcImage = new Mat(sourceImagePath);

            // Cv2.ImShow("Source", srcImage);
            Cv2.WaitKey(1); // do events

            var grayImage = new Mat();

            Cv2.CvtColor(srcImage, grayImage, ColorConversionCodes.BGRA2GRAY);
            Cv2.EqualizeHist(grayImage, grayImage);

            var cascade       = new CascadeClassifier(@"./OpenCvSharp/haarcascade_frontalface_alt.xml");
            var nestedCascade = new CascadeClassifier(@"./OpenCvSharp/haarcascade_eye_tree_eyeglasses.xml");

            var faces = cascade.DetectMultiScale(
                image: grayImage,
                scaleFactor: 1.1,
                minNeighbors: 2,
                flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                minSize: new Size(30, 30)
                );

            Console.WriteLine("Detected faces: {0}", faces.Length);

            // Get accessories of the faces
            var rnd   = new Random();
            var count = 1;

            foreach (var faceRect in faces)
            {
                var detectedFaceImage = new Mat(srcImage, faceRect);
                // Cv2.ImShow(string.Format("Face {0}", count), detectedFaceImage);
                Cv2.WaitKey(1); // do events

                // Draw a rectangle around the face
                var color = Scalar.FromRgb(rnd.Next(0, 255), rnd.Next(0, 255), rnd.Next(0, 255));
                Cv2.Rectangle(srcImage, faceRect, color, 2);

                // Get gray image
                var detectedFaceGrayImage = new Mat();
                Cv2.CvtColor(detectedFaceImage, detectedFaceGrayImage, ColorConversionCodes.BGRA2GRAY);

                var nestedObjects = nestedCascade.DetectMultiScale(
                    image: detectedFaceGrayImage,
                    scaleFactor: 1.1,
                    minNeighbors: 2,
                    flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                    minSize: new Size(30, 30)
                    );

                Console.WriteLine("Nested Objects[{0}]: {1}", count, nestedObjects.Length);

                // Draw circle around accessory
                //foreach (var nestedObject in nestedObjects)
                //{
                //    var center = new Point
                //    {
                //        X = (int)(Math.Round(nestedObject.X + nestedObject.Width * 0.5, MidpointRounding.ToEven) + faceRect.Left),
                //        Y = (int)(Math.Round(nestedObject.Y + nestedObject.Height * 0.5, MidpointRounding.ToEven) + faceRect.Top)
                //    };
                //    var radius = Math.Round((nestedObject.Width + nestedObject.Height) * 0.25, MidpointRounding.ToEven);
                //    Cv2.Circle(srcImage, center, (int)radius, color, thickness: 3);
                //}

                count++;
            }

            // Cv2.ImShow("Haar Detection", srcImage);
            Cv2.WaitKey(1); // do events

            // Save result image
            srcImage.SaveImage(destImagePath);

            Cv2.WaitKey(0);
            Cv2.DestroyAllWindows();
            srcImage.Dispose();
        }
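A minimal call sketch for the method above (paths are illustrative):

            // Hedged usage sketch
            DetectFacesOnImage("./photos/input.jpg", "./photos/faces.jpg");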