public static string ReadText(string inputImagePath)
        {
            // Let the user mark one or more text regions on the image, then OCR each
            // region with Tesseract and concatenate the per-block results.
            using var srcImage = new Mat(inputImagePath);
            var bbox = Cv2.SelectROIs("Select Text Boxes. Enter for confirm, Esc for finish", srcImage);

            Cv2.DestroyWindow("Select Text Boxes. Enter for confirm, Esc for finish");

            // No selection -> OCR the whole image; otherwise OCR each selected sub-region.
            // BUGFIX: the original had a stray ';' between the if-block and 'else',
            // which does not compile.
            List<Bitmap> bitmaps;
            if (bbox == null || bbox.Length == 0)
            {
                bitmaps = new List<Bitmap>
                {
                    OpenCvSharp.Extensions.BitmapConverter.ToBitmap(srcImage)
                };
            }
            else
            {
                bitmaps = bbox.Select(x => OpenCvSharp.Extensions.BitmapConverter.ToBitmap(srcImage.SubMat(x))).ToList();
            }

            try
            {
                using var ocr = new Tesseract.TesseractEngine(@"../../../../OpenCVHavrylov/Data/tessdata", "eng");
                var res = "";
                int i   = 1;
                foreach (var img in bitmaps)
                {
                    using (var page = ocr.Process(img))
                    {
                        res += $"[Block {i}]: {page.GetText()}";
                        i++;
                    }
                }
                return res;
            }
            finally
            {
                // Bitmaps wrap GDI handles; release them deterministically.
                foreach (var bmp in bitmaps)
                {
                    bmp.Dispose();
                }
            }
        }
Exemple #2
0
 private void button3_Click(object sender, EventArgs e)
 {
     // Shutdown handler: persist the colour calibration, tear down the OpenCV
     // preview window, release the camera, then close the form.
     // NOTE(review): order matters — the camera must be released after its
     // window is destroyed and before the form closes.
     colourCalibration.SaveFile();
     Cv2.DestroyWindow(colourWindow.Name);
     cam.Release();
     Close();
 }
        /// <summary>
        /// Track a user-selected region of the webcam feed with a MOSSE tracker,
        /// drawing the box, FPS and status onto each frame until 'q' is pressed.
        /// </summary>
        public static void TrackObject()
        {
            using var tracker = TrackerMOSSE.Create();
            using var cap     = new VideoCapture();

            cap.Open(0);
            using var image = new Mat();

            cap.Read(image);
            Cv2.ImShow("Debug", image);

            // Let the user draw the initial bounding box on the first frame.
            var bbox   = Cv2.SelectROI("Tracking", image, false);
            var bbox2d = new Rect2d(bbox.X, bbox.Y, bbox.Width, bbox.Height);

            tracker.Init(image, bbox2d);
            while (true)
            {
                var timer = Cv2.GetTickCount();

                // Dispose the per-frame Mat each iteration; the original leaked one Mat per frame.
                using var img = new Mat();

                // BUGFIX: the original discarded the cap.Read() result (it was immediately
                // overwritten by tracker.Update), so a failed grab fed an empty frame onward.
                if (!cap.Read(img) || img.Empty())
                {
                    break;
                }

                var success = tracker.Update(img, ref bbox2d);
                if (success)
                {
                    DrawBox(img, bbox2d);
                }
                else
                {
                    Cv2.PutText(img, "Lost", new OpenCvSharp.Point(100, 75), HersheyFonts.HersheySimplex, 0.7, new Scalar(0, 255, 0), 2);
                }

                // Static HUD: frame rectangle plus FPS and status labels.
                Cv2.Rectangle(img, new OpenCvSharp.Point(15, 15), new OpenCvSharp.Point(200, 90), new Scalar(255, 0, 255), 2);
                Cv2.PutText(img, "Fps:", new OpenCvSharp.Point(20, 40), HersheyFonts.HersheySimplex, 0.7, new Scalar(255, 0, 255), 2);
                Cv2.PutText(img, "Status:", new OpenCvSharp.Point(20, 70), HersheyFonts.HersheySimplex, 0.7, new Scalar(255, 0, 255), 2);

                // Instantaneous FPS from the tick counter; colour-coded by speed.
                var    fps = (int)(Cv2.GetTickFrequency() / (Cv2.GetTickCount() - timer));
                Scalar myColor;
                if (fps > 60)
                {
                    myColor = new Scalar(20, 230, 20);
                }
                else if (fps > 20)
                {
                    myColor = new Scalar(230, 20, 20);
                }
                else
                {
                    myColor = new Scalar(20, 20, 230);
                }
                Cv2.PutText(img, fps.ToString(), new OpenCvSharp.Point(75, 40), HersheyFonts.HersheySimplex, 0.7, myColor, 2);

                Cv2.ImShow("Tracking", img);

                // 113 == 'q': quit and close the tracking window.
                if (Cv2.WaitKey(1) == 113)
                {
                    Cv2.DestroyWindow("Tracking");
                    break;
                }
            }
        }
Exemple #4
0
 /// <summary>
 /// Render a WriteableBitmap in an OpenCV window and block until a key is pressed.
 /// </summary>
 /// <param name="src">Source bitmap to display.</param>
 public static void Show(WriteableBitmap src)
 {
     // BUGFIX: the original created an empty Mat and left the ToMat copy commented
     // out, so an empty image was shown. Mirrors the working ShowCV implementation.
     using (Mat mat = new Mat(src.PixelHeight, src.PixelWidth, MatType.CV_8UC3))
     {
         src.ToMat(mat);
         Cv2.ImShow("image", mat);
         Cv2.WaitKey();
         Cv2.DestroyWindow("image");
     }
 }
Exemple #5
0
        static void Main(string[] args)
        {
            // Load the logo at half resolution (ReducedColor2) and show it in a
            // fixed-size window until any key is pressed.
            var logo = Cv2.ImRead("OpenCV_Logo.png", ImreadModes.ReducedColor2);

            Cv2.NamedWindow("src", WindowMode.AutoSize);
            Cv2.SetWindowProperty("src", WindowProperty.Fullscreen, 0);
            Cv2.ImShow("src", logo);
            Cv2.WaitKey(0);
            Cv2.DestroyWindow("src");
        }
Exemple #6
0
        /// <summary>
        /// Copy the WPF bitmap into an 8-bit 3-channel Mat and display it until a key is pressed.
        /// </summary>
        /// <param name="src">Source bitmap to display.</param>
        public static void ShowCV(this WriteableBitmap src)
        {
            using (var frame = new Mat(src.PixelHeight, src.PixelWidth, MatType.CV_8UC3))
            {
                src.ToMat(frame);
                Cv2.NamedWindow("image", WindowMode.Normal);
                Cv2.ImShow("image", frame);
                Cv2.WaitKey();
                Cv2.DestroyWindow("image");
            }
        }
Exemple #7
0
        /// <summary>
        /// Crop the selected image: shows it in a window and lets the user mark
        /// a region with the mouse (handled by img_MouseDown).
        /// </summary>
        /// <param name="path">Path of the image file to load.</param>
        public void PicCut(String path)
        {
            img = new Mat(path, ImreadModes.Color);
            Cv2.ImShow("chosenImg", img);

            // Delegate is kept in a field — presumably so the GC does not collect it
            // while native code still holds the callback pointer; TODO confirm.
            MyMouseCallback = new MouseCallback(img_MouseDown);
            Cv2.MoveWindow("chosenImg", 800, 400);
            Cv2.SetMouseCallback("chosenImg", MyMouseCallback);
            Cv2.WaitKey(0);
            Cv2.DestroyWindow("chosenImg");
            Cv2.DestroyAllWindows();
        }
Exemple #8
0
        /// <summary>
        /// Display a base64-encoded captcha image in an OpenCV window.
        /// </summary>
        /// <param name="base64String">Base64-encoded image bytes.</param>
        public static void DisplayImageFromBase64(string base64String)
        {
            // Emgu CV is incompatible with .NET Core, so OpenCvSharp is used instead.
            var imageBytes = Convert.FromBase64String(base64String);
            var title      = "Your Captcha";

            Cv2.NamedWindow(title);
            var captcha = Mat.FromImageData(imageBytes, ImreadModes.Color);

            // Normalise the captcha to a fixed 260x84 display size.
            Cv2.Resize(captcha, captcha, new OpenCvSharp.Size(260, 84));

            Cv2.ImShow(title, captcha);
            Cv2.WaitKey(0);           // wait indefinitely for a key press
            Cv2.DestroyWindow(title);
        }
Exemple #9
0
        /// <summary>
        /// Asynchronously display a base64-encoded captcha image in an OpenCV window.
        /// </summary>
        /// <param name="base64String">Base64-encoded image bytes.</param>
        /// <returns>A task that completes when the window has been closed.</returns>
        public static Task DisplayImageFromBase64Async(string base64String)
        {
            // Task.Run is preferred over Task.Factory.StartNew for simple offloaded
            // work (StartNew without an explicit scheduler has subtle pitfalls).
            return Task.Run(() =>
            {
                var bytes = Convert.FromBase64String(base64String);
                // Emgu CV is incompatible with .NET Core, so OpenCvSharp is used instead.
                string windowName = "Your Captcha";
                Cv2.NamedWindow(windowName);

                // Dispose the Mat when done — the original leaked native memory.
                using Mat matImg = Mat.FromImageData(bytes);
                Cv2.Resize(matImg, matImg, new Size(260, 84)); // fixed display size

                Cv2.ImShow(windowName, matImg);
                Cv2.WaitKey();                // block until a key is pressed
                Cv2.DestroyWindow(windowName);
            });
        }
Exemple #10
0
        /// <summary>
        /// Compare two images by their 2-D Hue/Saturation histograms using KL divergence.
        /// Lower values mean more similar distributions.
        /// </summary>
        /// <param name="img">Test image (3-channel colour).</param>
        /// <param name="refImg">Reference image.</param>
        /// <param name="showImage">When true, shows the test image annotated with the score.</param>
        /// <returns>KL divergence between the two normalised H/S histograms.</returns>
        public double CompareImageByHist(Mat img, Mat refImg, bool showImage = false)
        {
            // All intermediates are disposed — the original leaked every native Mat.
            using Mat imgHsv    = new Mat();
            using Mat refImgHsv = new Mat();

            // NOTE(review): RGB2HSV is used although OpenCV Mats are normally BGR;
            // both images get the same conversion, so the comparison stays consistent.
            Cv2.CvtColor(img, imgHsv, ColorConversionCodes.RGB2HSV);
            Cv2.CvtColor(refImg, refImgHsv, ColorConversionCodes.RGB2HSV);

            Mat[] imgHsvs    = Cv2.Split(imgHsv);
            Mat[] refImgHsvs = Cv2.Split(refImgHsv);

            try
            {
                int bin1 = 50;   // hue bins
                int bin2 = 60;   // saturation bins

                int[] bins = { bin1, bin2 };

                int[] channels = { 0, 1 };   // hue and saturation channels only

                Rangef[] ranges = new Rangef[]
                {
                    new Rangef(0, 180),   // OpenCV hue range
                    new Rangef(0, 256)    // saturation range
                };

                using Mat mask       = new Mat();   // empty mask = use all pixels
                using Mat imgHist    = new Mat(img.Size(), MatType.CV_32FC2);
                using Mat refImgHist = new Mat(img.Size(), MatType.CV_32FC2);

                Cv2.CalcHist(imgHsvs, channels, mask, imgHist, 2, bins, ranges, true, false);
                Cv2.Normalize(imgHist, imgHist, 1, 0, NormTypes.MinMax, -1, null);

                Cv2.CalcHist(refImgHsvs, channels, mask, refImgHist, 2, bins, ranges, true, false);
                Cv2.Normalize(refImgHist, refImgHist, 1, 0, NormTypes.MinMax, -1, null);

                double ratio = Cv2.CompareHist(imgHist, refImgHist, HistCompMethods.KLDiv);

                if (showImage)
                {
                    using Mat img1 = img.Clone();
                    Cv2.PutText(img1, ratio.ToString(), new Point(50, 50), HersheyFonts.HersheyPlain, 1, new Scalar(0, 255, 0), 2, LineTypes.AntiAlias);
                    Cv2.ImShow("CompareHistTestVSRef", img1);
                    Cv2.WaitKey();
                    Cv2.DestroyWindow("CompareHistTestVSRef");
                }
                return ratio;
            }
            finally
            {
                // Split() allocates one native Mat per channel; release them explicitly.
                foreach (var m in imgHsvs) m.Dispose();
                foreach (var m in refImgHsvs) m.Dispose();
            }
        }
Exemple #11
0
        /// <summary>
        /// Stream frames from the opened camera until a key is pressed or IsShow is cleared.
        /// </summary>
        public void Show()
        {
            if (!IsCamera())
            {
                return;
            }

            var buffer = new Mat();
            IsShow = true;

            while (true)
            {
                // WaitKey both pumps the HighGui event loop and polls for a key press;
                // any key (or IsShow going false) ends the preview.
                if (Cv2.WaitKey(1) != -1 || !IsShow)
                {
                    break;
                }
                OpeningCamera.Read(buffer);
                Cv2.ImShow("Camera", buffer);
            }

            Cv2.DestroyWindow("Camera");
            buffer.Dispose();
            IsShow = false;
        }
Exemple #12
0
        /// <summary>
        ///     Preview an image in a uniquely-named window, downscaling tall images
        ///     to a maximum height of 500 px while keeping the aspect ratio.
        /// </summary>
        /// <param name="imageMat">Image to preview.</param>
        private static void PreviewMatchResultImage(Mat imageMat)
        {
            const int maxHeight  = 500;
            var       windowName = $"预览窗口{Guid.NewGuid()}";

            if (imageMat.Height < maxHeight)
            {
                Cv2.ImShow(windowName, imageMat);
            }
            else
            {
                // Clamp the height to maxHeight; derive the width from the aspect ratio.
                var aspect      = (double)imageMat.Width / imageMat.Height;
                var scaledWidth = maxHeight * aspect;
                var targetSize  = new Size(scaledWidth, maxHeight);
                using var scaled = new Mat(targetSize, imageMat.Type());
                Cv2.Resize(imageMat, scaled, targetSize);
                Cv2.ImShow(windowName, scaled);
            }

            // Leave the preview up for five seconds, then tear the window down.
            Cv2.WaitKey(5000);
            Cv2.DestroyWindow(windowName);
        }
Exemple #13
0
        /// <summary>
        /// Count foreground blobs via a fixed binary threshold and connected components.
        /// </summary>
        /// <param name="img">Input BGR image.</param>
        /// <param name="showImage">When true, shows the binarised image with blob boxes drawn.</param>
        /// <returns>Number of blobs found, excluding the background label.</returns>
        /// <exception cref="InvalidOperationException">Thrown when no blob is found.</exception>
        public int GetBlobs(Mat img, bool showImage = false)
        {
            // Intermediate Mats are disposed — the original leaked all four.
            using Mat gray = img.CvtColor(ColorConversionCodes.BGR2GRAY);
            //Mat binary = gray.Threshold(0, 255, ThresholdTypes.Otsu | ThresholdTypes.Binary);
            using Mat binary = gray.Threshold(50, 255, ThresholdTypes.Binary);

            //2.Define/search ROI area
            using Mat labelView      = img.EmptyClone();
            using Mat rectView       = binary.CvtColor(ColorConversionCodes.GRAY2BGR);
            ConnectedComponents cc   = Cv2.ConnectedComponentsEx(binary);

            // Label 0 is the background, so LabelCount <= 1 means nothing was detected.
            if (cc.LabelCount <= 1)
            {
                // More specific than the bare Exception the original threw.
                throw new InvalidOperationException("no blob found");
            }

            //draw labels
            cc.RenderBlobs(labelView);

            //draw boxes except background
            foreach (var blob in cc.Blobs.Skip(1))
            {
                rectView.Rectangle(blob.Rect, Scalar.Red);
            }

            if (showImage)
            {
                using (new Window("blob image", rectView))
                {
                    Cv2.WaitKey(0);
                    Cv2.DestroyWindow("blob image");
                }
            }
            return cc.LabelCount - 1;
        }
Exemple #14
0
        /// <summary>
        /// Run the full vision test sequence on a test image against a reference image,
        /// rendering the result of each step onto a "TestResult" report window.
        /// </summary>
        /// <param name="img">Test image; thresholded in place.</param>
        /// <param name="imgRef">Reference image; thresholded in place.</param>
        public void Demo(Mat img, Mat imgRef)
        {
            //New Mat to show the test result
            Mat showTestResult = new Mat(600, 350, MatType.CV_8UC3, new Scalar(255, 255, 255));

            // Refresh the report window after each step when vision debugging is on.
            // (Extracted: the original repeated this if/ImShow/WaitKey block six times.)
            void ShowProgress()
            {
                if (GVar.debugVision == true)
                {
                    Cv2.ImShow("TestResult", showTestResult);
                    Cv2.WaitKey();
                }
            }

            Cv2.PutText(showTestResult, "TestResult:", new Point(10, 30), HersheyFonts.HersheyComplex, 0.8, new Scalar(0, 0, 0), 1, LineTypes.AntiAlias);
            ShowProgress();

            // ??better to change from the threshold method to erode method to remove small noise but not real defect
            Cv2.Threshold(img, img, 30, 255, ThresholdTypes.Tozero);
            Cv2.Threshold(imgRef, imgRef, 30, 255, ThresholdTypes.Tozero);

            //1. Get blob numbers
            int blobs = GetBlobs(img, GVar.debugVision);
            Cv2.PutText(showTestResult, "1.Blobs: " + blobs, new Point(10, 60), HersheyFonts.HersheySimplex, 0.6, new Scalar(0, 0, 0), 1, LineTypes.AntiAlias);
            ShowProgress();

            //2. Mser sample to find closed area
            int iClosedArea = MserSample(img, GVar.debugVision);
            Cv2.PutText(showTestResult, "2.Closed areas: " + iClosedArea, new Point(10, 90), HersheyFonts.HersheySimplex, 0.6, new Scalar(0, 0, 0), 1, LineTypes.AntiAlias);
            ShowProgress();

            //3.Measure blob areas
            double blob_area = MeasureArea(img, GVar.debugVision);
            Cv2.PutText(showTestResult, "3.Contour Area: " + blob_area.ToString(), new Point(10, 120), HersheyFonts.HersheySimplex, 0.6, new Scalar(0, 0, 0), 1, LineTypes.AntiAlias);
            ShowProgress();

            //4. Get test picture Hue/Saturation/Color with average/Min/Max value
            Cv2.PutText(showTestResult, "4.HSV average value", new Point(10, 150), HersheyFonts.HersheySimplex, 0.6, new Scalar(0, 0, 0), 1, LineTypes.AntiAlias);
            float[][] ffVal     = ColorTestHSV(img);
            string[]  hsvString = { "Hue ave = ", "Sat ave = ", "Lum ave = " };
            for (int k = 0; k < 3; k++)
            {
                GVar.fHSV[k] = ffVal[k][1];  //[0]- min; [1]-average; [2]-max
                Cv2.PutText(showTestResult, hsvString[k] + GVar.fHSV[k].ToString(), new Point(10, 180 + k * 30), HersheyFonts.HersheySimplex, 0.6, new Scalar(0, 0, 0), 1, LineTypes.AntiAlias);
            }
            ShowProgress();

            //5. Compare test and reference picture with Histogram CompareHist method, sensitive to color change
            double ratio = CompareImageByHist(img, imgRef, GVar.debugVision);
            Cv2.PutText(showTestResult, "5.Histogram compare", new Point(10, 270), HersheyFonts.HersheySimplex, 0.6, new Scalar(0, 0, 0), 1, LineTypes.AntiAlias);
            Cv2.PutText(showTestResult, "H/S : " + ratio.ToString(), new Point(10, 300), HersheyFonts.HersheySimplex, 0.6, new Scalar(0, 0, 0), 1, LineTypes.AntiAlias);
            ShowProgress();

            //6. Keypoints method
            float matchRate = MatchTemplate(ImageROI, imgRef, GVar.debugVision);
            Cv2.PutText(showTestResult, "6.KeyPoints: " + matchRate.ToString(), new Point(10, 330), HersheyFonts.HersheySimplex, 0.6, new Scalar(0, 0, 0), 1, LineTypes.AntiAlias);
            Cv2.PutText(showTestResult, "The end of vision test!", new Point(10, 360), HersheyFonts.HersheySimplex, 0.6, new Scalar(0, 0, 0), 1, LineTypes.AntiAlias);

            // The final report is always shown, regardless of the debug flag.
            Cv2.ImShow("TestResult", showTestResult);
            Cv2.WaitKey();
            Cv2.DestroyWindow("TestResult");
            showTestResult.Dispose();   // release native memory (original leaked it)
            Console.WriteLine("  ");

            // TODO (from original): uniformity test and HoughLinesP line detection.
        }
Exemple #15
0
        /// <summary>
        /// Detects phosphorescent and fluorescent regions from two image sets and
        /// annotates their contours ("P" for phos, "F" for fluorescent-only) on a
        /// white-light reference image, which is then displayed.
        /// </summary>
        /// <returns>Always true.</returns>
        public bool Test()
        {
            var plMask     = new PLMask();
            Mat whiteLight = Cv2.ImRead(@"P:\Projects\N3 Imaging\Images\09042018_jewerly\WhiteLight\whitelight.bmp");

            //get phos contours: OR together the per-file masks from the Phos image set
            var rootDir      = @"P:\Projects\N3 Imaging\Images\09042018_jewerly\Phos";
            var files        = Directory.GetFiles(rootDir, "*.bmp", SearchOption.TopDirectoryOnly);
            Mat combinedMask = null;

            foreach (var file in files)
            {
                var pl = Cv2.ImRead(file);
                Mat mask;

                var res = plMask.PlMask(pl, 20, out mask);
                // Lazily allocate the accumulator with the first mask's size/type.
                if (combinedMask == null)
                {
                    combinedMask = Mat.Zeros(mask.Size(), mask.Type());
                }

                Cv2.Add(mask, combinedMask, combinedMask);
            }
            // Dilate to merge nearby mask fragments before contour extraction.
            Mat element = Cv2.GetStructuringElement(MorphShapes.Ellipse,
                                                    new OpenCvSharp.Size(9, 9),
                                                    new OpenCvSharp.Point(2, 2));

            Cv2.Dilate(combinedMask, combinedMask, element);

            //find contours on this mask
            Mat[] contours;
            var   hierarchy = new List <Point>();

            Cv2.FindContours(combinedMask, out contours, OutputArray.Create(hierarchy), RetrievalModes.External,
                             ContourApproximationModes.ApproxSimple);

            //remove small size contours (area <= 400 px is treated as noise)
            List <Mat> phosContours = new List <Mat>();

            foreach (var contour in contours)
            {
                if (Cv2.ContourArea(contour) > 400)
                {
                    phosContours.Add(contour);
                }
            }

            //Mat phosMask = Mat.Zeros(combinedMask.Size(), MatType.CV_8UC1);
            //Cv2.DrawContours(phosMask, phosContours, -1, Scalar.White, -1);//filled contours

            //get centers of contours and label each phos contour with "P"
            for (int i = 0; i < phosContours.Count; i++)
            {
                var c = phosContours[i];
                var m = c.Moments(true);
                // Centroid from image moments: (M10/M00, M01/M00).
                var x = m.M10 / m.M00;
                var y = m.M01 / m.M00;
                Cv2.DrawContours(whiteLight, phosContours, i, new Scalar(255, 0, 0), 4);
                Cv2.PutText(whiteLight, "P", new Point(x, y), HersheyFonts.HersheySimplex, 1, new Scalar(0, 0, 255), 4);
            }


            //get fl contours: same mask-accumulation pass over the SW Fl image set
            rootDir = @"P:\Projects\N3 Imaging\Images\09042018_jewerly\SW Fl";
            files   = Directory.GetFiles(rootDir, "*.bmp", SearchOption.TopDirectoryOnly);
            Mat combinedFlMask = null;

            foreach (var file in files)
            {
                var pl = Cv2.ImRead(file);
                Mat mask;

                var res = plMask.PlMask(pl, 20, out mask);
                if (combinedFlMask == null)
                {
                    combinedFlMask = Mat.Zeros(mask.Size(), mask.Type());
                }

                Cv2.Add(mask, combinedFlMask, combinedFlMask);
            }

            // Debug display of the combined fluorescence mask.
            Cv2.ImShow("combinedFlMask", combinedFlMask);
            Cv2.WaitKey(0);
            Cv2.DestroyWindow("combinedFlMask");

            element = Cv2.GetStructuringElement(MorphShapes.Ellipse,
                                                new OpenCvSharp.Size(9, 9),
                                                new OpenCvSharp.Point(2, 2));
            Cv2.Dilate(combinedFlMask, combinedFlMask, element);

            //find contours on this mask
            Cv2.FindContours(combinedFlMask, out contours, OutputArray.Create(hierarchy), RetrievalModes.External,
                             ContourApproximationModes.ApproxSimple);

            //remove small size contours
            List <Mat> flContours = new List <Mat>();

            foreach (var contour in contours)
            {
                if (Cv2.ContourArea(contour) > 400)
                {
                    flContours.Add(contour);
                }
            }

            //Mat flMask = Mat.Zeros(combinedFlMask.Size(), MatType.CV_8UC1);
            //Cv2.DrawContours(flMask, flContours, -1, Scalar.White, -1);//filled contours
            //check for intersection with phos contours (rasterise both and AND them);
            //if no intersection then label the fluorescent contour with "F"
            for (int i = 0; i < flContours.Count; i++)
            {
                Mat blankFl = Mat.Zeros(whiteLight.Size(), MatType.CV_8UC1);
                Cv2.DrawContours(blankFl, flContours, i, Scalar.White, -1);
                bool phos = false;
                for (int j = 0; j < phosContours.Count; j++)
                {
                    Mat blankPhos = Mat.Zeros(whiteLight.Size(), MatType.CV_8UC1);
                    Cv2.DrawContours(blankPhos, phosContours, j, Scalar.White, -1);
                    Mat intersection = new Mat();
                    Cv2.BitwiseAnd(blankFl, blankPhos, intersection);
                    // Any non-zero pixel in the AND image means the two contours overlap.
                    if (intersection.Sum()[0] > 0)
                    {
                        phos = true;
                        break;
                    }
                }

                if (!phos)
                {
                    var c = flContours[i];
                    var m = c.Moments(true);
                    var x = m.M10 / m.M00;
                    var y = m.M01 / m.M00;
                    Cv2.DrawContours(whiteLight, flContours, i, new Scalar(255, 0, 0), 4);
                    Cv2.PutText(whiteLight, "F", new Point(x, y), HersheyFonts.HersheySimplex, 1, new Scalar(0, 0, 255), 4);
                }
            }

            // Show the fully-annotated white-light image until a key is pressed.
            // NOTE(review): the many Mats created above are never disposed; this
            // method leaks native memory on every call — consider using/Dispose.
            Cv2.ImShow("whiteLight", whiteLight);
            Cv2.WaitKey();
            Cv2.DestroyAllWindows();

            return(true);
        }
        /// <summary>
        /// Capture webcam frames, detect faces and eye landmarks, estimate the gaze
        /// direction, and show the annotated video until ESC is pressed or the
        /// worker is cancelled.
        /// </summary>
        /// <param name="worker">Optional background worker used for display/cancellation; may be null.</param>
        private void StartWebCam(BackgroundWorker worker = null)
        {
            if (cap == null)
            {
                cap = new VideoCapture(0);
            }
            if (!cap.Open(0))
            {
                return;
            }
            OpenCvSharp.Cv2.NamedWindow("Video", WindowMode.AutoSize);
            int       cnt      = 0;
            Mat       frame    = new Mat();
            EyePoints rightEye = new EyePoints(true);
            EyePoints leftEye  = new EyePoints(false);

            IsRunning = true;
            while (IsRunning)
            {
                bool result = cap.Read(frame);
                if (!result)
                {
                    // BUGFIX: worker is optional; guard against null before cancelling
                    // (the original dereferenced it unconditionally).
                    worker?.CancelAsync();
                    IsRunning = false;
                }
                if (frame != null && (frame.Rows * frame.Cols > 0))
                {
                    cnt++;
                    // Only process every 'frameskip'-th frame to keep up with the camera.
                    if (cnt % frameskip == 0)
                    {
                        // NOTE(review): the same Mat instance is enqueued and later reused
                        // by cap.Read; this only works because the queue is drained before
                        // the next read. Consider enqueuing frame.Clone().
                        FrameQueue.Enqueue(frame);
                        cnt = 0;
                    }
                }
                while (FrameQueue.Count > 0)
                {
                    Mat    que   = FrameQueue.Dequeue();
                    Rect[] faces = GetFaces(que, 1);
                    for (int i = 0; i < faces.Length; i++)
                    {
                        // Locate facial landmarks with dlib, then find both eye centres.
                        Array2D<byte>       gray      = ConvertMatToDlib2DArray(que);
                        FullObjectDetection landmarks = predictor.Detect(gray, ConvertToDlib(faces[i]));
                        InitializeEyes(landmarks, leftEye, rightEye);

                        Rect leftboundingBox = BoundingBoxAroundEye(leftEye, 0);
                        DrawRect(que, leftboundingBox);
                        OpenCvSharp.Point centerOfLeftEye = DetectCenterOfEye(que, leftboundingBox);
                        centerOfLeftEye.X += leftboundingBox.X;

                        Rect rightboundingBox = BoundingBoxAroundEye(rightEye, 0);
                        DrawRect(que, rightboundingBox);
                        OpenCvSharp.Point centerOfRightEye = DetectCenterOfEye(que, rightboundingBox);
                        centerOfRightEye.X += rightboundingBox.X;

                        EyeDirection leftEyeDirection  = leftEye.GetEyePosition(centerOfLeftEye);
                        EyeDirection rightEyeDirection = rightEye.GetEyePosition(centerOfRightEye);

                        // Combine the two per-eye estimates; "center" wins over either side.
                        EyeDirection eyeDirection = EyeDirection.unknown;
                        if (leftEyeDirection == EyeDirection.center || rightEyeDirection == EyeDirection.center)
                        {
                            eyeDirection = EyeDirection.center;
                        }
                        else if (leftEyeDirection == EyeDirection.left)
                        {
                            eyeDirection = EyeDirection.left;
                        }
                        else if (rightEyeDirection == EyeDirection.right)
                        {
                            eyeDirection = EyeDirection.right;
                        }

                        OpenCvSharp.Point position = new OpenCvSharp.Point(50, 50);
                        Cv2.PutText(img: que, text: eyeDirection.ToDisplay(), org: position, fontFace: HersheyFonts.HersheySimplex, fontScale: 2, new Scalar(0, 0, 255));
                    }
                    if (worker != null)
                    {
                        try
                        {
                            OpenCvSharp.Cv2.ImShow("Video", que);
                            int key = Cv2.WaitKey(10);   // pump the UI for 10 ms
                            if (key == 27)               // ESC quits
                            {
                                worker.CancelAsync();
                                IsRunning = false;
                            }
                        }
                        catch (Exception ex)
                        {
                            // Swallow display errors so a transient HighGui failure
                            // does not kill the capture loop.
                            string msg = ex.Message;
                        }
                    }
                    // BUGFIX: the original read worker.CancellationPending without a null
                    // check — NullReferenceException whenever no worker was supplied.
                    if (worker != null && worker.CancellationPending)
                    {
                        Cv2.DestroyWindow("Video");
                        break;
                    }
                }
            }
        }
Exemple #17
0
        static void Main(string[] args)
        {
            // clear screen
            Console.SetCursorPosition(0, 0);
            Console.Write(new String(' ', 80 * 25));
            Console.SetCursorPosition(0, 0);
            // Console.ReadKey();

            Console.WriteLine("Nagibot for BarrierX by k1death, v2.1.0");
            string configFilename = "config.txt";

            if (args.Length == 1)
            {
                configFilename = args[0];
            }

            Config config = new Config();
            int    res    = config.ReadConfig(configFilename);

            if (res != 1)
            {
                Console.WriteLine("file not exist: " + configFilename);
                Console.WriteLine("usage: NagiBot <config_filename.txt>");
                return;
            }

            try
            {
                //proc = Process.GetProcessesByName("BarrierX")[0];
                proc = Process.GetProcessesByName(config.Procname)[0];
                //int screenshot_num = 0;
                //int skip = 0;

                bool b_init       = false;
                int  fps          = 0;
                int  fps_calc     = 0;
                long fps_time     = 0;
                long fps_time_dif = 0;

                bool skip = false;
                int  debug_frames_count     = 0;
                long debug_frame_time       = 0;
                long debug_next_frame_delay = 500; // millisec
                OCR  ocr = new OCR();
                ocr.Init(config.OCRFontDir);

                //         public static Scalar[] colors;

                /*        public static Scalar ;
                 *      public static Scalar color_green;
                 *      public static Scalar color_blue;
                 *      public static Scalar color_purple;
                 */



                string[] samples = { "",                           "",               "screen0070_right.png",       "screen0021__linecenter.png", "screen0023__linecenter.png",
                                     "screen0032__lineleft.png",   "screen0015.png", "screen0001__linecenter.png", "screen0025.png",             "screen0200.png",
                                     //"screen0676.png", "screen0736.png", "screen0787.png", "screen0801.png", "screen0864.png", "screen0883.png", "screen0888.png", "screen0903.png", "", "",
                                     "screen0019_right.png",       "screen0076.png", "screen0073_left.png",        "screen0148_right.png",       "screen0265_right.png",      "screen0327_right.png","screen0344.png", "screen0349.png",
                                     "screen0044__linecenter.png", "",               "",                           "",                           "",                          };

                long ocr_last_ostime = DateTimeOffset.Now.ToUnixTimeMilliseconds();
                long ocr_digits      = 0;
                long ocr_digits_calc = 0;
                ocr_digits = 0;

                //FillLineDetector detector = new FillLineDetector();
                RotatedLineDetector detector = new RotatedLineDetector();


                while (true)
                {
                    long time1 = DateTimeOffset.Now.ToUnixTimeMilliseconds();
                    skip = false;

                    Console.SetCursorPosition(0, 2);
                    //Mat img = grab_screen_GetPixelColors();
                    Mat img = grab_screen2(config);
                    //Mat img = Cv2.ImRead(@"d:\src\NagiBot_BarrierX\sample\640x480_4\" + samples[15], ImreadModes.Color);
                    Console.WriteLine($"check: width = {config.Width} | {img.Cols}, height = {config.Height} | {img.Rows}, type = {img.Type()}   ");

                    // init
                    if (config.Width != img.Cols || config.Height != img.Rows || !b_init)
                    {
                        config.Width  = img.Cols;
                        config.Height = img.Rows;
                        //proportionX = configWidth / 1280f;
                        //proportionY = configHeight / 720f;

                        detector.Init(ocr, config);

                        b_init = true;
                    }

                    if (img.Type() != MatType.CV_8UC3)
                    {
                        Mat tmp = img.CvtColor(ColorConversionCodes.BGRA2BGR);
                        img.Release();
                        img = tmp;
                    }

                    if (config.Preview == 4)
                    {
                        Cv2.ImShow("source", img);
                        //Samples.Gradient(img);
                    }

                    if (config.DebugSaveFrames == 4 && time1 - debug_frame_time > debug_next_frame_delay)
                    {
                        img.ImWrite(String.Format("screen{0:d4}.png", debug_frames_count));
                        //if (configPreview > 0) { img_debug.ImWrite(String.Format("screen{0:d4}_debug.png", debug_frames_count)); }
                        debug_frames_count++;
                        debug_frame_time = time1;
                    }


                    if (ocr_digits_calc >= 1500)
                    {
                        int sizeX = 15;
                        if (ocr_digits_calc >= 13500)
                        {
                            sizeX = 190;
                        }
                        else if (ocr_digits_calc >= 12000)
                        {
                            sizeX = 170;
                        }
                        else if (ocr_digits_calc >= 10500)
                        {
                            sizeX = 150;
                        }
                        else if (ocr_digits_calc >= 9000)
                        {
                            sizeX = 130;
                        }
                        else if (ocr_digits_calc >= 7500)
                        {
                            sizeX = 110;
                        }
                        else if (ocr_digits_calc >= 6000)
                        {
                            sizeX = 100;
                        }
                        else if (ocr_digits_calc >= 4500)
                        {
                            sizeX = 70;
                        }
                        else if (ocr_digits_calc >= 3000)
                        {
                            sizeX = 40;
                        }
                        //else if (ocr_digits_calc >= 1500) sizeX = 15;
                        // crop
                        // https://github.com/VahidN/OpenCVSharp-Samples/blob/master/OpenCVSharpSample19/Program.cs
                        //double ratio = config.Height / config.Width;
                        int sizeY = (int)(sizeX * ((double)config.Height / (double)config.Width));
                        //Rect roi = new Rect(size, (int)(size * ratio), 540 - size * 2, 360 - (int)(size * ratio) * 2);
                        Rect roi = new Rect(sizeX, sizeY, config.Width - sizeX * 2, config.Height - sizeY * 2);
                        Console.WriteLine($"ROI={roi.Left}, {roi.Top}, {roi.Width}, {roi.Height}, sizeX={sizeX}, sizeY={sizeY}   ");
                        var cropped  = new Mat(img, roi); //Crop the image
                        Mat resized2 = new Mat();
                        //Cv2.CvtColor(barcode, barcode, ColorConversionCodes.BGRA2GRAY);
                        //Cv2.Resize(cropped, resized2, new Size(540, 360), 0, 0, InterpolationFlags.Lanczos4);
                        Cv2.Resize(cropped, resized2, new Size(config.Width, config.Height), 0, 0, InterpolationFlags.Lanczos4);
                        //img.Release();
                        //img = resized;

                        if (config.Preview == 4)
                        {
                            Cv2.ImShow("cropped", cropped);
                        }
                        if (config.Preview == 4)
                        {
                            Cv2.ImShow("resized2", resized2);
                        }
                        img.Release();
                        cropped.Release();
                        img = resized2;
                        // */
                    }

                    detector.ProcessFrame(img);
                    if (!detector.isGameOver)
                    {
                        if (ocr.ocrTimerText.Length >= 4 &&
                            Math.Abs(ocr.ocrTimerN - ocr_digits_calc) < 1000)    // FIX для неправильно распознанного нуля
                        {
                            ocr_digits      = ocr.ocrTimerN;
                            ocr_last_ostime = time1;
                            ocr_digits_calc = ocr_digits;
                        }
                        else
                        {
                            ocr_digits_calc = (time1 - ocr_last_ostime) / 10 + ocr_digits;
                        }
                    }
                    else
                    {
                        ocr_last_ostime = time1;
                        ocr_digits_calc = ocr_digits = 0;
                    }

                    Player player      = detector.GetPlayer();
                    Line   line_center = detector.GetLineCenter();
                    Line   line_left   = detector.GetLineLeft();
                    Line   line_right  = detector.GetLineRight();
                    Mat    img_debug   = null;
                    img_debug = detector.Get_img_debug();

                    if (player == null || line_left == null || line_center == null || line_right == null)
                    {
                        skip = true;
                    }

                    if (config.DebugSaveFrames == 1 && time1 - debug_frame_time > debug_next_frame_delay)
                    {
                        string reason = "";
                        if (player == null)
                        {
                            reason += "_player";
                        }
                        else if (line_center == null)
                        {
                            reason += "_linecenter";
                        }
                        else if (line_left == null)
                        {
                            reason += "_lineleft";
                        }
                        else if (line_right == null)
                        {
                            reason += "_lineright";
                        }
                        if (reason.Length > 0)
                        {
                            img.ImWrite(String.Format("screen{0:d4}_{1}.png", debug_frames_count, reason));
                            debug_frames_count++;
                            debug_frame_time = time1;
                        }
                    }



                    int c = Cv2.WaitKey(1);

                    //Vec3b c8 = img.Get<Vec3b>(coords[15], coords[14]);  //Point Right 3


                    long time2 = DateTimeOffset.Now.ToUnixTimeMilliseconds(); // DateTime.UtcNow.Millisecond;
                    time2 = time2 - time1;
                    //Console.WriteLine("fps=" + fps);

                    //Clean
                    //Console.SetCursorPosition(0, 0);/*
                    //Console.WriteLine($"proportionX= {proportionX}, proportionY= {proportionY}          ");
                    Console.WriteLine($"Frame takes(ms)={time2,3}, fps={fps,3}, time(ms)= {fps_time_dif,4}              ");
                    //detector.is

                    if (config.Preview > 0 && img_debug != null)
                    {
                        if (line_left != null)
                        {
                            DrawLineColorBox(img_debug, 0, line_left.color);
                        }
                        if (line_center != null)
                        {
                            DrawLineColorBox(img_debug, 1, line_center.color);
                        }
                        if (line_right != null)
                        {
                            DrawLineColorBox(img_debug, 2, line_right.color);
                        }

                        Cv2.Rectangle(img_debug, new Rect(20, 110, 170, 30), new Scalar(0, 0, 0), -1);
                        img_debug.PutText("TIMER=" + ocr_digits_calc + "/" + ocr.ocrTimerText, new Point(20, 130), HersheyFonts.HersheySimplex, 0.6, new Scalar(255, 255, 255));
                    }



                    if (next_move_pause > 0)
                    {
                        next_move_pause--;
                    }


                    //Console.WriteLine($"R={R(line_left.color)}");
                    //Bot Logic
                    //*
                    // Item0 - B, Item1- G, Item2- R
                    //if ((R(c1.R > 240 && c1.G > 90) && (c2.R > 240 && c2.G > 90))
                    //if ((R(c1) > 240 && G(c1) > 90) && (R(c2) > 240 && G(c2) > 90))
                    //if (1==2)
                    if (!skip && !config.DebugNoControls && R(line_center.color) > 220 && G(line_center.color) > 70)
                    {
                        Console.WriteLine("RED      ");
                        //Console.SetCursorPosition(0, 1);

                        //if (c3.R + c5.R + c7.R + c3.G + c5.G + c7.G < c4.R + c6.R + c8.R + c4.G + c6.G + c8.G)
                        //if (c3.Item2 + c5.Item2 + c7.Item2 + c3.Item1 + c5.Item1 + c7.Item1 < c4.Item2 + c6.Item2 + c8.Item2 + c4.Item1 + c6.Item1 + c8.Item1)
                        //if (R(c3) + R(c5) + R(c7) + G(c3) + G(c5) + G(c7) < R(c4) + R(c6) + R(c8) + G(c4) + G(c6) + G(c8))
                        //if (1==2)
                        if (R(line_right.color) + G(line_right.color) > R(line_left.color) + G(line_left.color))
                        {
                            if (R(line_center.color) + G(line_center.color) > R(line_left.color) + G(line_left.color))
                            {
                                sendLeft();
                                Console.WriteLine("Controls: Left     ");
                                if ((config.DebugSaveFrames == 2 && time1 - debug_frame_time > debug_next_frame_delay) || config.DebugSaveFrames == 4)
                                {
                                    img.ImWrite(String.Format("screen{0:d4}_left.png", debug_frames_count));
                                    if (config.Preview > 0 && img_debug != null)
                                    {
                                        img_debug.ImWrite(String.Format("screen{0:d4}_debug.png", debug_frames_count));
                                    }
                                    debug_frames_count++;
                                    debug_frame_time = time1;
                                }
                            }
                        }
                        else
                        {
                            if (R(line_center.color) + G(line_center.color) > R(line_right.color) + G(line_right.color))
                            {
                                sendRight();
                                Console.WriteLine("Controls: Right    ");
                                if ((config.DebugSaveFrames == 2 && time1 - debug_frame_time > debug_next_frame_delay) || config.DebugSaveFrames == 4)
                                {
                                    img.ImWrite(String.Format("screen{0:d4}_right.png", debug_frames_count));
                                    if (config.Preview > 0 && img_debug != null)
                                    {
                                        img_debug.ImWrite(String.Format("screen{0:d4}_debug.png", debug_frames_count));
                                    }
                                    debug_frames_count++;
                                    debug_frame_time = time1;
                                }
                            }
                        }
                    }
                    else
                    {
                        //if (isBlack(c2) || isBlack(c1))
                        if (config.AutoRestart && detector.isGameOver)
                        {
                            sendOk();
                            //Console.SetCursorPosition(0, 2);
                            Console.WriteLine("RESPAWN ?   ");
                        }
                    }
                    // */



                    if (config.Preview > 0 && img_debug != null)
                    {
                        if (config.Preview == 2)
                        {
                            img_debug = DoPyrDown(img_debug);
                        }
                        Cv2.ImShow("debug", img_debug);
                        //img_debug.Release();
                    }
                    img.Release();
                    //if (line_center != null) { line_center.contour.Release(); line_center.mask.Release(); }
                    //if (line_left != null) { line_left.contour.Release(); line_left.mask.Release(); }
                    //if (line_right != null) { line_right.contour.Release(); line_right.mask.Release(); }
                    if (line_center != null)
                    {
                        line_center.mask.Release();
                    }
                    if (line_left != null)
                    {
                        line_left.mask.Release();
                    }
                    if (line_right != null)
                    {
                        line_right.mask.Release();
                    }
                    if (config.Preview > 0 && img_debug != null)
                    {
                        img_debug.Release();
                    }


                    Console.WriteLine("                                             \n                                             \n                                             \n                                             ");

                    // Clean
                    //Console.WriteLine("                    \n                    ");
                    Thread.Sleep(config.Timeout);
                    // */

                    // fps calc
                    time2 = DateTimeOffset.Now.ToUnixTimeMilliseconds();
                    fps_calc++;
                    if (time2 - fps_time > 1000)
                    {
                        fps_time_dif = time2 - fps_time;
                        fps_time     = time2;
                        fps          = fps_calc;
                        fps_calc     = 0;
                    }
                }
            }
            catch (IndexOutOfRangeException ex)
            {
                Console.WriteLine("Game not started !");
                Console.WriteLine("Message: " + ex.Message);
                Console.WriteLine("Source: " + ex.Source);
                Console.WriteLine("StackTrace: " + ex.StackTrace);

                Console.ReadLine();
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex);
                Console.ReadLine();
            }

            Cv2.DestroyWindow("DisplayPicture");
        }
Exemple #18
0
 /// <summary>
 /// Closes the named OpenCV HighGUI window.
 /// </summary>
 /// <param name="name">Title of the window to destroy.</param>
 public override void CloseWindow(string name) => Cv2.DestroyWindow(name);
Exemple #19
0
        //img1:test image; img2:ref img
        public float MatchTemplate(Mat img1, Mat img2, bool ishowImageMatchTemplate, string s = "Match")
        {
            float matchRate = 0.0f;

            using (var descriptors1 = new Mat())
                using (var descriptors2 = new Mat())
                    using (var matcher = new BFMatcher(NormTypes.L2SQR))
                        using (var kaze = KAZE.Create())
                        {
                            KeyPoint[] keypoints1, keypoints2;
                            kaze.DetectAndCompute(img1, null, out keypoints1, descriptors1);
                            kaze.DetectAndCompute(img2, null, out keypoints2, descriptors2);

                            DMatch[][] matches = matcher.KnnMatch(descriptors1, descriptors2, 2);
                            using (Mat mask = new Mat(matches.Length, 1, MatType.CV_8U))
                            {
                                mask.SetTo(new Scalar(255));
                                int nonZero = Cv2.CountNonZero(mask);
                                VoteForUniqueness(matches, mask);
                                nonZero = Cv2.CountNonZero(mask);
                                nonZero = VoteForSizeAndOrientation(keypoints2, keypoints1, matches, mask, 1.5f, 20);

                                List <Point2f> obj             = new List <Point2f>();
                                List <Point2f> scene           = new List <Point2f>();
                                List <DMatch>  goodMatchesList = new List <DMatch>();
                                //iterate through the mask only pulling out nonzero items because they're matches
                                for (int i = 0; i < mask.Rows; i++)
                                {
                                    MatIndexer <byte> maskIndexer = mask.GetGenericIndexer <byte>();
                                    if (maskIndexer[i] > 0)
                                    {
                                        obj.Add(keypoints1[matches[i][0].QueryIdx].Pt);
                                        scene.Add(keypoints2[matches[i][0].TrainIdx].Pt);
                                        goodMatchesList.Add(matches[i][0]);
                                    }
                                }

                                List <Point2d> objPts   = obj.ConvertAll(Point2fToPoint2d);
                                List <Point2d> scenePts = scene.ConvertAll(Point2fToPoint2d);
                                if (nonZero >= 4)
                                {
                                    Mat homography = Cv2.FindHomography(objPts, scenePts, HomographyMethods.Ransac, 1.5, mask);
                                    nonZero = Cv2.CountNonZero(mask);

                                    //calculate match rate by how many match points exist
                                    //matchRate = (float)nonZero / keypoints2.Count();
                                    matchRate = 1 - (float)(keypoints2.Count() - nonZero) / (keypoints2.Count() + nonZero);

                                    if (homography != null && ishowImageMatchTemplate == true)
                                    {
                                        Point2f[] objCorners = { new Point2f(0,                 0),
                                                                 new Point2f(img1.Cols,         0),
                                                                 new Point2f(img1.Cols, img1.Rows),
                                                                 new Point2f(0,         img1.Rows) };

                                        Point2d[] sceneCorners = MyPerspectiveTransform3(objCorners, homography);

                                        //This is a good concat horizontal
                                        using (Mat img3 = new Mat(Math.Max(img1.Height, img2.Height), img2.Width + img1.Width, MatType.CV_8UC3))
                                            using (Mat left = new Mat(img3, new Rect(0, 0, img1.Width, img1.Height)))
                                                using (Mat right = new Mat(img3, new Rect(img1.Width, 0, img2.Width, img2.Height)))
                                                {
                                                    img1.CopyTo(left);
                                                    img2.CopyTo(right);

                                                    byte[] maskBytes = new byte[mask.Rows * mask.Cols];
                                                    mask.GetArray(0, 0, maskBytes);
                                                    Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, goodMatchesList, img3, Scalar.All(-1), Scalar.All(-1), maskBytes, DrawMatchesFlags.NotDrawSinglePoints);


                                                    //List<List<Point>> listOfListOfPoint2D = new List<List<Point>>();
                                                    //List<Point> listOfPoint2D = new List<Point>();
                                                    //listOfPoint2D.Add(new Point(sceneCorners[0].X + img1.Cols, sceneCorners[0].Y));
                                                    //listOfPoint2D.Add(new Point(sceneCorners[1].X + img1.Cols, sceneCorners[1].Y));
                                                    //listOfPoint2D.Add(new Point(sceneCorners[2].X + img1.Cols, sceneCorners[2].Y));
                                                    //listOfPoint2D.Add(new Point(sceneCorners[3].X + img1.Cols, sceneCorners[3].Y));
                                                    //listOfListOfPoint2D.Add(listOfPoint2D);
                                                    //img3.Polylines(listOfListOfPoint2D, true, Scalar.LimeGreen, 2);


                                                    Cv2.ImShow(s, img3.Resize(new Size(img3.Rows / 2, img3.Cols / 2)));
                                                    Cv2.WaitKey(0);
                                                    Cv2.DestroyWindow(s);

                                                    //Window.ShowImages(img3.Resize(new Size(img3.Rows / 2, img3.Cols / 2)));
                                                    //Window.WaitKey(0);
                                                    //Window.DestroyAllWindows();
                                                }
                                    }
                                }
                            }
                        }

            return(matchRate);
        }
Exemple #20
0
        // 双目RGB和IR静默活体检测(sdk内部调用opencv,返回FaceCallback)
        public bool rgb_ir_liveness_check_mat()
        {
            int faceNum   = 2;       //传入的人脸数
            int face_size = faceNum; //当前传入人脸数,传出人脸数

            TrackFaceInfo[] track_info = new TrackFaceInfo[faceNum];
            for (int i = 0; i < faceNum; i++)
            {
                track_info[i]           = new TrackFaceInfo();
                track_info[i].landmarks = new int[144];
                track_info[i].headPose  = new float[3];
                track_info[i].face_id   = 0;
                track_info[i].score     = 0;
            }
            int    sizeTrack = Marshal.SizeOf(typeof(TrackFaceInfo));
            IntPtr ptT       = Marshal.AllocHGlobal(sizeTrack * faceNum);
            long   ir_time   = 0;
            // 序号0为电脑识别的usb摄像头编号,本demo中0为ir红外摄像头
            // 不同摄像头和电脑识别可能有区别
            // 编号一般从0-10   */
            int          device  = select_usb_device_id();
            VideoCapture camera1 = VideoCapture.FromCamera(device);

            if (!camera1.IsOpened())
            {
                Console.WriteLine("camera1 open error");
                return(false);
            }

            VideoCapture camera2 = VideoCapture.FromCamera(device + 1);

            if (!camera2.IsOpened())
            {
                Console.WriteLine("camera2 open error");
                return(false);
            }

            RotatedRect box;
            Mat         frame1     = new Mat();
            Mat         frame2     = new Mat();
            Mat         rgb_mat    = new Mat();
            Mat         ir_mat     = new Mat();
            var         window_ir  = new Window("ir_face");
            var         window_rgb = new Window("rgb_face");

            while (true)
            {
                camera1.Read(frame1);
                camera2.Read(frame2);
                if (!frame1.Empty() && !frame2.Empty())
                {
                    if (frame1.Size(0) > frame2.Size(0))
                    {
                        rgb_mat = frame1;
                        ir_mat  = frame2;
                    }
                    else
                    {
                        rgb_mat = frame2;
                        ir_mat  = frame1;
                    }
                    float rgb_score = 0;
                    float ir_score  = 0;

                    IntPtr ptr = rgb_ir_liveness_check_faceinfo(rgb_mat.CvPtr, ir_mat.CvPtr, ref rgb_score, ref ir_score, ref face_size, ref ir_time, ptT);
                    string res = Marshal.PtrToStringAnsi(ptr);
                    Console.WriteLine("res is:{0}", res);
                    string msg_ir = "ir score is:" + ir_score.ToString();
                    Cv2.PutText(ir_mat, msg_ir, new Point(20, 50), HersheyFonts.HersheyComplex, 1, new Scalar(255, 100, 0));
                    window_ir.ShowImage(ir_mat);
                    Cv2.WaitKey(1);
                    Console.WriteLine("{0}", msg_ir);

                    string msg_rgb = "rgb score is:" + rgb_score.ToString();
                    Cv2.PutText(rgb_mat, msg_rgb, new Point(20, 50), HersheyFonts.HersheyComplex, 1, new Scalar(255, 100, 0));
                    for (int index = 0; index < face_size; index++)
                    {
                        IntPtr ptrTrack = (IntPtr)(ptT.ToInt64() + sizeTrack * index);
                        track_info[index] = (TrackFaceInfo)Marshal.PtrToStructure(ptrTrack, typeof(TrackFaceInfo));
                        Console.WriteLine("face_id is {0}:", track_info[index].face_id);
                        Console.WriteLine("landmarks is:");
                        for (int k = 0; k < 1; k++)
                        {
                            Console.WriteLine("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},",
                                              track_info[index].landmarks[k], track_info[index].landmarks[k + 1],
                                              track_info[index].landmarks[k + 2], track_info[index].landmarks[k + 3],
                                              track_info[index].landmarks[k + 4], track_info[index].landmarks[k + 5],
                                              track_info[index].landmarks[k + 6], track_info[index].landmarks[k + 7],
                                              track_info[index].landmarks[k + 8], track_info[index].landmarks[k + 9]
                                              );
                        }

                        for (int k = 0; k < track_info[index].headPose.Length; k++)
                        {
                            Console.WriteLine("angle is:{0:f}", track_info[index].headPose[k]);
                        }
                        Console.WriteLine("score is:{0:f}", track_info[index].score);
                        // 角度
                        Console.WriteLine("mAngle is:{0:f}", track_info[index].box.mAngle);
                        // 人脸宽度
                        Console.WriteLine("mWidth is:{0:f}", track_info[index].box.mWidth);
                        // 中心点X,Y坐标
                        Console.WriteLine("mCenter_x is:{0:f}", track_info[index].box.mCenter_x);
                        Console.WriteLine("mCenter_y is:{0:f}", track_info[index].box.mCenter_y);
                        // 画人脸框
                        FaceTrack track = new FaceTrack();
                        box = track.bounding_box(track_info[index].landmarks, track_info[index].landmarks.Length);
                        track.draw_rotated_box(ref rgb_mat, ref box, new Scalar(0, 255, 0));
                    }
                    window_rgb.ShowImage(rgb_mat);
                    Cv2.WaitKey(1);
                    Console.WriteLine("{0}", msg_rgb);
                }
            }
            Marshal.FreeHGlobal(ptT);
            rgb_mat.Release();
            ir_mat.Release();
            frame1.Release();
            frame2.Release();
            Cv2.DestroyWindow("ir_face");
            Cv2.DestroyWindow("rgb_face");
            return(true);
        }
Exemple #21
0
        /// <summary>
        /// Captures frames from the selected camera, adaptively thresholds each one
        /// to a 255x255 binary image, run-length encodes the black runs of every row
        /// ("start end row" triplets), enqueues the encoded text, and shows the
        /// processed frame until <c>enable</c> is cleared or the camera stops.
        /// </summary>
        private void button1_Click(object sender, EventArgs e)
        {
            int camindex  = comboBox2.SelectedIndex;
            int fps       = 30;
            int sleepTime = (int)Math.Round((decimal)1000 / fps);

            // FIX: dispose the VideoCapture (it was never released before).
            using (var capture = new VideoCapture(camindex))
            using (var window = new Window("capture"))
            {
                Mat img = new Mat();
                Mat dst = new Mat();
                while (enable == 1)
                {
                    capture.Read(img);
                    if (img.Empty())
                    {
                        break;
                    }

                    Cv2.CvtColor(img, dst, ColorConversionCodes.BGR2GRAY);
                    Cv2.Resize(dst, dst, new OpenCvSharp.Size(255, 255));
                    // Threshold offset C is read from the textbox each frame (throws on
                    // non-numeric input, matching the original behavior).
                    Cv2.AdaptiveThreshold(dst, dst, 255, AdaptiveThresholdTypes.MeanC, ThresholdTypes.Binary, 11, int.Parse(textBox1.Text));

                    int height = dst.Height;
                    int width  = dst.Width;
                    unsafe
                    {
                        int           kokuten = 0;                     // number of black runs found
                        StringBuilder LK      = new StringBuilder(""); // runs on even (0-based) rows
                        StringBuilder LG      = new StringBuilder(""); // runs on odd rows

                        for (int i = 0; i < height; i++)
                        {
                            // FIX: the original advanced the pointer BEFORE reading
                            // (`b += 1; if (b[0] == 0)`), which skipped pixel (0,0) and
                            // read one byte past the end of the buffer on the last
                            // pixel. A per-row pointer also respects the Mat row stride.
                            byte *row  = (byte *)dst.Ptr(i);
                            int   oldx = 0;
                            int   flug = 0;
                            for (int j = 0; j < width; j++)
                            {
                                if (row[j] == 0)
                                {
                                    flug += 1;
                                    if (flug == 1)
                                    {
                                        oldx = j; // run starts here
                                    }
                                }
                                else
                                {
                                    if (flug != 0)
                                    {
                                        // Run ended at column j - 1: record "start end row".
                                        kokuten++;
                                        if (i % 2 == 0)
                                        {
                                            LK.AppendFormat("{0} {1} {2}\n", oldx, j - 1, i);
                                        }
                                        else
                                        {
                                            LG.AppendFormat("{0} {1} {2}\n", oldx, j - 1, i);
                                        }
                                        flug = 0;
                                    }
                                }
                            }
                            if (flug != 0)
                            {    // run extends to the last pixel of the row
                                kokuten++;
                                if (i % 2 == 0)
                                {
                                    LK.AppendFormat("{0} {1} {2}\n", oldx, 255, i);
                                }
                                else
                                {
                                    LG.AppendFormat("{0} {1} {2}\n", oldx, 255, i);
                                }
                                flug = 0;
                            }
                        }

                        // Even-row runs first, then odd-row runs, as one payload.
                        LK.Append(LG);
                        queue.Enqueue(LK);
                    }

                    window.ShowImage(dst);
                    Cv2.WaitKey(sleepTime);
                }
                Cv2.DestroyWindow("capture");
            }
        }
Exemple #22
0
 /// <summary>Turns off the blob view and closes its OpenCV preview window.</summary>
 /// <param name="sender">Event source (the button).</param>
 /// <param name="e">Standard click event arguments.</param>
 private void button5_Click(object sender, EventArgs e)
 {
     // Stop the main form from rendering the blob view, then close the
     // window that was displaying it.
     Main.SetBlobView(false);
     Cv2.DestroyWindow(dstWindow.Name);
 }
Exemple #23
0
 /// <summary>Turns off the real (source) view and closes its OpenCV preview window.</summary>
 /// <param name="sender">Event source (the button).</param>
 /// <param name="e">Standard click event arguments.</param>
 private void button2_Click(object sender, EventArgs e)
 {
     // Stop the main form from rendering the real view, then close the
     // window that was displaying it.
     Main.SetRealView(false);
     Cv2.DestroyWindow(srcWindow.Name);
 }
Exemple #24
0
        // RGB + depth liveness check with a binocular camera (adapted here for
        // the Hjimi binocular camera).
        /// <summary>
        /// Runs the RGB/depth liveness check on frame pairs grabbed from the
        /// Hjimi binocular camera, prints per-face tracking info, draws the
        /// rotated face boxes and the scores, and shows both streams until the
        /// user presses Esc.
        /// </summary>
        /// <returns>true after the preview loop is exited and resources are released.</returns>
        public bool rgb_depth_liveness_check_hjimi()
        {
            int faceNum   = 2;       // number of faces passed in
            int face_size = faceNum; // in: capacity; out: number of faces detected

            TrackFaceInfo[] track_info = new TrackFaceInfo[faceNum];
            for (int i = 0; i < faceNum; i++)
            {
                track_info[i]           = new TrackFaceInfo();
                track_info[i].landmarks = new int[144];
                track_info[i].headPose  = new float[3];
                track_info[i].face_id   = 0;
                track_info[i].score     = 0;
            }
            int    sizeTrack = Marshal.SizeOf(typeof(TrackFaceInfo));
            IntPtr ptT       = Marshal.AllocHGlobal(sizeTrack * faceNum);

            RotatedRect box;

            IntPtr phjimi      = HjimiCamera.new_hjimi();
            var    rgb_win     = new Window("rgb", WindowMode.AutoSize);
            var    depth_win   = new Window("depth", WindowMode.Normal);
            float  rgb_score   = 0;
            float  depth_score = 0;
            Mat    cv_depth    = new Mat();
            Mat    cv_rgb      = new Mat();

            while (true)
            {
                bool ok = HjimiCamera.open_hjimimat(phjimi, cv_rgb.CvPtr, cv_depth.CvPtr);
                if (!ok)
                {
                    Console.WriteLine("open camera failed"); // BUGFIX: was "faile"
                    continue;
                }
                // Skip incomplete frame pairs.
                if (cv_rgb.Empty())
                {
                    continue;
                }
                if (cv_depth.Empty())
                {
                    continue;
                }
                IntPtr resptr = rgb_depth_liveness_check_faceinfo(cv_rgb.CvPtr, cv_depth.CvPtr, ref rgb_score, ref depth_score, ref face_size, ptT);
                string res    = Marshal.PtrToStringAnsi(resptr);
                Console.WriteLine("res is:{0}", res);

                for (int index = 0; index < face_size; index++)
                {
                    // Read the index-th TrackFaceInfo out of the unmanaged buffer.
                    IntPtr ptrTrack = (IntPtr)(ptT.ToInt64() + sizeTrack * index);
                    track_info[index] = (TrackFaceInfo)Marshal.PtrToStructure(ptrTrack, typeof(TrackFaceInfo));
                    Console.WriteLine("in Liveness::usb_track face_id is {0}:", track_info[index].face_id);
                    Console.WriteLine("landmarks is:");
                    for (int k = 0; k < 1; k++)
                    {
                        Console.WriteLine("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},",
                                          track_info[index].landmarks[k], track_info[index].landmarks[k + 1],
                                          track_info[index].landmarks[k + 2], track_info[index].landmarks[k + 3],
                                          track_info[index].landmarks[k + 4], track_info[index].landmarks[k + 5],
                                          track_info[index].landmarks[k + 6], track_info[index].landmarks[k + 7],
                                          track_info[index].landmarks[k + 8], track_info[index].landmarks[k + 9]
                                          );
                    }

                    for (int k = 0; k < track_info[index].headPose.Length; k++)
                    {
                        Console.WriteLine("angle is:{0:f}", track_info[index].headPose[k]);
                    }
                    Console.WriteLine("score is:{0:f}", track_info[index].score);
                    // Box rotation angle
                    Console.WriteLine("mAngle is:{0:f}", track_info[index].box.mAngle);
                    // Face width
                    Console.WriteLine("mWidth is:{0:f}", track_info[index].box.mWidth);
                    // Center X/Y coordinates
                    Console.WriteLine("mCenter_x is:{0:f}", track_info[index].box.mCenter_x);
                    Console.WriteLine("mCenter_y is:{0:f}", track_info[index].box.mCenter_y);
                    // Draw the rotated face box on the RGB frame.
                    FaceTrack track = new FaceTrack();
                    box = track.bounding_box(track_info[index].landmarks, track_info[index].landmarks.Length);
                    track.draw_rotated_box(ref cv_rgb, ref box, new Scalar(0, 255, 0));
                }

                Mat depth_img = new Mat();
                // Scale 16-bit depth values into a displayable 8-bit image.
                cv_depth.ConvertTo(depth_img, MatType.CV_8UC1, 255.0 / 4500);
                string msg_depth = "depth score is:" + depth_score.ToString();
                Cv2.PutText(depth_img, msg_depth, new Point(20, 50), HersheyFonts.HersheyComplex, 1, new Scalar(255, 100, 0));

                string msg_rgb = "rgb score is:" + rgb_score.ToString();
                Cv2.PutText(cv_rgb, msg_rgb, new Point(20, 50), HersheyFonts.HersheyComplex, 1, new Scalar(255, 100, 0));

                rgb_win.ShowImage(cv_rgb);
                depth_win.ShowImage(depth_img);
                // BUGFIX: the loop previously never terminated, so everything
                // after it (FreeHGlobal, Mat releases, window destruction and
                // hjimi_release) was unreachable — leaking the unmanaged buffer
                // and the native camera handle. Exit on Esc, consistent with
                // rgb_depth_liveness_check_orbe().
                int key = Cv2.WaitKey(1);
                depth_img.Release();
                if (27 == key)
                {
                    break;
                }
            }
            Marshal.FreeHGlobal(ptT);
            cv_rgb.Release();
            cv_depth.Release();
            Cv2.DestroyWindow("rgb");
            Cv2.DestroyWindow("depth");
            HjimiCamera.hjimi_release(phjimi);
            return(true);
        }
Exemple #25
0
        // Silent RGB + DEPTH binocular liveness check fed with OpenCV video
        // frames; adapted for the Orbbec (mini) binocular camera.
        /// <summary>
        /// Runs the RGB/depth liveness check on frame pairs grabbed from the
        /// Orbbec mini binocular camera, prints per-face tracking info, draws
        /// rotated face boxes and the scores, and shows both streams until the
        /// user presses Esc.
        /// </summary>
        /// <returns>true after the preview loop is exited with Esc.</returns>
        public bool rgb_depth_liveness_check_orbe()
        {
            int faceNum   = 2;       // number of faces passed in
            int face_size = faceNum; // in: capacity; out: number of faces detected

            TrackFaceInfo[] track_info = new TrackFaceInfo[faceNum];
            for (int i = 0; i < faceNum; i++)
            {
                track_info[i]           = new TrackFaceInfo();
                track_info[i].landmarks = new int[144];
                track_info[i].headPose  = new float[3];
                track_info[i].face_id   = 0;
                track_info[i].score     = 0;
            }
            int    sizeTrack = Marshal.SizeOf(typeof(TrackFaceInfo));
            IntPtr ptT       = Marshal.AllocHGlobal(sizeTrack * faceNum);

            IntPtr pOrbe        = new_orbe(); // must be paired with orbe_release below
            Mat    rgb_mat      = new Mat(480, 640, MatType.CV_8UC3);
            Mat    depth_mat    = new Mat(480, 640, MatType.CV_16UC1);
            float  rgb_score    = 0;
            float  depth_score  = 0;
            var    window_depth = new Window("depth_face");
            var    window_rgb   = new Window("rgb_face");

            while (true)
            {
                RotatedRect box;
                open_orbe(pOrbe, rgb_mat.CvPtr, depth_mat.CvPtr);
                Console.WriteLine("rgb_mat rows {0} depth_mat rows {1}", rgb_mat.Rows, depth_mat.Rows);
                if (!rgb_mat.Empty() && !depth_mat.Empty())
                {
                    IntPtr resptr = rgb_depth_liveness_check_faceinfo(rgb_mat.CvPtr, depth_mat.CvPtr, ref rgb_score, ref depth_score, ref face_size, ptT);
                    string res    = Marshal.PtrToStringAnsi(resptr);
                    Console.WriteLine("res is:{0}", res);

                    for (int index = 0; index < face_size; index++)
                    {
                        // Read the index-th TrackFaceInfo out of the unmanaged buffer.
                        IntPtr ptrTrack = (IntPtr)(ptT.ToInt64() + sizeTrack * index);
                        track_info[index] = (TrackFaceInfo)Marshal.PtrToStructure(ptrTrack, typeof(TrackFaceInfo));
                        Console.WriteLine("in Liveness::usb_track face_id is {0}:", track_info[index].face_id);
                        Console.WriteLine("landmarks is:");
                        for (int k = 0; k < 1; k++)
                        {
                            Console.WriteLine("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},",
                                              track_info[index].landmarks[k], track_info[index].landmarks[k + 1],
                                              track_info[index].landmarks[k + 2], track_info[index].landmarks[k + 3],
                                              track_info[index].landmarks[k + 4], track_info[index].landmarks[k + 5],
                                              track_info[index].landmarks[k + 6], track_info[index].landmarks[k + 7],
                                              track_info[index].landmarks[k + 8], track_info[index].landmarks[k + 9]
                                              );
                        }

                        for (int k = 0; k < track_info[index].headPose.Length; k++)
                        {
                            Console.WriteLine("angle is:{0:f}", track_info[index].headPose[k]);
                        }
                        Console.WriteLine("score is:{0:f}", track_info[index].score);
                        // Box rotation angle
                        Console.WriteLine("mAngle is:{0:f}", track_info[index].box.mAngle);
                        // Face width
                        Console.WriteLine("mWidth is:{0:f}", track_info[index].box.mWidth);
                        // Center X/Y coordinates
                        Console.WriteLine("mCenter_x is:{0:f}", track_info[index].box.mCenter_x);
                        Console.WriteLine("mCenter_y is:{0:f}", track_info[index].box.mCenter_y);
                        // Draw the rotated face box on the RGB frame.
                        FaceTrack track = new FaceTrack();
                        box = track.bounding_box(track_info[index].landmarks, track_info[index].landmarks.Length);
                        track.draw_rotated_box(ref rgb_mat, ref box, new Scalar(0, 255, 0));
                    }
                }
                string msg_rgb = "rgb score is:" + rgb_score.ToString();
                Cv2.PutText(rgb_mat, msg_rgb, new Point(20, 50), HersheyFonts.HersheyComplex, 1, new Scalar(255, 100, 0));
                window_rgb.ShowImage(rgb_mat);
                //Cv2.ImShow("rgb_face", rgb_mat);

                string msg_depth = "depth score is:" + depth_score.ToString();
                Cv2.PutText(depth_mat, msg_depth, new Point(20, 50), HersheyFonts.HersheyComplex, 1, new Scalar(255, 255, 255));
                window_depth.ShowImage(depth_mat);
                //Cv2.ImShow("depth_face", depth_mat);
                // Esc exits the preview loop.
                int c = Cv2.WaitKey(2);
                if (27 == c)
                {
                    break;
                }
            }
            orbe_release(pOrbe); // paired with new_orbe above
            Marshal.FreeHGlobal(ptT);
            rgb_mat.Release();
            depth_mat.Release();
            Cv2.DestroyWindow("depth_face");
            Cv2.DestroyWindow("rgb_face");
            return(true);
        }
Exemple #26
0
        /// <summary>
        /// Scores the current frame against candidate match pages and switches
        /// the current page to the best match, or to the unknown page when no
        /// candidate exceeds the similarity threshold.
        /// </summary>
        /// <param name="mat">Current frame to classify.</param>
        private void ScoreAllMatchPages(Mat mat)
        {
            string mainLog       = "--------------------------ScoreAllMatchPages-----------------------------\n";
            string maxPageName   = Globals.UnknownPage;
            double maxSimilarity = 0.0;

            MatchPage maxPage = null;
            DateTime  start   = DateTime.Now;

            MatOfFloat descriptors = null;

            _keypoints = null;
            Mat gray = null;

            // Extract FAST descriptors/keypoints for the frame.
            // NOTE(review): `gray` is produced here but never used or disposed in
            // this method — confirm ownership with FastMatchSample.FastDescriptor.
            FastMatchSample.FastDescriptor(mat, out descriptors, out _keypoints, out gray);
            //FastMatchSample.SiftDescriptor(mat, out descriptors, out keypoints, out gray);

            // Candidate set: every known page when we are lost, otherwise only
            // the pages related to the current one. (The scoring loop was
            // previously duplicated verbatim for both cases.)
            var candidates = new List<MatchPage>();

            if (_currentPageName == Globals.UnknownPage)
            {
                foreach (var p in _matchPages)
                {
                    candidates.Add(p.Value);
                }
            }
            else
            {
                foreach (var p in _matchPages[_currentPageName].RelatedPages)
                {
                    if (!_matchPages.ContainsKey(p))
                    {
                        Console.WriteLine("!!!!!!!!!!!!!!!!!!!!!!!! no Page!!!!!!!!!!!!!!!!!!!!!! [" + p + "]");
                        continue;
                    }

                    candidates.Add(_matchPages[p]);
                }
            }

            double dissimilarity;
            double similarity;
            string log;

            foreach (var page in candidates)
            {
                // PageScore's int result was never used; only the out values matter.
                page.PageScore(descriptors, out dissimilarity, out similarity, out log);

                if (_log)
                {
                    mainLog += log;
                }

                similarity = page.AdditionalCalcSimilarity(mat, similarity);

                if (similarity > maxSimilarity)
                {
                    maxSimilarity = similarity;
                    maxPageName   = page.PageName;
                    maxPage       = page;
                }
            }

            if (RenderImageProcess)
            {
                if (maxPage != null)
                {
                    maxPage.RenderKeyPoints();
                }

                Utils.RenderKeyPoints(Globals.ImShowCurrentPageName, mat, _keypoints);
            }
            else
            {
                Cv2.DestroyWindow(Globals.ImShowMaxMatchPageName);
                Cv2.DestroyWindow(Globals.ImShowCurrentPageName);
            }

            DateTime end  = DateTime.Now;
            var      time = (end - start).TotalMilliseconds;

            // Accept the best match only above a fixed similarity threshold.
            if (maxSimilarity > 250.0f)
            {
                ChangePage(maxPageName);
            }
            else
            {
                ChangePage(Globals.UnknownPage);
            }

            if (_log /*&& _prevPageName != _currentPageName*/)
            {
                mainLog += "\n[" + _currentPageName + "]  maxSimilarity : " + maxSimilarity + "  total time : " + time + "\n";
                mainLog += "-------------------------------------------------------------------------\n\n";
                Console.WriteLine(mainLog);
            }
        }
 /// <summary>
 /// Closes the "Step By Step" debug window and returns the buffer of
 /// intermediate processing-step images.
 /// </summary>
 /// <returns>The buffered step images.</returns>
 public Mat[] getImages()
 {
     Cv2.DestroyWindow("Step By Step");
     return(stepImageBuffer);
 }
Exemple #28
0
        /// <summary>
        /// Detects a yellow object inside the ROI polygon of the current frame
        /// and updates the work/downtime time accounting accordingly.
        /// </summary>
        /// <param name="curRoi">Matrix with the region of interest of the current frame.</param>
        /// <param name="time">Timestamp used to measure idle/work intervals.</param>
        /// <param name="lines">ROI lines (not used by the current implementation).</param>
        /// <param name="newDots">Polygon vertices bounding the ROI mask.</param>
        /// <param name="grannyInTheROI">Flag: yellow object currently present in the ROI.</param>
        /// <param name="failure">Flag: a downtime period is currently active.</param>
        private void Algorhitm(ref Mat curRoi, ref DateTime time, float[][] lines, IEnumerable <OpenCvSharp.Point> newDots, ref bool grannyInTheROI, ref bool failure)
        {
            try
            {
                var ins    = Mat.Zeros(curRoi.Size(), MatType.CV_8UC1).ToMat();
                var tmpins = Mat.Zeros(curRoi.Size(), MatType.CV_8UC1).ToMat();

                // Binary mask of pixels inside the "yellow" BGR range.
                Cv2.InRange(curRoi, new Scalar(120, 180, 200), new Scalar(200, 255, 255), tmpins);

                // Clip the detection mask to the ROI polygon.
                var polyMask = Mat.Zeros(curRoi.Size(), MatType.CV_8UC1).ToMat();
                Cv2.FillPoly(polyMask, new List <IEnumerable <OpenCvSharp.Point> >()
                {
                    newDots
                }, Scalar.White);

                Cv2.BitwiseAnd(tmpins, polyMask, tmpins);
                // BUGFIX: polyMask was previously disposed only on the detection
                // branch; release it unconditionally once it is no longer used.
                polyMask.Dispose();

                // Keep only sufficiently large contours and redraw them filled
                // onto the clean `ins` mask.
                var cnt = tmpins.FindContoursAsArray(RetrievalModes.External, ContourApproximationModes.ApproxNone).Where(x => x.Count() > 80).ToArray();
                for (int i = 0; i < cnt.Count(); i++)
                {
                    Cv2.DrawContours(ins, cnt, i, Scalar.White, -1);
                }
                // BUGFIX: tmpins was never released.
                tmpins.Dispose();

                if (ins.CountNonZero() > detectionEdge)
                {
                    // Object detected: stop counting downtime.
                    if (failure)
                    {
                        failureTimeMin += DateTime.Now.Subtract(time).TotalMinutes;
                        failure         = false;

                        Extensions.BeginInvoke(() =>
                        {
                            mw.txtBlockAlarm.Visibility = Visibility.Hidden;
                            WriteStateLogs($"Конец простоя длительностью {Math.Round(DateTime.Now.Subtract(curFailure).TotalMinutes, 2)} мин.", logName);

                            SaveImg(curFailure + DateTime.Now.Subtract(curFailure), curFrame, true);
                            WriteStateLogsInDB();                             // persist the state change to the DB
                        });
                    }
                    else
                    {
                        workTimeMin += DateTime.Now.Subtract(time).TotalMinutes;
                    }

                    time = DateTime.Now;

                    grannyInTheROI = true;
                }
                else if (grannyInTheROI)
                {
                    // Object just left the ROI: account the elapsed work time.
                    workTimeMin   += DateTime.Now.Subtract(time).TotalMinutes;
                    time           = DateTime.Now;
                    grannyInTheROI = false;
                }

                // Optionally show the thresholded image.
                // NOTE(review): `ins` is captured by this asynchronous closure,
                // so it is intentionally not disposed here — confirm who
                // releases it (and the Clone passed to ImShow).
                Extensions.BeginInvoke(() =>
                {
                    if (mw.checkThresh.IsChecked == true)
                    {
                        Cv2.ImShow("Threshold", ins.Clone());
                    }
                    else if (!(bool)mw.checkThresh.IsChecked)
                    {
                        Cv2.DestroyWindow("Threshold");
                    }
                });

                lastTime = DateTime.Now;
                lastFrame.Dispose();
                // The previous frame becomes the current one.
                lastFrame = curRoi.Clone();
            }
            catch (Exception e)
            {
                // BUGFIX: exceptions were silently swallowed here; keep the
                // best-effort behavior (no rethrow) but at least log them so
                // failures in the detection pipeline are visible.
                Console.WriteLine(e);
            }
        }
Exemple #29
0
 /// <summary>Closes the source preview window, releases the camera and closes the form.</summary>
 /// <param name="sender">Event source (the button).</param>
 /// <param name="e">Standard click event arguments.</param>
 private void button1_Click(object sender, EventArgs e)
 {
     Cv2.DestroyWindow(srcWindow.Name);
     cam.Release();
     Close();
 }
Exemple #30
0
        /// <summary>
        /// Converts <paramref name="img"/> to HSV, computes a 16-bin histogram
        /// per channel and optionally renders the histogram plot for debugging.
        /// </summary>
        /// <param name="img">Input image. NOTE(review): the conversion uses
        /// RGB2HSV, but OpenCV Mats are normally BGR — confirm the intended
        /// channel order with callers before changing it.</param>
        /// <param name="showImage">When true, shows the HSV source and the
        /// histogram plot and blocks until a key is pressed.</param>
        /// <returns>Three Mats: H, S and V histograms (16 bins each).</returns>
        public Mat[] HistogramCalculation(Mat img, bool showImage = false)
        {
            Mat src = new Mat();

            Cv2.CvtColor(img, src, ColorConversionCodes.RGB2HSV);

            // Split the three-channel image into three single-channel planes.
            Mat[] mats = Cv2.Split(src);
            Mat[] hist_HSV = { new Mat(), new Mat(), new Mat() };

            int[] channels0 = { 0 };
            int[] channels1 = { 1 };
            int[] channels2 = { 2 };
            int[] histSize  = { 16 };

            // Hue spans [0,180) in OpenCV; saturation and value span [0,256).
            Rangef[] rangefh = new Rangef[]
            {
                new Rangef(0, 180),
            };
            Rangef[] rangefsv = new Rangef[]
            {
                new Rangef(0, 256),
            };

            // Compute the dense histogram for each plane. One shared empty Mat
            // serves as "no mask". BUGFIX: previously three temporary mask Mats
            // (and an unused `histogram` Mat) were allocated and never released.
            using (Mat noMask = new Mat())
            {
                Cv2.CalcHist(mats, channels0, noMask, hist_HSV[0], 1, histSize, rangefh, true, false);
                Cv2.CalcHist(mats, channels1, noMask, hist_HSV[1], 1, histSize, rangefsv, true, false);
                Cv2.CalcHist(mats, chann2Fix(channels2), hist_HSV[2], 1, histSize, rangefsv, true, false);
            }

            int high  = 400;        // canvas height in pixels
            int width = 512;        // canvas width in pixels
            int bin_w = width / 16; // width of one bin on the canvas
            // BUGFIX: Mat takes (rows, cols); the arguments were swapped, so the
            // canvas was 512x400 while the drawing code assumed 400 high by
            // 512 wide, clipping the right part of the plot.
            Mat histImage = new Mat(high, width, MatType.CV_8UC3, new Scalar(0, 0, 0));

            // Normalize bin counts into the canvas height so large counts fit.
            Cv2.Normalize(hist_HSV[0], hist_HSV[0], 0, histImage.Rows, NormTypes.MinMax, -1, null);
            Cv2.Normalize(hist_HSV[1], hist_HSV[1], 0, histImage.Rows, NormTypes.MinMax, -1, null);
            Cv2.Normalize(hist_HSV[2], hist_HSV[2], 0, histImage.Rows, NormTypes.MinMax, -1, null);

            // Draw the three histograms as polylines connecting adjacent bins.
            for (int i = 1; i < 16; i++)
            {
                // BUGFIX: both endpoints previously used the same x coordinate
                // (bin_w * (i - 1) twice), producing detached vertical ticks;
                // the second endpoint now advances to bin i.
                // H channel (blue)
                Cv2.Line(histImage, new Point(bin_w * (i - 1), high - Math.Round(hist_HSV[0].At <float>(i - 1))), new Point(bin_w * i, high - Math.Round(hist_HSV[0].At <float>(i))), new Scalar(255, 0, 0), 1, LineTypes.AntiAlias);

                // S channel (green)
                Cv2.Line(histImage, new Point(bin_w * (i - 1), high - Math.Round(hist_HSV[1].At <float>(i - 1))), new Point(bin_w * i, high - Math.Round(hist_HSV[1].At <float>(i))), new Scalar(0, 255, 0), 1, LineTypes.AntiAlias);

                // V channel (red)
                Cv2.Line(histImage, new Point(bin_w * (i - 1), high - Math.Round(hist_HSV[2].At <float>(i - 1))), new Point(bin_w * i, high - Math.Round(hist_HSV[2].At <float>(i))), new Scalar(0, 0, 255), 1, LineTypes.AntiAlias);
            }
            if (showImage == true)
            {
                using (new Window("SRC", src))
                    using (new Window("histImage", histImage))
                    {
                        Cv2.WaitKey(0);
                        Cv2.DestroyWindow("SRC");
                        Cv2.DestroyWindow("histImage");
                    }
            }

            // BUGFIX: release intermediates; only the histograms are returned.
            foreach (var m in mats)
            {
                m.Dispose();
            }
            src.Dispose();
            histImage.Dispose();

            return(hist_HSV);
        }