Code Example #1
        private static void Main(string[] args)
        {
            Mat src   = new Mat("data/tsukuba_left.png", LoadMode.GrayScale);
            Mat dst20 = new Mat();
            Mat dst40 = new Mat();
            Mat dst44 = new Mat();

            using (CLAHE clahe = Cv2.CreateCLAHE())
            {
                clahe.ClipLimit = 20;
                clahe.Apply(src, dst20);
                clahe.ClipLimit = 40;
                clahe.Apply(src, dst40);
                clahe.TilesGridSize = new Size(4, 4);
                clahe.Apply(src, dst44);
            }

            Window.ShowImages(src, dst20, dst40, dst44);

            /*var img1 = new IplImage("data/lenna.png", LoadMode.Color);
             * var img2 = new IplImage("data/match2.png", LoadMode.Color);
             * Surf(img1, img2);*/

            //Mat[] mats = StitchingPreprocess(400, 400, 10);
            //Stitching(mats);
            //Track();
            //Run();
        }
Code Example #2
        private static void Clahe()
        {
            Mat src   = new Mat("data/tsukuba_left.png", ImreadModes.GrayScale);
            Mat dst20 = new Mat();
            Mat dst40 = new Mat();
            Mat dst44 = new Mat();

            using (CLAHE clahe = Cv2.CreateCLAHE())
            {
                clahe.ClipLimit = 20;
                clahe.Apply(src, dst20);
                clahe.ClipLimit = 40;
                clahe.Apply(src, dst40);
                clahe.TilesGridSize = new Size(4, 4);
                clahe.Apply(src, dst44);
            }

            Window.ShowImages(src, dst20, dst40, dst44);
        }
Code Example #3
        public void Run()
        {
            Mat src  = new Mat(FilePath.Image.TsukubaLeft, ImreadModes.Grayscale);
            Mat dst1 = new Mat();
            Mat dst2 = new Mat();
            Mat dst3 = new Mat();

            using (CLAHE clahe = Cv2.CreateCLAHE())
            {
                clahe.ClipLimit = 20;
                clahe.Apply(src, dst1);
                clahe.ClipLimit = 40;
                clahe.Apply(src, dst2);
                clahe.TilesGridSize = new Size(4, 4);
                clahe.Apply(src, dst3);
            }

            Window.ShowImages(
                new[] { src, dst1, dst2, dst3 },
                new[] { "src", "dst clip20", "dst clip40", "dst tile4x4" });
        }
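The three variants above share one detail worth noting: CLAHE keeps its state between Apply calls, so the third output (dst44/dst3) is produced with the ClipLimit of 40 set just before it plus the new 4x4 tile grid (OpenCV's defaults are a clip limit of 40 and an 8x8 grid). A minimal sketch of the more explicit style, passing both parameters to CreateCLAHE up front (file path reused from the examples above):

        private static void ClaheExplicit()
        {
            Mat src = new Mat("data/tsukuba_left.png", ImreadModes.Grayscale);
            Mat dst = new Mat();

            // Both parameters fixed at construction time: clip limit 40, 4x4 tiles.
            using (CLAHE clahe = Cv2.CreateCLAHE(40, new Size(4, 4)))
            {
                clahe.Apply(src, dst);
            }

            Window.ShowImages(src, dst);
        }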
Code Example #4
        //5. Image enhancement method 2 (input must be a grayscale image): enhances the image via CLAHE adaptive histogram equalization.
        public static Mat ImageEnhancementMethod2(Mat inMat, Mat outMat)
        {
            //Mat mat5 = new Mat();
            using (CLAHE clahe = Cv2.CreateCLAHE())
            {
                //clahe.ClipLimit = 20;

                clahe.TilesGridSize = new Size(4, 4);
                clahe.Apply(inMat, outMat);
                //clahe.ClipLimit = 40;
                //clahe.Apply(src, dst2);
                //clahe.TilesGridSize = new Size(4, 4);
                //clahe.Apply(src, dst3);
            }
            return(outMat);
        }
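A hedged usage sketch for the method above (the file name is hypothetical); per the header comment, the input must already be a grayscale Mat:

        Mat gray     = new Mat("input.png", ImreadModes.Grayscale);
        Mat enhanced = ImageEnhancementMethod2(gray, new Mat());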
Code Example #5
        public static Mat EscalaGrisesEqualizada(Mat imagen)
        {
            /* BGR to grayscale */
            Mat imagenGris = new Mat();

            Cv2.CvtColor(imagen, imagenGris, ColorConversionCodes.BGR2GRAY);

            /* histogram equalization */
            Mat   imagenGrisEqualizada  = new Mat();
            CLAHE ecualizadorHistograma = Cv2.CreateCLAHE(5, new Size(3, 3));

            ecualizadorHistograma.Apply(imagenGris, imagenGrisEqualizada);

            /* free memory */
            imagenGris.Release();
            return(imagenGrisEqualizada);
        }
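One caveat in the method above: the CLAHE instance wraps native resources and is never disposed. A minimal variant of the equalization step with deterministic cleanup, using the same parameters (clip limit 5, 3x3 tiles):

        using (CLAHE eq = Cv2.CreateCLAHE(5, new Size(3, 3)))
        {
            eq.Apply(imagenGris, imagenGrisEqualizada);
        }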
Code Example #6
        public static void NormalizeRGB(this Mat self, Mat output, double clip)
        {
            if (self.Channel != 3)
            {
                throw new NotSupportedException("Channel should be RGB");
            }

            ConvertColor(self, ColorConversionCodes.BGR2Lab);

            Mat[] spl = Split(self);

            CLAHE c = CLAHE.Create(clip, new OpenCvSharp.Size(8, 8));

            c.Apply(spl[0], spl[0]);

            Merge(self, spl);

            ConvertColor(self, ColorConversionCodes.Lab2BGR);
        }
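The extension method above depends on ConvertColor/Split/Merge helpers defined elsewhere in its project. A self-contained sketch of the same technique with plain Cv2 calls (assumed equivalents of those helpers): CLAHE runs on the L channel of the Lab representation only, equalizing lightness without shifting hue:

        public static void NormalizeRgbSketch(Mat bgr, double clip)
        {
            Cv2.CvtColor(bgr, bgr, ColorConversionCodes.BGR2Lab);
            Mat[] channels = Cv2.Split(bgr);

            using (CLAHE clahe = Cv2.CreateCLAHE(clip, new OpenCvSharp.Size(8, 8)))
            {
                clahe.Apply(channels[0], channels[0]); // L (lightness) channel only
            }

            Cv2.Merge(channels, bgr);
            Cv2.CvtColor(bgr, bgr, ColorConversionCodes.Lab2BGR);
        }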
Code Example #7
        public void cuda_CLAHE()
        {
            Mat  src  = Image("lenna.png", ImreadModes.Grayscale);
            Size size = src.Size();

            Cuda.CLAHE clahe      = Cuda.CLAHE.create(20.0);
            CLAHE      clahe_gold = CLAHE.Create(20.0);

            using (GpuMat g_src = new GpuMat(size, src.Type()))
                using (GpuMat dst = new GpuMat()) {
                    g_src.Upload(src);

                    clahe.Apply(g_src, dst);
                    Mat dst_gold = new Mat();
                    clahe_gold.Apply(src, dst_gold);

                    ImageEquals(dst_gold, dst, 1.0);
                    ShowImagesWhenDebugMode(src, dst);
                }
        }
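In the test above the comparison helper receives the GpuMat result directly. If the CUDA output were needed on the CPU (for display or saving), it would have to be copied back to host memory first; a minimal sketch, assuming the same GpuMat API used for Upload above:

        Mat result = new Mat();
        dst.Download(result); // copy device memory back to the host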
Code Example #8
        private Mat Recipe(string path, double value, string option)
        {
            if (path != null)
            {
                Mat orgMat     = new Mat(path);
                Mat previewMat = new Mat();

                #region //Algorithm
                Mat matrix = new Mat();
                switch (option)
                {
                case "Contrast":
                    Cv2.AddWeighted(orgMat, value, orgMat, 0, 0, previewMat);
                    break;
                //With AddWeighted, the gamma argument lets an extra addition be folded into the weighted sum in a single call.
                //computes weighted sum of two arrays (dst = alpha*src1 + beta*src2 + gamma)
                //http://suyeongpark.me/archives/tag/opencv/page/2

                case "Brightness":
                    Cv2.Add(orgMat, value, previewMat);
                    break;
                //Add performs per-element addition on the image.
                //The Add operation saturates automatically.
                //http://suyeongpark.me/archives/tag/opencv/page/2


                case "Blur":
                    Cv2.GaussianBlur(orgMat, previewMat, new OpenCvSharp.Size(9, 9), value, 1, BorderTypes.Default);     //GaussianBlur
                    break;
                //Used to blur an image so it smears: each pixel's color value is recalculated by comparing it with its neighbors.
                //For every pixel, the surrounding values are compared and averaged; a simple blur replaces the center value with the mean of the surrounding window, applied to every pixel.
                //https://076923.github.io/posts/C-opencv-13/

                case "Rotation":
                    matrix = Cv2.GetRotationMatrix2D(new Point2f(orgMat.Width / 2, orgMat.Height / 2), value, 1.0);     // GetRotationMatrix2D creates the 2x3 rotation matrix
                    Cv2.WarpAffine(orgMat, previewMat, matrix, new OpenCvSharp.Size(orgMat.Width, orgMat.Height), InterpolationFlags.Linear, BorderTypes.Replicate);
                    break;
                //WarpAffine(source, destination, matrix, destination size): the destination size is given explicitly because after rotation it may differ from the source size.
                //InterpolationFlags.Linear is the interpolation method most commonly used for images.
                //BorderTypes.Replicate fills the margins exposed by the rotation by replicating the edge pixels (rather than zero-padding them with black).
                //https://076923.github.io/posts/C-opencv-6/

                case "Rotation90":
                    matrix = Cv2.GetRotationMatrix2D(new Point2f(orgMat.Width / 2, orgMat.Height / 2), 90, 1.0);
                    Cv2.WarpAffine(orgMat, previewMat, matrix, new OpenCvSharp.Size(orgMat.Width, orgMat.Height), InterpolationFlags.Linear, BorderTypes.Reflect);
                    break;
                //WarpAffine(source, destination, matrix, destination size): the destination size is given explicitly because after rotation it may differ from the source size.
                //InterpolationFlags.Linear is the interpolation method most commonly used for images.
                //BorderTypes.Reflect fills the margins exposed by the rotation by mirroring the edge pixels.
                //https://076923.github.io/posts/C-opencv-6/


                case "Horizontal Flip":
                    Cv2.Flip(orgMat, previewMat, FlipMode.Y);
                    break;
                //Flip(source image, destination image, flip axis): mirrors the image across the axis given by FlipMode.
                //https://076923.github.io/posts/C-opencv-5/


                case "Vertical Flip":
                    Cv2.Flip(orgMat, previewMat, FlipMode.X);
                    break;
                //Flip(source image, destination image, flip axis): mirrors the image across the axis given by FlipMode.
                //https://076923.github.io/posts/C-opencv-5/


                case "Noise":
                    matrix = new Mat(orgMat.Size(), MatType.CV_8UC3);
                    Cv2.Randn(matrix, Scalar.All(0), Scalar.All(value));
                    Cv2.AddWeighted(orgMat, 1, matrix, 1, 0, previewMat);
                    break;
                //Randn fills the matrix with randomly generated, normally distributed values.
                //AddWeighted then blends the noise matrix into the image with the given weights.
                //

                case "Zoom In":
                    //#1. Zoom into the center only
                    double width_param  = (int)(0.8 * orgMat.Width);         // 0.8 is the zoom factor, orgMat.Width the source width; the 0.8 should eventually be exposed as a percentage-style parameter
                    double height_param = (int)(0.8 * orgMat.Height);        // 0.8 is the zoom factor, orgMat.Height the source height
                    int    startX       = orgMat.Width - (int)width_param;   // top-left X of the crop: source size minus the scaled size
                    int    startY       = orgMat.Height - (int)height_param;
                    Mat    tempMat      = new Mat(orgMat, new OpenCvSharp.Rect(startX, startY, (int)width_param - (int)(0.2 * orgMat.Width), (int)height_param - (int)(0.2 * orgMat.Height))); // intermediate Mat; the Rect specifies the crop's x, y, width, and height
                    // TODO: handle the rect falling outside the image (shift it, or zero-pad)
                    Cv2.Resize(tempMat, previewMat, new OpenCvSharp.Size(orgMat.Width, orgMat.Height), (double)((double)orgMat.Width / (double)(width_param - (int)(0.2 * orgMat.Width))),
                               (double)((double)orgMat.Height / ((double)(height_param - (int)(0.2 * orgMat.Height)))), InterpolationFlags.Cubic);
                    // (double)((double)orgMat.Width / (double)width_param)
                    // cast: source width / target width == how many times larger the source is than the crop
                    // (double)((double)orgMat.Height / (double)height_param)
                    break;


                case "Sharpen":
                    float   filterBase   = -1f;
                    float   filterCenter = filterBase * -9;
                    float[] data         = new float[9] {
                        filterBase, filterBase, filterBase,
                        filterBase, filterCenter, filterBase,
                        filterBase, filterBase, filterBase
                    };
                    Mat kernel = new Mat(3, 3, MatType.CV_32F, data);
                    Cv2.Filter2D(orgMat, previewMat, orgMat.Type(), kernel);
                    break;
                //
                //
                //

                // Contrast Limited Adaptive Histogram Equalization
                case "CLAHE":
                    CLAHE test = Cv2.CreateCLAHE();
                    test.SetClipLimit(10.0f);
                    if (value < 1)
                    {
                        value = 1;
                    }
                    test.SetTilesGridSize(new OpenCvSharp.Size(value, value));
                    Cv2.CvtColor(orgMat, orgMat, ColorConversionCodes.RGB2HSV);
                    var splitedMat = orgMat.Split();

                    test.Apply(splitedMat[2], splitedMat[2]);
                    Cv2.Merge(splitedMat, previewMat);
                    Cv2.CvtColor(previewMat, previewMat, ColorConversionCodes.HSV2RGB);
                    break;

                //
                //
                //

                default:
                    break;
                }
                matrix.Dispose(); //Release the image's memory allocation.
                orgMat.Dispose(); //Release the image's memory allocation.
                return(previewMat);

                #endregion
            }
            return(null);
        }
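To make the "Contrast" branch above concrete: AddWeighted computes dst = alpha*src1 + beta*src2 + gamma, so with beta = gamma = 0 the value argument acts as a plain contrast gain with automatic saturation. A standalone sketch (the file name and the gain of 1.5 are hypothetical):

        Mat orgMat     = new Mat("sample.png");
        Mat previewMat = new Mat();

        // dst = 1.5*src + 0*src + 0: every pixel scaled by 1.5, clipped at 255.
        Cv2.AddWeighted(orgMat, 1.5, orgMat, 0, 0, previewMat);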
Code Example #9
File: AutoRotate.cs Project: EdHubbell/CSAMUtils
        public static double? GetRotationAngle(string inputFileFolderPath, string inputFileName, bool showMessageBoxes, ImageBox iboxRaw, ImageBox iboxProcessed, out double msecElapsed)
        {
            double? rotationAngle = null;

            msecElapsed = 0;

            // Hough algo does a bad job detecting horizontal lines. So we rotate the image by a set amount before running the Hough.
            double houghRotationOffsetAngle = 25.0;

            try
            {
                Stopwatch stopWatch = new Stopwatch();
                stopWatch.Start();

                if (iboxProcessed != null)
                {
                    iboxProcessed.Image = null;
                    iboxProcessed.Refresh();
                }

                Mat rotated = new Mat();

                Mat src = new Mat(inputFileFolderPath + inputFileName, ImreadModes.Grayscale);

                if (showMessageBoxes && iboxRaw != null)
                {
                    iboxRaw.Image = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(src);
                }


                // Not needed if we read as grayscale to start with.
                //Mat src8UC1 = new Mat();
                //src.ConvertTo(src8UC1, MatType.CV_8UC1);

                // I'm not sure why we do the gauss - It seems like everyone does it, it's cheap, so we do it. ~Ed
                Mat gauss = new Mat();
                Cv2.GaussianBlur(src, gauss, new OpenCvSharp.Size(3, 3), 2, 2);
                LogEvent("gauss", showMessageBoxes, gauss, iboxProcessed);

                // An attempt to get the contrast across the image to be somewhat uniform.
                CLAHE claheFilter = Cv2.CreateCLAHE(4, new OpenCvSharp.Size(10, 10));
                Mat   clahe       = new Mat();
                claheFilter.Apply(gauss, clahe);
                LogEvent("clahe", showMessageBoxes, clahe, iboxProcessed);

                // An attempt to get the contrast across the image to be somewhat uniform.
                Mat hist = new Mat();
                Cv2.EqualizeHist(gauss, hist);
                LogEvent("hist", showMessageBoxes, hist, iboxProcessed);



                // Grab a template from some middle part of the image. Eventually, the size and location of this
                // template will be specified. It is very possible we'll have to grab multiple templates, as the
                // location of the template may impact the accuracy of the rotation.
                // e.g. - if the template is an image of a damaged device (which may happen at any location), the calculated
                // rotation may be wrong. Testing is required.
                // The locations where the template matches will create an image with lines that are offset from 0/90 degrees.
                // This is because we can assume that the devices are orthogonal to one another, even if the image itself is
                // offset rotationally.
                Rect r1       = new Rect(new OpenCvSharp.Point(1000, 1000), new OpenCvSharp.Size(500, 300));
                var  roi      = new Mat(clahe, r1);
                Mat  template = new Mat(new OpenCvSharp.Size(500, 300), MatType.CV_8UC1);
                roi.CopyTo(template);

                LogEvent("template", showMessageBoxes, template, iboxProcessed);


                Mat templateMatch = new Mat();

                Cv2.MatchTemplate(clahe, template, templateMatch, TemplateMatchModes.CCoeffNormed);
                LogEvent("templatematch", showMessageBoxes, templateMatch, iboxProcessed);

                Mat normalized = new Mat();
                normalized = templateMatch.Normalize(0, 255, NormTypes.MinMax);
                normalized.ConvertTo(normalized, MatType.CV_8UC1);
                LogEvent("normalized template match", showMessageBoxes, normalized, iboxProcessed);


                // This winnows down the number of matches.
                Mat thresh = new Mat();
                Cv2.Threshold(normalized, thresh, 200, 255, ThresholdTypes.Binary);
                LogEvent("threshold template match", showMessageBoxes, thresh, iboxProcessed);

                // rotate the image because hough doesn't work very well to find horizontal lines.
                Mat rotatedThresh = new Mat();
                Cv2E.RotateDegrees(thresh, rotatedThresh, houghRotationOffsetAngle);
                LogEvent("rotatedThresh", showMessageBoxes, rotatedThresh, iboxProcessed);

                Mat erode = new Mat();
                Cv2.Erode(rotatedThresh, erode, new Mat());
                LogEvent("erode", showMessageBoxes, erode, iboxProcessed);


                LineSegmentPoint[] segHoughP = Cv2.HoughLinesP(rotatedThresh, 1, Math.PI / 1800, 2, 10, 600);


                Mat imageOutP = new Mat(src.Size(), MatType.CV_8UC3);

                // We're limiting the rotation correction to +/- 10 degrees. So we only care about hough lines that fall within 80 to 100 or 170 to 190
                List <double> anglesNear90 = new List <double>();
                List <double> anglesNear0  = new List <double>();

                foreach (LineSegmentPoint s in segHoughP)
                {
                    try
                    {
                        // Add lines to the image, if we're going to look at it.
                        if (showMessageBoxes)
                        {
                            imageOutP.Line(s.P1, s.P2, Scalar.White, 1, LineTypes.AntiAlias, 0);
                        }

                        var radian = Math.Atan2((s.P1.Y - s.P2.Y), (s.P1.X - s.P2.X));
                        var angle  = ((radian * (180 / Math.PI) + 360) % 360);

                        // We rotated the image because the hough algo does a bad job with small horizontal lines. So we take that rotation back out here.
                        angle += houghRotationOffsetAngle;
                        angle -= 180;

                        if (angle > 80 && angle < 100)
                        {
                            anglesNear90.Add(angle);
                            if (showMessageBoxes)
                            {
                                imageOutP.Line(s.P1, s.P2, Scalar.Red, 1, LineTypes.AntiAlias, 0);
                            }
                        }

                        if (angle > -10 && angle < 10)
                        {
                            anglesNear0.Add(angle);
                            if (showMessageBoxes)
                            {
                                imageOutP.Line(s.P1, s.P2, Scalar.Orange, 1, LineTypes.AntiAlias, 0);
                            }
                        }
                    }
                    catch (Exception ex)
                    {
                        // there's always some infinity risk with atan, yes? Maybe. I don't want to fail on horizontal or vertical line edge cases.
                    }
                }

                double meanAngleNear0 = 0;
                if (anglesNear0.Count > 0)
                {
                    meanAngleNear0 = anglesNear0.Mean();
                }

                double meanAngleNear90 = 90;
                if (anglesNear90.Count > 0)
                {
                    meanAngleNear90 = anglesNear90.Mean();
                }



                // Use both the vertical and horizontal to calculate the image angle with a weighted average. It might be more accurate to use median instead of mean here.
                rotationAngle = ((meanAngleNear0) * anglesNear0.Count + (meanAngleNear90 - 90) * anglesNear90.Count) / (anglesNear0.Count + anglesNear90.Count);

                LogEvent("hough lines", showMessageBoxes, imageOutP, iboxProcessed);

                stopWatch.Stop();
                // Get the elapsed time as a TimeSpan value. Less than 400msec in debug mode via IDE.
                TimeSpan ts = stopWatch.Elapsed;
                msecElapsed = ts.TotalMilliseconds;
            }
            catch (Exception ex)
            {
            }

            return(rotationAngle);
        }
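A worked check of the final weighted average above, with hypothetical values: if anglesNear0 = {1.0, 2.0} (mean 1.5) and anglesNear90 = {91.0} (mean 91.0), then rotationAngle = (1.5 * 2 + (91.0 - 90) * 1) / (2 + 1) = 4.0 / 3 ≈ 1.33 degrees; each line population pulls the estimate in proportion to how many lines it contributed.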
Code Example #10
File: TemplateMatch.cs Project: EdHubbell/CSAMUtils
        public static void MatchTranspTemplate(string sFilePath, string sTemplateFilePath, bool showMessageBoxes, ImageBox rawImageBox, ImageBox processedImageBox)
        {
            Mat src = new Mat(sFilePath, ImreadModes.Grayscale);

            rawImageBox.Image = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(src);

            // I'm not sure why we do the gauss - It seems like everyone does it, it's cheap, so we do it. ~Ed
            LogEvent("gauss", showMessageBoxes);
            Mat gauss = new Mat();

            Cv2.GaussianBlur(src, gauss, new OpenCvSharp.Size(3, 3), 2, 2);
            processedImageBox.Image = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(gauss);

            // An attempt to get the contrast across the image to be somewhat uniform.
            LogEvent("clahe", showMessageBoxes);
            CLAHE claheFilter = Cv2.CreateCLAHE(4, new OpenCvSharp.Size(10, 10));
            Mat   clahe       = new Mat();

            claheFilter.Apply(gauss, clahe);
            processedImageBox.Image = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(clahe);

            // Grab a template from some middle part of the image. Eventually, the size and location of this
            // template will be specified. It is very possible we'll have to grab multiple templates, as the
            // location of the template may impact the accuracy of the rotation.
            // e.g. - if the template is an image of a damaged device (which may happen at any location), the calculated
            // rotation may be wrong. Testing is required.
            // The locations where the template matches will create an image with lines that are offset from 0/90 degrees.
            // This is because we can assume that the devices are orthogonal to one another, even if the image itself is
            // offset rotationally.
            //Rect r1 = new Rect(new OpenCvSharp.Point(1000, 1000), new OpenCvSharp.Size(500, 300));
            //var roi = new Mat(clahe, r1);
            //Mat template = new Mat(new OpenCvSharp.Size(500, 300), MatType.CV_8UC1);
            //roi.CopyTo(template);


            Mat template = new Mat(sTemplateFilePath, ImreadModes.Grayscale);

            processedImageBox.Image = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(template);

            //LogEvent("preotsu", showMessageBoxes);
            //Mat preotsu = new Mat();
            //Cv2.Threshold(clahe, preotsu, 240, 255, ThresholdTypes.Binary);
            //processedImageBox.Image = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(preotsu);



            Mat templateMatch = new Mat();

            // # According to http://www.devsplanet.com/question/35658323, we can only use cv2.TM_SQDIFF or cv2.TM_CCORR_NORMED for a transparent template
            Cv2.MatchTemplate(clahe, template, templateMatch, TemplateMatchModes.CCorrNormed);


            //LogEvent("template match", showMessageBoxes);
            // can't do this - image is 32, needs to be 8uc1 or similar to convert to bitmap.
            //processedImageBox.Image = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(templateMatch);


            Mat converted = new Mat();

            converted = templateMatch.Normalize(0, 255, NormTypes.MinMax);

            converted.ConvertTo(converted, MatType.CV_8UC1);

            LogEvent("template match converted", showMessageBoxes);
            processedImageBox.Image = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(converted);

            // This winnows down the number of matches.
            LogEvent("thresh", showMessageBoxes);
            Mat thresh = new Mat();

            Cv2.Threshold(converted, thresh, 240, 255, ThresholdTypes.Binary);
            processedImageBox.Image = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(thresh);


            string matchFilename = string.Format("Matches_{0}.jpg", DateTime.Now.ToString("yyyyMMdd_hhmmss"));

            thresh.SaveImage(@"..\..\..\ExampleFiles\Templates\" + matchFilename);
        }
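The method above keeps every strong match by thresholding the normalized response. If only the single best match were wanted, MinMaxLoc would locate it directly; a hedged sketch (for CCorrNormed the best match sits at the maximum):

        Cv2.MinMaxLoc(templateMatch, out double minVal, out double maxVal,
                      out OpenCvSharp.Point minLoc, out OpenCvSharp.Point maxLoc);
        // maxLoc is the top-left corner of the best-matching region.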
Code Example #11
        /// <summary>
        /// Analyze video frame
        /// </summary>
        private void AnalyzeVideo()
        {
            var newImage = new Mat();
            var newFrame = new CFrame();

            newFrame = cframe;
            newImage = newFrame.Frame;
            newImage = newImage.CvtColor(ColorConversionCodes.BGR2GRAY);
            Utilities.debugmessage("Clahe: " + Cv2.Mean(newImage)[0]);
            clhLight.SetClipLimit(2);
            clhLight.Apply(newImage, newImage);
            var st = new Stopwatch();

            Utilities.debugmessage("Clahe: " + Cv2.Mean(newImage)[0]);
            st.Start();
            List <YoloItem> items = yoloWrapper.Detect(newImage.Resize(new OpenCvSharp.Size(w, h)).ToBytes()).ToList();

            var coeffW = ((float)newImage.Width / w);
            var coeffH = ((float)newImage.Height / h);


            foreach (var itm in items)
            {
                if (itm.Confidence < 0.66)
                {
                    break;
                }
                if (itm.Type == "dcoup")
                {
                    // Logging, Tracking

                    TimeSpan curTime = TimeSpan.FromMilliseconds(newFrame.frameNum * frameTime);

                    string[] _toAdd =
                    {
                        newFrame.frameNum.ToString(),
                        (itm.X * coeffW).ToString(),
                        (itm.Y * coeffH).ToString(),
                        ((itm.X * coeffW) + (itm.Width * coeffW)).ToString(),
                        ((itm.Y * coeffH) + (itm.Height * coeffH)).ToString(),
                        curTime.ToString(@"hh\:mm\:ss")
                    };

                    Log(_toAdd);


                    //if (myConnection.State == System.Data.ConnectionState.Open)
                    //{
                    //    try
                    //    {
                    //        dataBaseLog(_toAdd);
                    //    }
                    //    catch { }
                    //}

                    ListViewItem item1 = new ListViewItem(newFrame.frameNum.ToString(), 0);
                    item1.SubItems.Add((itm.X * coeffW).ToString());
                    item1.SubItems.Add((itm.Y * coeffH).ToString());
                    item1.SubItems.Add((itm.Width * coeffW + (itm.X * coeffW)).ToString());
                    item1.SubItems.Add((itm.Height * coeffH + (itm.Y * coeffH)).ToString());
                    item1.SubItems.Add(curTime.ToString(@"hh\:mm\:ss"));

                    window.listView1.BeginInvoke(new Action(() =>
                    {
                        window.listView1.Items.AddRange(new ListViewItem[] { item1 });
                    }));

                    // Tracking algorithm
                    // Speed limit ~80 KM/h (if length between coups is 12-15 meters)
                    if (((newFrame.frameNum * frameTime) > (checkTime + 650)) && Math.Abs((float)masTrackDcoup * coeffW - (float)itm.X * coeffW) < 30)
                    {
                        checkTime     = (newFrame.frameNum * frameTime);
                        masTrackDcoup = itm.X;
                    }
                    else if (((newFrame.frameNum * frameTime) > (checkTime + 650)))
                    {
                        CoupCount++;
                        checkTime     = (newFrame.frameNum * frameTime);
                        masTrackDcoup = itm.X;
                    }
                    else if (CoupCount == 0)
                    {
                        CoupCount++;
                        checkTime     = (newFrame.frameNum * frameTime);
                        masTrackDcoup = itm.X;
                    }
                    else
                    {
                        checkTime     = (newFrame.frameNum * frameTime);
                        masTrackDcoup = itm.X;
                    }
                }
            }
            st.Stop();

            window.toolStripTimer.Text   = "Elapsed time: " + st.ElapsedMilliseconds + " ms";
            window.toolStripCounter.Text = "Count: " + CoupCount;

            // Drawing new frame in picBox
            window.picBox.BeginInvoke(new Action(() =>
            {
                window.picBox.ImageIpl = newImage.Resize(new OpenCvSharp.Size(window.picBox.Width, window.picBox.Height));
                window.picBox.setRect(items);
            }));
            if (cframe.frameNum + 1 > frameCnt)
            {
                PLAY_FLAG = false;
                window.Invoke(new Action(() =>
                {
                    window.pauseButton.Enabled = false;
                    window.btn_Detect.Enabled  = true;
                }));
            }

            analyzeStarted = false;
        }
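The coefficient math above maps detections from the w x h network input back to full-frame coordinates: coeffW = frameWidth / w and coeffH = frameHeight / h, and each axis must be scaled by its own coefficient. A minimal sketch of the corner calculation (property names taken from the YoloItem usage above):

        float left   = itm.X * coeffW;               // scale X by the width ratio
        float top    = itm.Y * coeffH;               // scale Y by the height ratio
        float right  = (itm.X + itm.Width) * coeffW; // width also scales by coeffW
        float bottom = (itm.Y + itm.Height) * coeffH;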