Ejemplo n.º 1
0
        private void btnSetFilter_Click(object sender, EventArgs e)
        {
            // Apply an edge-preserving bilateral smoothing (d = 11, sigmaColor = 17,
            // sigmaSpace = 17) to the currently displayed image and show the result.
            var smoothed = new Mat();
            CvInvoke.BilateralFilter(imgClone.Image, smoothed, 11, 17, 17);
            imgClone.Image = smoothed;
        }
Ejemplo n.º 2
0
        /// <summary>
        /// Returns a bilaterally filtered copy of the given grayscale image.
        /// Parameters: neighborhood diameter 25, sigmaColor 50, sigmaSpace 12
        /// (25 / 2 is integer division before widening to double).
        /// </summary>
        public static Image <Gray, Byte> BilateralFilter(Image <Gray, Byte> grayImage)
        {
            var result = grayImage.CopyBlank();
            CvInvoke.BilateralFilter(grayImage, result, 25, 25 * 2, 25 / 2);
            return result;
        }
Ejemplo n.º 3
0
        /// <summary>
        /// Applies the configured blur (Gaussian, median or bilateral) to every output
        /// image of every source, collecting the results into <c>m_out</c>.
        /// Returns false if any step throws; true otherwise.
        /// </summary>
        public override bool Do()
        {
            ClearOut();
            try
            {
                BlurEnum blurType = (BlurEnum)EnumPropertys["Type"].Value;

                foreach (var src in Sources)
                {
                    foreach (var outData in src.GetOut())
                    {
                        Mat imgout    = new Mat();
                        // Kernel size is forced odd (k*2+1), as required by the blur functions.
                        int nBlurKern = IntPropertys["Kern"].Value * 2 + 1;
                        switch (blurType)
                        {
                        case BlurEnum.Gaussian:
                            CvInvoke.GaussianBlur(outData.Image, imgout, new System.Drawing.Size(nBlurKern, nBlurKern), 0);
                            break;

                        case BlurEnum.Median:
                            CvInvoke.MedianBlur(outData.Image, imgout, nBlurKern);
                            break;

                        case BlurEnum.Bileteral:
                            // NOTE(review): trailing 0 is the borderType argument — presumably
                            // BORDER_CONSTANT rather than the OpenCV default; confirm intended.
                            CvInvoke.BilateralFilter(outData.Image, imgout, nBlurKern, nBlurKern * 2, nBlurKern * 2, 0);
                            break;
                        }
                        this.m_out.Add(new DataSrc(imgout, outData.Info, false));
                    }
                }
            // NOTE(review): bare catch swallows the exception details; the caller only
            // learns that the operation failed, not why.
            } catch { return(false); }
            return(true);
        }
Ejemplo n.º 4
0
        Mat dst = new Mat();// blank output image (field — reused by every invocation of the handler below)
        private void BLToolStripMenuItem_Click(object sender, EventArgs e)
        {
            // Ask the user for a filter size via the sub-window dialog, then apply a
            // bilateral filter with that neighborhood diameter to the source image.
            Subwindows sub = new Subwindows();

            // NOTE(review): the dialog result is ignored — sub.size is read even if the
            // user cancelled; confirm Subwindows guarantees a valid size in that case.
            sub.ShowDialog();

            CvInvoke.BilateralFilter(image, dst, sub.size, 25 * 2.0, 25 / 2.0);
            resultpic.Image = dst;
        }
Ejemplo n.º 5
0
        /// <summary>
        /// Applies EmguCV's bilateral filter to a grayscale image and returns the result.
        /// Signature reference: CvInvoke.BilateralFilter(inputArray, outputArray,
        /// int neighborhood diameter, double sigmaColor, double sigmaSpace, optional borderType).
        /// </summary>
        public Image <Gray, byte> bilateralFilter(Image <Gray, byte> grayImage)
        {
            var filtered = grayImage.CopyBlank();
            CvInvoke.BilateralFilter(grayImage, filtered, 7, 25, 60);
            return filtered;
        }
Ejemplo n.º 6
0
        private void filterToolStripMenuItem_Click(object sender, EventArgs e)
        {
            // First run filters the source picture box; later runs re-filter the result box.
            Bitmap sourceBitmap = processed ? (Bitmap)resPicBox.Image : (Bitmap)srcPicBox.Image;
            var input  = new Image <Bgr, byte>(sourceBitmap);
            var output = new Image <Bgr, byte>(input.Bitmap);

            CvInvoke.BilateralFilter(input, output, 25, 75, 75);
            resPicBox.Image = output.ToBitmap();
            processed       = true;
        }
Ejemplo n.º 7
0
        // не работает, дебаг
        public static Image <Gray, Byte> LineBasedDetection(Image <Gray, Byte> inputImg)
        {
            Image <Gray, Byte> smoothedImg = inputImg.SmoothMedian(3);

            //CvInvoke.CLAHE(smoothedImg, 40, new System.Drawing.Size(16, 16), smoothedImg);
            CvInvoke.BilateralFilter(inputImg.SmoothMedian(3), smoothedImg, 50, 140, -1);
            smoothedImg = smoothedImg.ThresholdToZero(new Gray(30));

            #region Kernels and stuff
            float[,] TopKernelMatrix = new float[, ] {
                { 0, -1, 0 },
                { 0, 0, 0 },
                { 0, 1, 0, }
            };
            float[,] BottomKernelMatrix = new float[, ] {
                { 0, 1, 0 },
                { 0, 0, 0 },
                { 0, -1, 0, }
            };
            float[,] LeftKernelMatrix = new float[, ] {
                { 0, 0, 0 },
                { 1, 0, -1 },
                { 0, 0, 0, }
            };
            float[,] RightKernelMatrix = new float[, ] {
                { 0, 0, 0 },
                { -1, 0, 1 },
                { 0, 0, 0, }
            };
            ConvolutionKernelF TopKernel    = new ConvolutionKernelF(TopKernelMatrix);
            ConvolutionKernelF BottomKernel = new ConvolutionKernelF(BottomKernelMatrix);
            ConvolutionKernelF LeftKernel   = new ConvolutionKernelF(LeftKernelMatrix);
            ConvolutionKernelF RightKernel  = new ConvolutionKernelF(RightKernelMatrix);
            #endregion

            #region Craft top / bottom / left and right images with borders
            Image <Gray, Byte> TopImg    = new Image <Gray, byte>(inputImg.Size);
            Image <Gray, Byte> BottomImg = new Image <Gray, byte>(inputImg.Size);
            Image <Gray, Byte> LeftImg   = new Image <Gray, byte>(inputImg.Size);
            Image <Gray, Byte> RightImg  = new Image <Gray, byte>(inputImg.Size);


            CvInvoke.Filter2D(smoothedImg, TopImg, TopKernel, new Point(1, 1));
            CvInvoke.Filter2D(smoothedImg, BottomImg, BottomKernel, new Point(1, 1));
            CvInvoke.Filter2D(smoothedImg, LeftImg, LeftKernel, new Point(1, 1));
            CvInvoke.Filter2D(smoothedImg, RightImg, RightKernel, new Point(1, 1));

            TopImg    = TopImg.ThresholdBinary(new Gray(15), new Gray(255));
            BottomImg = BottomImg.ThresholdBinary(new Gray(15), new Gray(255));
            LeftImg   = LeftImg.ThresholdBinary(new Gray(15), new Gray(255));
            RightImg  = RightImg.ThresholdBinary(new Gray(15), new Gray(255));
            #endregion

            # region Находим контуры для всех 4х изображений
Ejemplo n.º 8
0
    // Blurs src into dst using the requested algorithm:
    // 0 = median, 1 = Gaussian, 2 = bilateral (fixed diameter 9, size drives both sigmas).
    // Any other type leaves dst untouched.
    void blurFeed(Mat src, Mat dst, int size, int type)
    {
        if (type == 0)
        {
            CvInvoke.MedianBlur(src, dst, size);
        }
        else if (type == 1)
        {
            CvInvoke.GaussianBlur(src, dst, new Size(size, size), 0);
        }
        else if (type == 2)
        {
            CvInvoke.BilateralFilter(src, dst, 9, size, size);
        }
    }
Ejemplo n.º 9
0
        private void button6_Click(object sender, EventArgs e)
        {
            Image <Bgr, byte> dst = src.CopyBlank();

            CvInvoke.BilateralFilter(src, dst, g_nBilateralFilterValue, g_nBilateralFilterValue * 2, g_nBilateralFilterValue / 2);
            // Parameter 1: src (InputArray) — the source image; must be an 8-bit or floating-point, single- or three-channel image.
            // Parameter 2: dst (OutputArray) — the destination image; must have the same size and type as the source.
            // Parameter 3: d (int) — diameter of each pixel neighborhood used during filtering. If non-positive, OpenCV computes it from sigmaSpace.
            // Parameter 4: sigmaColor (double) — filter sigma in the color space. A larger value means that farther colors within the pixel neighborhood are mixed together, producing larger areas of semi-equal color.
            // Parameter 5: sigmaSpace (double) — filter sigma in the coordinate space. A larger value means that farther pixels influence each other, so larger areas of sufficiently similar color take the same color. When d > 0 it specifies the neighborhood size regardless of sigmaSpace; otherwise d is proportional to sigmaSpace.
            // Parameter 6: borderType (int) — border mode used to extrapolate pixels outside the image. It has the default value BORDER_DEFAULT.
            imageBox2.Image = dst;
        }
Ejemplo n.º 10
0
        /// <summary>
        /// Preprocessing pipeline: bilateral filter, histogram equalization, linear
        /// contrast/brightness adjustment from configuration, then a second bilateral
        /// filter. The input Mat is modified in place and also returned. Intermediate
        /// results are shown in non-modal ImageViewer windows.
        /// </summary>
        /// <param name="image">Image to preprocess; overwritten with the processed result.</param>
        /// <returns>The processed image (the same instance as <paramref name="image"/>).</returns>
        public Mat PreProcess(ref Mat image)
        {
            // BilateralFilter cannot run in place, so filter from a temporary clone back
            // into the original. The clone is disposed to release its native buffer
            // (previously it leaked on every call).
            using (Mat snapshot = image.Clone())
            {
                CvInvoke.BilateralFilter(snapshot, image, _configurations.BILATERAL_FILTER_DISTANCE, _configurations.BILATERAL_FILTER_SIGMA_COLOR, _configurations.BILATERAL_FILTER_SIGMA_SPACE);
            }

            var viewer = new ImageViewer(image, "Bilateral Filter Applied");

            viewer.Show();

            image = HistogramEqualization(image, true);

            // Linear adjustment: alpha (contrast) and beta (brightness) are stored
            // premultiplied by 10 in the configuration.
            image.ConvertTo(image, DepthType.Default, (double)_configurations.CONTRAST_MULTIPLIED_BY_10 / 10.0, (double)_configurations.BRIGHTNESS_MULTIPLIED_BY_10 / 10.0);

            using (Mat snapshot = image.Clone())
            {
                CvInvoke.BilateralFilter(snapshot, image, _configurations.BILATERAL_FILTER_DISTANCE, _configurations.BILATERAL_FILTER_SIGMA_COLOR, _configurations.BILATERAL_FILTER_SIGMA_SPACE);
            }

            viewer = new ImageViewer(image, "Adjusted Contrast and Illumination");
            viewer.Show();

            return(image);
        }
Ejemplo n.º 11
0
        /// <summary>
        /// Per-frame capture callback: grabs a frame, smooths it with a bilateral filter,
        /// converts to HSV, thresholds four color masks, draws detected circles for each
        /// color onto the frame, and updates the camera and output picture boxes.
        /// </summary>
        private void ProcessFrame(object sender, EventArgs arg)
        {
            // NOTE(review): QueryFrame() can return null when the capture ends — this
            // would throw here; confirm the capture source is continuous.
            Image <Bgr, byte> ImageFrame  = capture.QueryFrame().ToImage <Bgr, Byte>();
            Image <Bgr, byte> filterImage = new Image <Bgr, byte>(ImageFrame.Size);

            CvInvoke.BilateralFilter(ImageFrame, filterImage, 9, 80, 150);
            Image <Hsv, byte> hsvImage = filterImage.Convert <Hsv, Byte>();

            // red circle
            // Each Point presumably encodes an inclusive hue range (X = low, Y = high);
            // red needs two ranges because its hue wraps around 0/179 — confirm against ThresholdImage.
            Image <Gray, byte> redImage = ThresholdImage(hsvImage, new Point[] { new Point(0, 10), new Point(163, 179) });

            // blue circle
            Image <Gray, byte> blueImage = ThresholdImage(hsvImage, new Point[] { new Point(100, 120) });

            // green circle
            Image <Gray, byte> greenImage = ThresholdImage(hsvImage, new Point[] { new Point(50, 85) });

            // yellow circle
            Image <Gray, byte> yellowImage = ThresholdImage(hsvImage, new Point[] { new Point(20, 40) });

            // Draw circles of each color onto the frame, accumulating into one image.
            var temp = DetectAndDrawCircles(redImage, ImageFrame, new MCvScalar(50, 50, 150), Color.Red);

            temp            = DetectAndDrawCircles(blueImage, temp, new MCvScalar(150, 50, 0), Color.Blue);
            temp            = DetectAndDrawCircles(yellowImage, temp, new MCvScalar(0, 200, 200), Color.Yellow);
            temp            = DetectAndDrawCircles(greenImage, temp, new MCvScalar(50, 150, 0), Color.Green);
            CamImgBox.Image = DrawTrack(temp);

            // Show the selected color's binary mask in the output box.
            // NOTE(review): there is no Color.Red case, so selecting red leaves the
            // output box unchanged — confirm whether that is intentional.
            switch (filter)
            {
            case Color.Blue: outImgBox.Image = blueImage;
                break;

            case Color.Green: outImgBox.Image = greenImage;
                break;

            case Color.Yellow: outImgBox.Image = yellowImage;
                break;

            default:
                break;
            }
        }
Ejemplo n.º 12
0
        /// <summary>
        /// Preprocess original image: grayscale conversion, Gaussian blur, bilateral
        /// filtering, tophat morphology, and adaptive thresholding.
        /// </summary>
        /// <param name="imgOriginal">Original (BGR) image</param>
        /// <param name="imgGrayscale">Receives the grayscale version of the image</param>
        /// <param name="imgThresh">Receives the adaptively thresholded tophat image</param>
        /// <param name="errorCode">Set to 4 when preprocessing fails</param>
        public static void Preprocess(Mat imgOriginal, ref Mat imgGrayscale, ref Mat imgThresh, ref int errorCode)
        {
            try
            {
                // Intermediate images are disposed when finished so their native
                // buffers are released (they previously leaked on every call).
                using (Mat imgBlurred = new Mat())
                using (Mat imgBilateralFilter = new Mat())
                using (Mat imgTophat = new Mat())
                // Structuring element used for the tophat morphological operation
                using (Mat structuringElement = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(40, 20), new Point(-1, -1)))
                {
                    // Convert the original image to grayscale
                    CvInvoke.CvtColor(imgOriginal, imgGrayscale, ColorConversion.Bgr2Gray);

                    // Gaussian blur to suppress high-frequency noise
                    CvInvoke.GaussianBlur(imgGrayscale, imgBlurred, new Size(3, 3), 3);

                    // Edge-preserving bilateral filter
                    CvInvoke.BilateralFilter(imgBlurred, imgBilateralFilter, 10, 15, 15);

                    // Tophat highlights bright regions smaller than the structuring element
                    CvInvoke.MorphologyEx(imgBilateralFilter, imgTophat, MorphOp.Tophat, structuringElement, new Point(-1, -1), 1, BorderType.Default, new MCvScalar());

                    // Adaptive threshold from the tophat image
                    CvInvoke.AdaptiveThreshold(imgTophat, imgThresh, 250, AdaptiveThresholdType.GaussianC, ThresholdType.Binary, 25, -5);
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex);
                errorCode = 4;
            }
        }
Ejemplo n.º 13
0
        /// <summary>
        /// Searches the candidate plate images for one whose character segmentation
        /// yields a plausible number of character-sized contours (5 to 9). The two
        /// passes run the same pipeline with different tuning parameters (filter
        /// sigmas, tophat iterations, threshold block size/constant, morphology
        /// elements, and whether a maximum contour width is enforced).
        /// </summary>
        /// <param name="potentialPlates">List of potentials plates</param>
        /// <param name="pass">What pass is executing ("firstPass" or "secondPass")</param>
        /// <param name="errorCode">Error code (set to 8 on exception)</param>
        /// <returns>Key value pair of license plate image and its treshold; an empty pair when no plate qualifies</returns>
        private static KeyValuePair <Mat, Mat> FindPlateInPotentialPlates(List <Mat> potentialPlates, string pass, ref int errorCode)
        {
            // Initiate license plate variable
            KeyValuePair <Mat, Mat> licensePlate = new KeyValuePair <Mat, Mat>();

            // Initiate variable for if license plate was found
            bool licensePlateFound = false;

            try
            {
                // Check what pass
                if (pass == "firstPass")
                {
                    // Loop through all potential plates
                    foreach (var potentialPlate in potentialPlates)
                    {
                        // Initate images used
                        Mat imgBilateralFilter = new Mat();
                        Mat imgThresh          = new Mat();
                        Mat imgOpen            = new Mat();

                        // Initiate structuring elements used in morphological procedures
                        Mat structuringElement = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(150, 150), new Point(-1, -1));
                        Mat openElement        = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(7, 7), new Point(-1, -1));
                        Mat closeElement       = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(8, 15), new Point(-1, -1));
                        Mat erodeElement       = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(3, 3), new Point(-1, -1));

                        // Initiate other variables
                        VectorOfVectorOfPoint contours           = new VectorOfVectorOfPoint();
                        List <Rectangle>      boundingRectangles = new List <Rectangle>();

                        // Maximize contrast of a potential plate
                        Mat imgMaxContrast = ImageProcess.MaximizeContrast(potentialPlate, ref errorCode);

                        // Apply bilateral filter to image with contrast maximized
                        CvInvoke.BilateralFilter(imgMaxContrast, imgBilateralFilter, 5, 20, 20); // 40 20 20

                        // Do morphological operation of tophat
                        CvInvoke.MorphologyEx(imgBilateralFilter, imgOpen, MorphOp.Tophat, structuringElement, new Point(-1, -1), 2, BorderType.Default, new MCvScalar());

                        // Do adaptive treshold to tophat image
                        CvInvoke.AdaptiveThreshold(imgOpen, imgThresh, 255, AdaptiveThresholdType.GaussianC, ThresholdType.BinaryInv, 295, 19); // first pass

                        // Do morphological operation of open to thresholded image
                        CvInvoke.MorphologyEx(imgThresh, imgThresh, MorphOp.Open, openElement, new Point(-1, -1), 1, BorderType.Default, new MCvScalar());

                        // Do morphological operation of erode to open image
                        CvInvoke.MorphologyEx(imgThresh, imgThresh, MorphOp.Erode, erodeElement, new Point(-1, -1), 1, BorderType.Constant, new MCvScalar());

                        // Do morphological operation of close to thresholded image
                        CvInvoke.MorphologyEx(imgThresh, imgThresh, MorphOp.Close, closeElement, new Point(-1, -1), 1, BorderType.Constant, new MCvScalar());


                        // Find contours in erode image
                        CvInvoke.FindContours(imgThresh, contours, null, RetrType.External, ChainApproxMethod.ChainApproxSimple);

                        Image <Bgr, byte> imgContours = imgThresh.ToImage <Bgr, byte>();

                        // Loop through found contours
                        for (int i = 0; i < contours.Size; i++)
                        {
                            // Create a bounding rectangle of the contour
                            var boundingRectangle = CvInvoke.BoundingRectangle(contours[i]);

                            // Check if contour has required dimensions
                            // (first pass also enforces a maximum width, unlike the second pass)
                            if (boundingRectangle.Height >= Properties.Settings.Default.boundingRectangleHeightMin && boundingRectangle.Height <= Properties.Settings.Default.boundingRectangleHeightMax && boundingRectangle.Width <= Properties.Settings.Default.boundingRectangleWidthMax)
                            {
                                CvInvoke.Rectangle(imgContours, boundingRectangle, new MCvScalar(0, 0, 255));

                                // Add bounding rectangle to the list of bounding rectangles
                                boundingRectangles.Add(boundingRectangle);
                            }
                        }

                        // Check if the list of bounding rectangles has the required number of items
                        // (5-9 character-sized contours suggests a readable plate)
                        if (boundingRectangles.Count >= 5 && boundingRectangles.Count <= 9)
                        {
                            // Make a key value pair with the cropped image and its threshold
                            licensePlate = new KeyValuePair <Mat, Mat>(potentialPlate, imgThresh);

                            licensePlateFound = true;

                            // Check if debug enabled
                            if (Properties.Settings.Default.debug)
                            {
                                // Show image of plate segmented
                                CvInvoke.Imshow("Plate segmented first pass", imgContours.Mat);
                                //CvInvoke.Imshow("Plate segmented first pass", potentialPlate);
                            }

                            // Stop from searching further
                            break;
                        }
                    }
                }
                else if (pass == "secondPass")
                {
                    foreach (var potentialPlate in potentialPlates)
                    {
                        // Initate images used
                        Mat imgBilateralFilter = new Mat();
                        Mat imgThresh          = new Mat();
                        Mat imgOpen            = new Mat();

                        // Initiate structuring elements used in morphological procedures
                        Mat structuringElement = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(150, 150), new Point(-1, -1));
                        Mat openElement        = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(7, 7), new Point(-1, -1));
                        Mat closeElement       = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(5, 5), new Point(-1, -1));
                        Mat erodeElement       = CvInvoke.GetStructuringElement(ElementShape.Ellipse, new Size(3, 3), new Point(-1, -1));

                        // Initiate other variables
                        VectorOfVectorOfPoint contours           = new VectorOfVectorOfPoint();
                        List <Rectangle>      boundingRectangles = new List <Rectangle>();

                        // Maximize contrast of a potential plate
                        Mat imgMaxContrast = ImageProcess.MaximizeContrast(potentialPlate, ref errorCode);

                        // Apply bilateral filter to image with contrast maximized
                        // (weaker sigmas than the first pass)
                        CvInvoke.BilateralFilter(imgMaxContrast, imgBilateralFilter, 5, 10, 10); // 20 10 10


                        // Do morphological operation of tophat
                        CvInvoke.MorphologyEx(imgBilateralFilter, imgOpen, MorphOp.Tophat, structuringElement, new Point(-1, -1), 1, BorderType.Default, new MCvScalar());

                        // Do adaptive treshold to tophat image
                        CvInvoke.AdaptiveThreshold(imgOpen, imgThresh, 255, AdaptiveThresholdType.GaussianC, ThresholdType.BinaryInv, 105, 5); // second pass

                        // Do morphological operation of open to thresholded image
                        CvInvoke.MorphologyEx(imgThresh, imgThresh, MorphOp.Open, openElement, new Point(-1, -1), 1, BorderType.Constant, new MCvScalar());

                        // Do morphological operation of erode to open image
                        CvInvoke.MorphologyEx(imgThresh, imgThresh, MorphOp.Erode, erodeElement, new Point(-1, -1), 1, BorderType.Constant, new MCvScalar());

                        // Do morphological operation of close to thresholded image
                        CvInvoke.MorphologyEx(imgThresh, imgThresh, MorphOp.Close, closeElement, new Point(-1, -1), 1, BorderType.Constant, new MCvScalar());

                        // Find contours in erode image
                        CvInvoke.FindContours(imgThresh, contours, null, RetrType.External, ChainApproxMethod.ChainApproxSimple);


                        Image <Bgr, byte> imgContours = imgThresh.ToImage <Bgr, byte>();

                        // Loop through found contours
                        for (int i = 0; i < contours.Size; i++)
                        {
                            // Create a bounding rectangle of the contour
                            var boundingRectangle = CvInvoke.BoundingRectangle(contours[i]);

                            // Check if contour has required dimensions
                            if (boundingRectangle.Height >= Properties.Settings.Default.boundingRectangleHeightMin && boundingRectangle.Height <= Properties.Settings.Default.boundingRectangleHeightMax)
                            {
                                CvInvoke.Rectangle(imgContours, boundingRectangle, new MCvScalar(0, 0, 255));

                                // Add bounding rectangle to the list of bounding rectangles
                                boundingRectangles.Add(boundingRectangle);
                            }
                        }

                        // Check if the list of bounding rectangles has the required number of items
                        if (boundingRectangles.Count >= 5 && boundingRectangles.Count <= 9)
                        {
                            // Make a key value pair with the cropped image and its threshold
                            licensePlate = new KeyValuePair <Mat, Mat>(potentialPlate, imgThresh);

                            licensePlateFound = true;

                            // Check if debug enabled
                            if (Properties.Settings.Default.debug)
                            {
                                // Show image of plate segmented
                                CvInvoke.Imshow("Plate segmented second pass", imgContours.Mat);
                                //CvInvoke.Imshow("Plate segmented second pass", potentialPlate);
                            }

                            // Stop from searching further
                            break;
                        }
                    }
                }

                // Check if license plate has been found
                if (!licensePlateFound)
                {
                    // Return null
                    return(new KeyValuePair <Mat, Mat>());
                }
                else
                {
                    // Return the key value pair
                    return(licensePlate);
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex);
                errorCode = 8;
                return(new KeyValuePair <Mat, Mat>());
            }
        }
Ejemplo n.º 14
0
        /// <summary>
        /// Alternate shape-detection pipeline: denoises the bitmap according to
        /// <paramref name="level"/>, thresholds it, extracts blobs with AForge,
        /// classifies each blob as circle / rectangle / triangle, draws the detected
        /// shapes, sorts their center points, and connects them with a red curve
        /// (Bezier when more than 3 points, straight segments otherwise).
        /// </summary>
        /// <param name="bm">Input bitmap to analyze.</param>
        /// <param name="level">Denoising preset: 1 = bilateral+median, 2 = median+bilateral+box blur.</param>
        private void altProcess(Bitmap bm, int level)
        {
            var img = new Image <Bgr, byte>(bm);

            if (level == 1)
            {
                var resImage = new Image <Bgr, byte>(img.Bitmap);
                CvInvoke.BilateralFilter(resImage, img, 30, 80, 80);
                CvInvoke.MedianBlur(img, img, 5);
                // NOTE(review): this assignment has no effect — resImage is not used afterwards.
                resImage = img;
            }
            else if (level == 2)
            {
                CvInvoke.MedianBlur(img, img, 5);
                var resImage = new Image <Bgr, byte>(img.Bitmap);
                CvInvoke.BilateralFilter(resImage, img, 25, 75, 75);
                CvInvoke.Blur(img, img, new Size(5, 5), new Point(0, 0));
            }

            var grayimage = new Image <Gray, byte>(bm);

            CvInvoke.CvtColor(img, grayimage, ColorConversion.Bgr2Gray);

            BlackBG(grayimage);

            Console.WriteLine("Filtering done");

            // Threshold chosen by k-means clustering of the grayscale histogram.
            var cannyThreshold = GetKMeansThreshold(grayimage);

            label2.Text = cannyThreshold.ToString();

            Thresholding(grayimage, cannyThreshold);

            Console.WriteLine("Canny threshold using KMEANS found " + cannyThreshold);

            //Convert the image to grayscale and filter out the noise

            var cannyEdges = new UMat();

            Console.WriteLine("Canny threshold using KMEANS found " + cannyThreshold);

            var uimage = new UMat();

            CvInvoke.CvtColor(img, uimage, ColorConversion.Bgr2Gray);

            CvInvoke.Canny(uimage, cannyEdges, cannyThreshold, cannyThreshold);

            BlobCounter blobCounter = new BlobCounter( );

            // Level 1 extracts blobs from the Canny edge map with a minimum size filter;
            // other levels use the thresholded grayscale image directly.
            if (level == 1)
            {
                blobCounter.FilterBlobs = true;
                blobCounter.MinHeight   = 25;
                blobCounter.MinWidth    = 25;
                blobCounter.ProcessImage(cannyEdges.Bitmap);
            }
            else
            {
                blobCounter.ProcessImage(grayimage.ToBitmap());
            }
            //blobCounter.ProcessImage(grayimage.ToBitmap());

            Blob[] blobs = blobCounter.GetObjectsInformation( );

            SimpleShapeChecker shapeChecker = new SimpleShapeChecker();


            var      triangleList = new List <Triangle2DF>();
            var      boxList      = new List <RotatedRect>();
            var      circleList   = new List <CircleF>();
            Bitmap   newBM        = new Bitmap(img.Bitmap);
            // NOTE(review): g is never disposed (g.Dispose() is commented out below) and
            // is never drawn with — all DrawEllipse/DrawPolygon calls are commented out.
            Graphics g            = Graphics.FromImage(newBM);
            Pen      redPen       = new Pen(Color.Red, 2);


            Pen yellowPen = new Pen(Color.Yellow, 2);
            Pen greenPen  = new Pen(Color.Green, 2);
            Pen bluePen   = new Pen(Color.Blue, 2);

            // Classify each blob's edge points as circle, quadrilateral (square/rectangle
            // only), or triangle; collect shape centers for path construction later.
            for (int i = 0, n = blobs.Length; i < n; i++)
            {
                List <IntPoint> edgePoints =
                    blobCounter.GetBlobsEdgePoints(blobs[i]);

                AForge.Point center;
                float        radius;

                if (shapeChecker.IsCircle(edgePoints, out center, out radius))
                {
                    //g.DrawEllipse(bluePen,
                    //    (float)(center.X - radius), (float)(center.Y - radius),
                    //    (float)(radius * 2), (float)(radius * 2));
                    circleList.Add(new CircleF(new PointF(center.X, center.Y), radius));
                }
                else
                {
                    List <IntPoint> corners;
                    if (edgePoints.Count > 1)
                    {
                        if (shapeChecker.IsQuadrilateral(edgePoints, out corners))
                        {
                            System.Console.WriteLine(corners.Count);
                            if (shapeChecker.CheckPolygonSubType(corners) ==
                                PolygonSubType.Square || shapeChecker.CheckPolygonSubType(corners) ==
                                PolygonSubType.Rectangle)
                            {
                                IntPoint minXY, maxXY;

                                PointsCloud.GetBoundingRectangle(corners, out minXY, out maxXY);
                                AForge.Point c = PointsCloud.GetCenterOfGravity(corners);
                                //g.DrawPolygon(greenPen, ToPointsArray(corners));
                                boxList.Add(new RotatedRect(new PointF(c.X, c.Y), new SizeF(maxXY.X - minXY.X, maxXY.Y - minXY.Y), 0));
                            }
                        }
                        else
                        {
                            corners = PointsCloud.FindQuadrilateralCorners(edgePoints);
                            if (corners.Count == 3)
                            {
                                Triangle2DF tri = new Triangle2DF(new PointF(corners[0].X, corners[0].Y), new PointF(corners[1].X, corners[1].Y), new PointF(corners[2].X, corners[2].Y));
                                triangleList.Add(tri);
                                //g.DrawPolygon(yellowPen, ToPointsArray(corners));
                            }
                            //g.DrawPolygon(redPen, ToPointsArray(corners));
                        }
                    }
                }
            }
            Console.WriteLine("boxes " + boxList.Count);
            Console.WriteLine("triangles " + triangleList.Count);
            Console.WriteLine("circles " + circleList.Count);

            redPen.Dispose();
            greenPen.Dispose();
            bluePen.Dispose();
            yellowPen.Dispose();
            //g.Dispose();
            resPicBox.Image = newBM;
            CircleF[] circles = circleList.ToArray();
            var       cList   = circles.ToList();

            // Remove duplicate/overlapping detections relative to the image area.
            FilterSame(boxList, triangleList, cList, img.Width * img.Height);
            circles = cList.ToArray();
            var points = new List <PointF>();

            // Draw the surviving shapes on a blank canvas and gather their centers.
            var Image = img.CopyBlank();

            foreach (var triangle in triangleList)
            {
                Image.Draw(triangle, new Bgr(Color.Red), 3);
                points.Add(triangle.Centeroid);
            }

            foreach (var box in boxList)
            {
                Image.Draw(box, new Bgr(Color.Blue), 3);
                points.Add(box.Center);
            }

            foreach (var circle in circles)
            {
                Image.Draw(circle, new Bgr(Color.DarkCyan), 3);
                points.Add(circle.Center);
            }

            var listPoints = SortPoints(points, img);

            for (var i = 0; i < listPoints.Length; i++)
            {
                Console.WriteLine(listPoints[i].X.ToString() + " " + listPoints[i].Y.ToString());
            }

            System.Console.WriteLine("Points sorted, num of objects " + listPoints.Length.ToString());
            resPicBox.Image = (Image + img).ToBitmap();
            // Connect the sorted shape centers: Bezier interpolation when more than 3
            // points, plain line segments otherwise.
            if (listPoints.Length > 3)
            {
                var bezSegList = InterpolatePointWithBeizerCurves(listPoints.ToList <PointF>());
                var gr         = Graphics.FromImage(resPicBox.Image);
                var p          = new Pen(Color.Red);

                foreach (BeizerCurveSegment seg in bezSegList)
                {
                    var bezierList = GetBez(new PointF[]
                                            { seg.StartPoint, seg.FirstControlPoint, seg.SecondControlPoint, seg.EndPoint });
                    for (var i = 0; i < bezierList.Length - 1; i++)
                    {
                        gr.DrawLine(p, bezierList[i], bezierList[i + 1]);
                    }
                }
            }
            else
            {
                var gr = Graphics.FromImage(resPicBox.Image);
                var p  = new Pen(Color.Red);

                for (var i = 0; i < listPoints.Length - 1; i++)
                {
                    gr.DrawLine(p, listPoints[i], listPoints[i + 1]);
                }
            }

            //var bezierList = GetBezierCurve1(listPoints);
        }
Ejemplo n.º 15
0
        /// <summary>
        /// Detects basic shapes (triangles, rectangles, circles) in the bitmap,
        /// draws them onto resPicBox and connects their centre points — with a
        /// Bezier curve when more than three points were found, otherwise with
        /// straight line segments.
        /// </summary>
        /// <param name="bm">Source bitmap to analyse.</param>
        /// <param name="level">Denoising level: 1 = bilateral + median, 2 = median + bilateral + box blur, 3 = fast non-local means.</param>
        /// <param name="circleAccumulatorThreshold">Accumulator threshold for the Hough circle transform.</param>
        /// <param name="maxRadius">Currently ignored — overwritten with img.Width / 10 below. NOTE(review): confirm whether callers expect their value to be honoured.</param>
        private void Process(Bitmap bm, int level, double circleAccumulatorThreshold = 70.0, int maxRadius = 0)
        {
            double cannyThreshold = 0;
            var img = new Image<Bgr, byte>(bm);

            // Noise reduction; each level applies a different filter pipeline into img.
            if (level == 1)
            {
                var resImage = new Image<Bgr, byte>(img.Bitmap);
                CvInvoke.BilateralFilter(resImage, img, 30, 75, 75);
                CvInvoke.MedianBlur(img, img, 5);
            }
            else if (level == 2)
            {
                CvInvoke.MedianBlur(img, img, 5);
                var resImage = new Image<Bgr, byte>(img.Bitmap);
                CvInvoke.BilateralFilter(resImage, img, 25, 75, 75);
                CvInvoke.Blur(img, img, new Size(5, 5), new Point(0, 0));
            }
            else if (level == 3)
            {
                var resImage = new Image<Bgr, byte>(img.Bitmap);
                CvInvoke.FastNlMeansDenoising(resImage, img);
            }

            var grayimage = new Image<Gray, byte>(bm);
            CvInvoke.CvtColor(img, grayimage, ColorConversion.Bgr2Gray);

            // Cap the detectable circle radius relative to the image width.
            maxRadius = img.Width / 10;

            BlackBG(grayimage);
            Console.WriteLine("Filtering done");

            // Derive the Canny threshold from a k-means clustering of the gray image.
            cannyThreshold = GetKMeansThreshold(grayimage);
            label2.Text = cannyThreshold.ToString();
            Thresholding(grayimage, cannyThreshold);
            Console.WriteLine("Canny threshold using KMEANS found " + cannyThreshold);

            // Grayscale working image for the Hough transforms and edge detection.
            var uimage = new UMat();
            CvInvoke.CvtColor(img, uimage, ColorConversion.Bgr2Gray);

            var circles = CvInvoke.HoughCircles(uimage, HoughType.Gradient, 2.0, 5.0, cannyThreshold, circleAccumulatorThreshold, 1, maxRadius);
            Console.WriteLine("Circles found " + circles.Length.ToString());

            var cannyEdges = new UMat();
            CvInvoke.Canny(uimage, cannyEdges, cannyThreshold, cannyThreshold);

            // NOTE(review): the detected lines are never used afterwards; the call is
            // kept only to preserve the original behaviour (and its log message).
            var lines = CvInvoke.HoughLinesP(uimage,
                                             1,               //Distance resolution in pixel-related units
                                             Math.PI / 180.0, //Angle resolution measured in radians.
                                             1,               //threshold
                                             5,               //min Line length
                                             5);              //gap between lines
            Console.WriteLine("Lines detected");

            var triangleList = new List<Triangle2DF>();
            var boxList = new List<RotatedRect>();

            // Classify edge contours into triangles and near-right-angled rectangles.
            using (var contours = new VectorOfVectorOfPoint())
            {
                CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
                var count = contours.Size;
                for (var i = 0; i < count; i++)
                {
                    using (var contour = contours[i])
                    using (var approxContour = new VectorOfPoint())
                    {
                        CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                        // Skip tiny contours (noise).
                        if (!(CvInvoke.ContourArea(approxContour, false) > 10))
                        {
                            continue;
                        }
                        if (approxContour.Size == 3)
                        {
                            var pts = approxContour.ToArray();
                            triangleList.Add(new Triangle2DF(pts[0], pts[1], pts[2]));
                        }
                        else if (approxContour.Size == 4)
                        {
                            var pts = approxContour.ToArray();
                            var edges = PointCollection.PolyLine(pts, true);

                            // A quadrilateral counts as a rectangle when every exterior
                            // angle lies within [80, 100] degrees.
                            var isRectangle = edges
                                              .Select((t, j) => Math.Abs(edges[(j + 1) % edges.Length].GetExteriorAngleDegree(t)))
                                              .All(angle => !(angle < 80) && !(angle > 100));
                            if (isRectangle)
                            {
                                boxList.Add(CvInvoke.MinAreaRect(approxContour));
                            }
                        }
                    }
                }
            }

            System.Console.WriteLine("Boxes found " + boxList.Count.ToString());
            System.Console.WriteLine("Triangles found " + triangleList.Count.ToString());

            // Remove duplicate/overlapping detections across the three shape kinds.
            var cList = circles.ToList();
            FilterSame(boxList, triangleList, cList, img.Width * img.Height);
            circles = cList.ToArray();

            var points = new List<PointF>();
            var Image = img.CopyBlank();

            // Draw every shape on the overlay and collect its centre point.
            foreach (var triangle in triangleList)
            {
                Image.Draw(triangle, new Bgr(Color.Red), 3);
                points.Add(triangle.Centeroid);
            }

            foreach (var box in boxList)
            {
                Image.Draw(box, new Bgr(Color.Blue), 3);
                points.Add(box.Center);
            }

            foreach (var circle in circles)
            {
                Image.Draw(circle, new Bgr(Color.DarkCyan), 3);
                points.Add(circle.Center);
            }

            var listPoints = SortPoints(points, img);

            System.Console.WriteLine("Points sorted, num of objects " + listPoints.Length.ToString());

            resPicBox.Image = (Image + img).ToBitmap();
            if (listPoints.Length > 3)
            {
                // Enough points: draw a smooth Bezier interpolation through them.
                var bezSegList = InterpolatePointWithBeizerCurves(listPoints.ToList<PointF>());
                // Dispose the GDI+ objects deterministically (the original leaked them).
                using (var gr = Graphics.FromImage(resPicBox.Image))
                using (var p = new Pen(Color.Red))
                {
                    foreach (BeizerCurveSegment seg in bezSegList)
                    {
                        var bezierList = GetBez(new PointF[]
                                                { seg.StartPoint, seg.FirstControlPoint, seg.SecondControlPoint, seg.EndPoint });
                        for (var i = 0; i < bezierList.Length - 1; i++)
                        {
                            gr.DrawLine(p, bezierList[i], bezierList[i + 1]);
                        }
                    }
                }
            }
            else
            {
                // Too few points for a Bezier curve: connect them with straight segments.
                using (var gr = Graphics.FromImage(resPicBox.Image))
                using (var p = new Pen(Color.Red))
                {
                    for (var i = 0; i < listPoints.Length - 1; i++)
                    {
                        gr.DrawLine(p, listPoints[i], listPoints[i + 1]);
                    }
                }
            }
        }
        /// <summary>
        /// Locates a rectangular licence-plate candidate in the photograph, crops
        /// it, saves the crop to RESULT_PLATE and returns the OCR text produced by
        /// RecognizeText(...).
        /// </summary>
        /// <param name="image">Input photograph expected to contain a licence plate.</param>
        /// <returns>The recognised plate text.</returns>
        public string DetectLicencePlate(Bitmap image)
        {
            Image <Bgr, Byte> originalImage = new Image <Bgr, byte>(image);
            //originalImage = originalImage.Resize(500, 500, Emgu.CV.CvEnum.Inter.Linear, true);

            //Convert the image to grayscale and filter out the noise
            Image <Gray, Byte> grayImage = originalImage.Convert <Gray, Byte>();

            // Pyramid down- then up-sampling acts as a cheap low-pass filter.
            UMat pyrDown = new UMat();

            CvInvoke.PyrDown(grayImage, pyrDown);
            CvInvoke.PyrUp(pyrDown, grayImage);

            // Noise removal with bilateral filter (removes noise while preserving edges).
            Mat filteredImage = new Mat();

            CvInvoke.BilateralFilter(grayImage, filteredImage, 11, 17, 17);

            // Find edges of the filtered grayscale image.
            Mat edges = new Mat();

            CvInvoke.Canny(filteredImage, edges, 120, 200);

            // NOTE(review): debug artifact written to the working directory — consider removing for production.
            filteredImage.Save("medziproj.jpg");

            // Find contours (Canny output is cloned because FindContours mutates its input).
            Mat hierarchy = new Mat();
            var edgesCopy = edges.Clone();
            var contours  = new VectorOfVectorOfPoint();

            CvInvoke.FindContours(edgesCopy, contours, hierarchy, Emgu.CV.CvEnum.RetrType.List, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);

            //Create copy of original image to draw all contours
            var copyOriginalImage = originalImage.Clone();

            CvInvoke.DrawContours(copyOriginalImage, contours, -1, new MCvScalar(0, 255, 0, 255), 3);

            // Keep only contours with a non-trivial area (> 60 px²).
            // NOTE(review): the loop starts at i = 1, skipping contour 0 — confirm this is intentional.
            var newContoursArray = new VectorOfVectorOfPoint();

            int count = contours.Size;

            for (int i = 1; i < count; i++)
            {
                using (VectorOfPoint contour = contours[i])
                {
                    if (CvInvoke.ContourArea(contour, false) > 60)
                    {
                        newContoursArray.Push(contour);
                    }
                }
            }

            //Create copy of original image to draw all contours
            var copyOriginalImage2 = originalImage.Clone();

            CvInvoke.DrawContours(copyOriginalImage2, newContoursArray, -1, new MCvScalar(0, 255, 0, 255), 3);

            var numberPlateVectorArray = new VectorOfVectorOfPoint();
            var resultRectangle        = new Rectangle();

            // Take the first quadrilateral whose corners are all roughly right angles.
            for (int i = 0; i < newContoursArray.Size; i++)
            {
                var c      = newContoursArray[i];
                var peri   = CvInvoke.ArcLength(c, true);
                var approx = new VectorOfPoint();
                CvInvoke.ApproxPolyDP(c, approx, (0.02 * peri), true);

                // We have a quadrilateral.
                if (approx.Size == 4)
                {
                    //determine if all the angles in the contour are within [80, 100] degree
                    bool            isRectangle = true;
                    LineSegment2D[] edgesArr    = PointCollection.PolyLine(approx.ToArray(), true);

                    for (int j = 0; j < edgesArr.Length; j++)
                    {
                        double angle = Math.Abs(
                            edgesArr[(j + 1) % edgesArr.Length].GetExteriorAngleDegree(edgesArr[j]));
                        if (angle < 80 || angle > 100)
                        {
                            isRectangle = false;
                            break;
                        }
                    }

                    if (isRectangle)
                    {
                        numberPlateVectorArray.Push(approx);
                        resultRectangle = CvInvoke.BoundingRectangle(c);
                        break;
                    }
                }
            }

            var originalImage3 = originalImage.Clone();

            // Drawing the selected contour on the original image.
            CvInvoke.DrawContours(originalImage3, numberPlateVectorArray, -1, new MCvScalar(0, 255, 0), 3);

            // Crop the plate region via ROI (Copy() honours the ROI), then reset the ROI.
            // NOTE(review): cvResetImageROI is a legacy API; on the freshly copied image it
            // should be a no-op — verify on the EmguCV version in use.
            originalImage.ROI = resultRectangle;
            originalImage     = originalImage.Copy();
            CvInvoke.cvResetImageROI(originalImage);

            //originalImage = originalImage.Resize(200, 200, Emgu.CV.CvEnum.Inter.Linear, true);
            originalImage.Save(RESULT_PLATE);
            return(this.RecognizeText(RESULT_PLATE));
        }
Ejemplo n.º 17
0
        /// <summary>
        /// Finds circle candidates in the image via edge contours, keeps those
        /// whose radius is within <paramref name="error"/> of
        /// <paramref name="estimatedRadius"/>, scores them by contour/circle area
        /// ratio and returns the best one if it matches the requested pattern.
        /// </summary>
        /// <param name="image">Grayscale input image.</param>
        /// <param name="estimatedRadius">Expected circle radius in pixels.</param>
        /// <param name="patternType">Pattern id forwarded to MatchPattern.</param>
        /// <param name="error">Allowed deviation from the estimated radius, in pixels.</param>
        /// <returns>The highest-scoring circle that matches the pattern.</returns>
        /// <exception cref="IndexOutOfRangeException">
        /// Thrown when the best candidate does not match the pattern.
        /// NOTE(review): an InvalidOperationException would be more idiomatic, but
        /// the type is kept because existing callers may catch it.
        /// </exception>
        public CircleF FindCircle(Image <Gray, Byte> image, int estimatedRadius, int patternType, int error = 30)
        {
            circles.Clear();
            Image <Gray, Byte> bilateralFilteredImage, edgeDetectedImage, eroded, img;

            img = image.Clone();
            bilateralFilteredImage = new Mat().ToImage <Gray, byte>();
            edgeDetectedImage      = new Mat().ToImage <Gray, byte>();
            eroded = new Mat().ToImage <Gray, byte>();
            Mat hierarchy = new Mat();
            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();

            // Close small gaps, smooth while keeping edges, detect edges, then
            // close the edge map again before extracting contours.
            CvInvoke.MorphologyEx(img, img, MorphOp.Close, GenerateEllipseKernel(13), new Point(-1, -1), 1, BorderType.Default, new MCvScalar());
            CvInvoke.BilateralFilter(img, bilateralFilteredImage, 9, 30, 30);
            CvInvoke.Canny(bilateralFilteredImage, edgeDetectedImage, 25, 25);
            CvInvoke.MorphologyEx(edgeDetectedImage, eroded, MorphOp.Close, GenerateEllipseKernel(11), new Point(-1, -1), 1, BorderType.Default, new MCvScalar());
            CvInvoke.FindContours(eroded, contours, hierarchy, RetrType.List, ChainApproxMethod.ChainApproxSimple);

            for (int i = 0; i < contours.Size; i++)
            {
                using (VectorOfPoint contour = contours[i])
                {
                    Rectangle r = CvInvoke.BoundingRectangle(contour);
                    double    w, h;
                    // Only roughly square bounding boxes can contain a full circle.
                    if (IsSquare(r.Width, r.Height))
                    {
                        w = r.Width;
                        h = r.Height;

                        // Area of the circle inscribed in the bounding square (radius = w/2).
                        double  rect_area   = ((w * w) / 4) * Math.PI;
                        CircleF circle      = CvInvoke.MinEnclosingCircle(contour);
                        double  circle_area = circle.Radius * circle.Radius * Math.PI;

                        // Accept when the enclosing-circle area matches the inscribed-circle
                        // area within 10% and the recovered radius is within `error` of the
                        // estimate. (Fixed: use Math.PI instead of the 3.14 approximation,
                        // consistent with the area computations above.)
                        if ((Math.Abs(rect_area - circle_area) < rect_area / 10) &&
                            (Math.Abs(Math.Sqrt(circle_area / Math.PI) - estimatedRadius) < error) && (w > 21) && (h > 21))
                        {
                            CircleFWithScore temp = new CircleFWithScore(circle, CvInvoke.ContourArea(contour) / circle.Area);
                            circles.Add(temp);
                        }
                    }
                }
            }

            CircleF result = FindHighestScoreCircle();

            if (MatchPattern(image, result, patternType))
            {
                return(result);
            }
            else
            {
                // Best candidate rejected by the pattern check.
                throw new IndexOutOfRangeException();
            }
        }
Ejemplo n.º 18
0
        /// <summary>
        /// Opens an image chosen by the user, binarizes and inverts it, extracts a
        /// 10x10 grid of per-cell ink pixel counts from a fixed 450x650 central
        /// form region, writes the grid to a text file and displays the original
        /// and processed images.
        /// </summary>
        private void btnOpenFile_Click(object sender, EventArgs e)
        {
            DialogResult drChosenFile = ofdOpenFile.ShowDialog();

            if (drChosenFile != DialogResult.OK || ofdOpenFile.FileName == "")
            {
                lblChosenFile.Text = "file not chosen";
                return;
            }

            Mat inputFormColor;

            try
            {
                inputFormColor = new Mat(ofdOpenFile.FileName, ImreadModes.Color);
            }
            catch (Exception ex)
            {
                lblChosenFile.Text = "unable to open image, error: " + ex.Message;
                return;
            }

            if (inputFormColor == null)
            {
                lblChosenFile.Text = "unable to open image";
                return;
            }

            // Shrink to a quarter of the original size to speed up processing.
            CvInvoke.Resize(inputFormColor, inputFormColor, new Size(0, 0), 0.25, 0.25, Inter.Cubic);

            Mat inputFormGray         = new Mat(inputFormColor.Size, DepthType.Cv8U, 1);
            Mat inputFormGrayFiltered = new Mat(inputFormColor.Size, DepthType.Cv8U, 1);
            Mat inputFormComplement   = new Mat(inputFormColor.Size, DepthType.Cv8U, 1);

            inputFormComplement.SetTo(new MCvScalar(255));

            CvInvoke.CvtColor(inputFormColor, inputFormGray, ColorConversion.Bgr2Gray);

            // Edge-preserving smoothing followed by adaptive binarization.
            CvInvoke.BilateralFilter(inputFormGray, inputFormGrayFiltered, 9, 80.0, 80.0);

            CvInvoke.AdaptiveThreshold(inputFormGrayFiltered, inputFormGrayFiltered, 255, AdaptiveThresholdType.GaussianC, ThresholdType.Binary, 11, 2);

            // Invert: complement = 255 - thresholded, so ink pixels become non-zero.
            CvInvoke.Subtract(inputFormComplement, inputFormGrayFiltered, inputFormComplement);

            // Extract the fixed-size form area centred in the image.
            Size inputFormSize = new Size(450, 650);

            Point inputFormCenter = new Point(inputFormGray.Size.Width / 2, inputFormGray.Size.Height / 2);

            CvInvoke.GetRectSubPix(inputFormComplement, inputFormSize, inputFormCenter, inputFormComplement);

            // The form is a 10x10 grid of 45x65-pixel cells; count non-zero
            // (ink) pixels in each cell.
            Size sampleSize = new Size(45, 65);

            int[,] histogram = new int[10, 10];

            for (int i = 0; i < 10; i++)
            {
                for (int j = 0; j < 10; j++)
                {
                    int   Histogram        = 0;
                    Mat   sampleForm       = new Mat(sampleSize, DepthType.Cv8U, 1);
                    Point sampleSizeCenter = new Point(45 * i + 22, 65 * j + 32);
                    CvInvoke.GetRectSubPix(inputFormComplement, sampleSize, sampleSizeCenter, sampleForm);
                    for (int height = 0; height < sampleForm.Rows; height++)
                    {
                        for (int width = 0; width < sampleForm.Cols; width++)
                        {
                            // NOTE(review): assumes GetData(row, col) returns the pixel
                            // bytes at that position — verify against the EmguCV version in use.
                            if (Convert.ToInt16(sampleForm.GetData(height, width)[0]) != 0)
                            {
                                Histogram++;
                            }
                        }
                    }
                    histogram[i, j] = Histogram;
                }
            }

            string fileName      = System.IO.Path.GetFileNameWithoutExtension(ofdOpenFile.FileName);
            // NOTE(review): hard-coded output directory; StreamWriter throws if it does not exist.
            string arrayFileName = "C:\\CsharpTemp\\" + fileName + "_Array.txt";

            // Write the grid transposed (histogram[j, i]) as whitespace-separated counts.
            // The using block disposes the writer, which flushes and closes the stream;
            // the original's explicit Flush()/Close() calls were redundant.
            using (StreamWriter sw = new StreamWriter(arrayFileName))
            {
                for (int i = 0; i < 10; i++)
                {
                    for (int j = 0; j < 10; j++)
                    {
                        sw.Write(histogram[j, i] + " ");
                    }
                    sw.WriteLine("");
                }
            }

            ibOriginal.Image = inputFormColor;
            ibFiltered.Image = inputFormComplement;
        }
Ejemplo n.º 19
0
        /// <summary>
        /// Detects and segments a two-line text code inside a fixed region of the
        /// source frame, crops it, draws per-character bounding boxes, decodes the
        /// text with Read(...) and shows the intermediate stages in the imb2..imb5
        /// picture boxes.
        /// </summary>
        /// <param name="ImgSource">Full source frame (BGR).</param>
        /// <param name="Color">Expected text colour; selects erode (white text) vs dilate preprocessing.</param>
        private void Processing(Image <Bgr, byte> ImgSource, TextColor Color = TextColor.White)
        {
            Rectangle ROICode = new Rectangle();

            mImgDetected = ImgSource.Copy();
            // Restrict processing to a band in the right half, upper quarter of the frame.
            Rectangle ROI = new Rectangle(ImgSource.Width / 2, ImgSource.Height / 10, ImgSource.Width, ImgSource.Height / 4);

            mImgDetected.ROI = ROI;
            // filter noise
            //detect code
            using (Image <Gray, byte> imgGray = mImgDetected.Convert <Gray, byte>())
            {
                using (Image <Gray, byte> imgFilter = new Image <Gray, byte>(imgGray.Size))
                {
                    // Edge-preserving smoothing before the morphology step.
                    CvInvoke.BilateralFilter(imgGray, imgFilter, 9, 49, 49);
                    using (Mat k = CvInvoke.GetStructuringElement(Emgu.CV.CvEnum.ElementShape.Rectangle, new Size(5, 5), new Point(-1, -1)))
                    {
                        if (Color == TextColor.White)
                        {
                            CvInvoke.Erode(imgFilter, imgFilter, k, new Point(-1, -1), 1, Emgu.CV.CvEnum.BorderType.Default, new MCvScalar());
                        }
                        else
                        {
                            CvInvoke.Dilate(imgFilter, imgFilter, k, new Point(-1, -1), 1, Emgu.CV.CvEnum.BorderType.Default, new MCvScalar());
                        }
                    }
                    using (Image <Gray, double> ImgSobel = new Image <Gray, double>(imgFilter.Size))
                    {
                        // Horizontal gradient (dx=1, dy=0) highlights vertical character strokes.
                        CvInvoke.Sobel(imgFilter, ImgSobel, Emgu.CV.CvEnum.DepthType.Cv64F, 1, 0, kSize: 1);
                        CvInvoke.ConvertScaleAbs(ImgSobel, imgFilter, 2, 0);
                        CvInvoke.Threshold(imgFilter, imgFilter, 20, 255, Emgu.CV.CvEnum.ThresholdType.Binary);

                        // Blank out blobs that are too tall (> 65 px) or too small (< 10 px²) to be characters.
                        using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
                        {
                            CvInvoke.FindContours(imgFilter, contours, null, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
                            for (int i = 0; i < contours.Size; i++)
                            {
                                double    s     = CvInvoke.ContourArea(contours[i]);
                                Rectangle bound = CvInvoke.BoundingRectangle(contours[i]);
                                if (bound.Height > 65 || s < 10)
                                {
                                    CvInvoke.DrawContours(imgFilter, contours, i, new MCvScalar(0), -1);
                                }
                            }
                        }
                        // Merge characters on the same text line into one wide blob (107x1 closing kernel).
                        using (Mat k = CvInvoke.GetStructuringElement(Emgu.CV.CvEnum.ElementShape.Rectangle, new Size(107, 1), new Point(-1, -1)))
                        {
                            CvInvoke.MorphologyEx(imgFilter, imgFilter, Emgu.CV.CvEnum.MorphOp.Close, k, new Point(-1, -1), 1, Emgu.CV.CvEnum.BorderType.Default, new MCvScalar());
                        }
                        using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
                        {
                            CvInvoke.FindContours(imgFilter, contours, null, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple)                            ;
                            // The largest blob is assumed to be the first text line.
                            double large_area  = 0;
                            int    index_large = 0;
                            for (int i = 0; i < contours.Size; i++)
                            {
                                double s = CvInvoke.ContourArea(contours[i]);
                                if (large_area < s)
                                {
                                    large_area  = s;
                                    index_large = i;
                                }
                            }
                            Rectangle boxFirstLine  = CvInvoke.BoundingRectangle(contours[index_large]);
                            Rectangle boxSecondLine = new Rectangle();
                            // The second line is the first sufficiently wide blob within 120 px below the first.
                            for (int i = 0; i < contours.Size; i++)
                            {
                                Rectangle b = CvInvoke.BoundingRectangle(contours[i]);
                                if (b.Y - boxFirstLine.Y < 120 && b.Y - boxFirstLine.Y > 0 && b.Width > 30)
                                {
                                    boxSecondLine = CvInvoke.BoundingRectangle(contours[i]);
                                    break;
                                }
                            }
                            // Expand the union of both line boxes by a margin and clamp to the image.
                            // NOTE(review): the Height formula subtracts boxFirstLine.X (not .Y) —
                            // looks like a typo; confirm the intended geometry before changing it.
                            ROICode        = new Rectangle(boxFirstLine.X - 20, boxFirstLine.Y - 20, boxFirstLine.Width + 40, boxSecondLine.Y + boxSecondLine.Height + 60 - boxFirstLine.X);
                            ROICode.X      = ROICode.X < 0 ? 0: ROICode.X;
                            ROICode.Y      = ROICode.Y < 0 ? 0 : ROICode.Y;
                            ROICode.Width  = ROICode.X + ROICode.Width > mImgDetected.Width ? mImgDetected.Width - ROICode.X : ROICode.Width;
                            ROICode.Height = ROICode.Y + ROICode.Height > mImgDetected.Height ? mImgDetected.Height - ROICode.Y : ROICode.Height;
                            mImgCroped     = mImgDetected.Copy();
                            mImgCroped.ROI = ROICode;
                            CvInvoke.Rectangle(mImgDetected, ROICode, new MCvScalar(255, 0, 0), 3);
                            mImgDetected.ROI = new Rectangle();
                            imb3.Image       = mImgCroped.Bitmap;
                        }
                    }
                }
            }
            // segment char text
            mImgSegment     = new Image <Gray, byte>(mImgCroped.Size);
            mImgCharSegment = mImgCroped.Copy();
            CvInvoke.CvtColor(mImgCroped, mImgSegment, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
            using (Mat k = CvInvoke.GetStructuringElement(Emgu.CV.CvEnum.ElementShape.Rectangle, new Size(5, 5), new Point(-1, -1)))
            {
                CvInvoke.MorphologyEx(mImgSegment, mImgSegment, Emgu.CV.CvEnum.MorphOp.Open, k, new Point(-1, -1), 1, Emgu.CV.CvEnum.BorderType.Default, new MCvScalar());
            }
            Image <Gray, byte> img_decode = mImgSegment.Copy();

            // Inverted copy is what the decoder consumes (also dumped to disk for debugging).
            CvInvoke.BitwiseNot(img_decode, img_decode);
            CvInvoke.Imwrite("test.png", img_decode);
            CvInvoke.Threshold(mImgSegment, mImgSegment, 127, 255, Emgu.CV.CvEnum.ThresholdType.Binary);
            using (Mat k = CvInvoke.GetStructuringElement(Emgu.CV.CvEnum.ElementShape.Rectangle, new Size(3, 3), new Point(-1, -1)))
            {
                CvInvoke.MorphologyEx(mImgSegment, mImgSegment, Emgu.CV.CvEnum.MorphOp.Open, k, new Point(-1, -1), 1, Emgu.CV.CvEnum.BorderType.Default, new MCvScalar());
            }
            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                CvInvoke.FindContours(mImgSegment, contours, null, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
                // Remove blobs whose size is outside plausible character bounds.
                for (int i = 0; i < contours.Size; i++)
                {
                    Rectangle bound = CvInvoke.BoundingRectangle(contours[i]);
                    if (bound.Height > 60 || bound.Height < 30 || bound.Width > 35)
                    {
                        CvInvoke.DrawContours(mImgSegment, contours, i, new MCvScalar(0), -1);
                    }
                }
            }
            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                CvInvoke.FindContours(mImgSegment, contours, null, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
                // Draw a green box around each surviving character candidate.
                for (int i = 0; i < contours.Size; i++)
                {
                    Rectangle bound = CvInvoke.BoundingRectangle(contours[i]);
                    CvInvoke.Rectangle(mImgCharSegment, bound, new MCvScalar(0, 255, 0), 2);
                }
            }
            CvInvoke.Threshold(mImgSegment, mImgSegment, 127, 255, Emgu.CV.CvEnum.ThresholdType.BinaryInv);
            imb4.Image = mImgSegment.Bitmap;
            imb5.Image = mImgCharSegment.Bitmap;
            string code = Read(img_decode);

            Console.WriteLine(code);
            imb2.Image = mImgDetected.Bitmap;
        }
Ejemplo n.º 20
0
        void FilterChoice(Image <Bgr, Byte> imgToFilter, Image <Bgr, Byte> imgReference, string numPictureGood, string numDistortionGood, string numIntensityGood)
        {
            Image <Bgr, Byte> imgFiltered = new Image <Bgr, byte>(imgToFilter.Size);
            string            name        = numPictureGood + numDistortionGood + "_" + numIntensityGood;

            int sizeMask = (int)numericMinMask.Value;

            if (sizeMask % 2 == 0)
            {
                sizeMask++;
            }

            if (radioAvg.Checked)
            {
                for (int i = sizeMask; i <= (int)numericMaxMask.Value; i = i + 2)
                {
                    timer.Reset();
                    timer.Start();
                    CvInvoke.Blur(imgToFilter, imgFiltered, new Size(i, i), new Point(-1, -1));
                    timer.Stop();
                    EvaluationOfFilter(imgReference, imgFiltered);
                    row[0]  = "Filtr uśredniający";
                    row[1]  = i.ToString();
                    row[2]  = "-";
                    row[3]  = "-";
                    row[4]  = "-";
                    row[5]  = "-";
                    row[6]  = PSNRMSE.ToString();
                    row[7]  = PSNRMSD.ToString();
                    row[8]  = PSNRMED.ToString();
                    row[9]  = Marziliano.ToString();
                    row[10] = timer.Elapsed.TotalMilliseconds.ToString();
                    ListViewItem listViewItem = new ListViewItem(row);
                    listViewEval.Items.Add(listViewItem);
                    SaveResults(imgFiltered, "Avg", i, 0, 0, 0, 0, row, name);
                }
            }

            if (radioGauss.Checked)
            {
                for (int i = sizeMask; i <= (int)numericMaxMask.Value; i = i + 2)
                {
                    for (int j = (int)numericMinSigmaX.Value; j <= (int)numericMaxSigmaX.Value; j = j + 5)
                    {
                        timer.Reset();
                        timer.Start();
                        CvInvoke.GaussianBlur(imgToFilter, imgFiltered, new Size(i, i), j);
                        timer.Stop();
                        EvaluationOfFilter(imgReference, imgFiltered);
                        row[0]  = "Filtr Gaussa";
                        row[1]  = i.ToString();
                        row[2]  = j.ToString();
                        row[3]  = "-";
                        row[4]  = "-";
                        row[5]  = "-";
                        row[6]  = PSNRMSE.ToString();
                        row[7]  = PSNRMSD.ToString();
                        row[8]  = PSNRMED.ToString();
                        row[9]  = Marziliano.ToString();
                        row[10] = timer.Elapsed.TotalMilliseconds.ToString();
                        ListViewItem listViewItem = new ListViewItem(row);
                        listViewEval.Items.Add(listViewItem);
                        SaveResults(imgFiltered, "Gauss", i, j, 0, 0, 0, row, name);
                    }
                }
            }

            if (radioMedian.Checked)
            {
                for (int i = sizeMask; i <= (int)numericMaxMask.Value; i = i + 2)
                {
                    timer.Reset();
                    timer.Start();
                    CvInvoke.MedianBlur(imgToFilter, imgFiltered, i);
                    timer.Stop();
                    EvaluationOfFilter(imgReference, imgFiltered);
                    row[0]  = "Filtr medianowy";
                    row[1]  = i.ToString();
                    row[2]  = "-";
                    row[3]  = "-";
                    row[4]  = "-";
                    row[5]  = "-";
                    row[6]  = PSNRMSE.ToString();
                    row[7]  = PSNRMSD.ToString();
                    row[8]  = PSNRMED.ToString();
                    row[9]  = Marziliano.ToString();
                    row[10] = timer.Elapsed.TotalMilliseconds.ToString();
                    ListViewItem listViewItem = new ListViewItem(row);
                    listViewEval.Items.Add(listViewItem);
                    SaveResults(imgFiltered, "Median", i, 0, 0, 0, 0, row, name);
                }
            }

            if (radioBilateral.Checked)
            {
                for (double j = (double)numericMinSigmaColor.Value; j <= (double)numericMaxSigmaColor.Value; j = j + 10)
                {
                    for (double k = (double)numericMinSigmaSpace.Value; k <= (double)numericMaxSigmaSpace.Value; k = k + 10)
                    {
                        timer.Reset();
                        timer.Start();
                        CvInvoke.BilateralFilter(imgToFilter, imgFiltered, -1, j, k);
                        timer.Stop();
                        EvaluationOfFilter(imgReference, imgFiltered);
                        row[0]  = "Filtr bilateralny";
                        row[1]  = "-";
                        row[2]  = "-";
                        row[3]  = j.ToString();
                        row[4]  = k.ToString();
                        row[5]  = "-";
                        row[6]  = PSNRMSE.ToString();
                        row[7]  = PSNRMSD.ToString();
                        row[8]  = PSNRMED.ToString();
                        row[9]  = Marziliano.ToString();
                        row[10] = timer.Elapsed.TotalMilliseconds.ToString();
                        ListViewItem listViewItem = new ListViewItem(row);
                        listViewEval.Items.Add(listViewItem);
                        SaveResults(imgFiltered, "Bilateral", -1, 0, j, k, 0, row, name);
                    }
                }
            }

            if (radioKuwahara.Checked)
            {
                for (int i = sizeMask; i <= (int)numericMaxMask.Value; i = i + 2)
                {
                    timer.Reset();
                    timer.Start();
                    imgFiltered = KuwaharaFilter(imgToFilter, i);
                    timer.Stop();
                    EvaluationOfFilter(imgReference, imgFiltered);
                    row[0]  = "Filtr Kuwahara";
                    row[1]  = i.ToString();
                    row[2]  = "-";
                    row[3]  = "-";
                    row[4]  = "-";
                    row[5]  = "-";
                    row[6]  = PSNRMSE.ToString();
                    row[7]  = PSNRMSD.ToString();
                    row[8]  = PSNRMED.ToString();
                    row[9]  = Marziliano.ToString();
                    row[10] = timer.Elapsed.TotalMilliseconds.ToString();
                    ListViewItem listViewItem = new ListViewItem(row);
                    listViewEval.Items.Add(listViewItem);
                    SaveResults(imgFiltered, "Kuwahara", i, 0, 0, 0, 0, row, name);
                }
            }

            if (radioUnsharp.Checked)
            {
                for (int i = sizeMask; i <= (int)numericMaxMask.Value; i = i + 2)
                {
                    float j = (float)numericUnsharpMaskMin.Value;
                    for (; j <= (float)numericUnsharpMaskMax.Value; j = (float)(j + 0.5))
                    {
                        timer.Reset();
                        timer.Start();
                        imgFiltered = UnsharpMasking(imgToFilter, i, j);
                        timer.Stop();
                        EvaluationOfFilter(imgReference, imgFiltered);
                        row[0]  = "Unsharp masking";
                        row[1]  = i.ToString();
                        row[2]  = "-";
                        row[3]  = "-";
                        row[4]  = "-";
                        row[5]  = "-";
                        row[6]  = PSNRMSE.ToString();
                        row[7]  = PSNRMSD.ToString();
                        row[8]  = PSNRMED.ToString();
                        row[9]  = Marziliano.ToString();
                        row[10] = timer.Elapsed.TotalMilliseconds.ToString();
                        ListViewItem listViewItem = new ListViewItem(row);
                        listViewEval.Items.Add(listViewItem);
                        SaveResults(imgFiltered, "Unsharp", i, 0, 0, 0, j, row, name);
                    }
                }
            }

            if (radioEqualize.Checked)
            {
                timer.Reset();
                timer.Start();
                imgFiltered = EqualizeHistogram(imgToFilter);
                timer.Stop();
                EvaluationOfFilter(imgReference, imgFiltered);
                row[0]  = "Wyrownanie";
                row[1]  = "-";
                row[2]  = "-";
                row[3]  = "-";
                row[4]  = "-";
                row[5]  = "-";
                row[6]  = PSNRMSE.ToString();
                row[7]  = PSNRMSD.ToString();
                row[8]  = PSNRMED.ToString();
                row[9]  = Marziliano.ToString();
                row[10] = timer.Elapsed.TotalMilliseconds.ToString();
                ListViewItem listViewItem = new ListViewItem(row);
                listViewEval.Items.Add(listViewItem);
                SaveResults(imgFiltered, "Eq", 0, 0, 0, 0, 0, row, name);
            }

            if (radioStretch.Checked)
            {
                timer.Reset();
                timer.Start();
                imgFiltered = StretchHistogram(imgToFilter);
                timer.Stop();
                EvaluationOfFilter(imgReference, imgFiltered);
                row[0]  = "Rozciągniecie";
                row[1]  = "-";
                row[2]  = "-";
                row[3]  = "-";
                row[4]  = "-";
                row[5]  = "-";
                row[6]  = PSNRMSE.ToString();
                row[7]  = PSNRMSD.ToString();
                row[8]  = PSNRMED.ToString();
                row[9]  = Marziliano.ToString();
                row[10] = timer.Elapsed.TotalMilliseconds.ToString();
                ListViewItem listViewItem = new ListViewItem(row);
                listViewEval.Items.Add(listViewItem);
                SaveResults(imgFiltered, "Str", 0, 0, 0, 0, 0, row, name);
            }

            MessageBox.Show("Ukończono operacje");
        }
Ejemplo n.º 21
0
        /// <summary>
        /// Runs the full battery of filters (box, Gaussian, median, bilateral, Kuwahara,
        /// unsharp masking, histogram equalization and stretching) over every accepted
        /// test image at distortion levels 1 and 5, evaluating each result against
        /// <paramref name="imgReference"/> and recording metrics plus timings.
        /// </summary>
        /// <param name="path">Directory prefix of the distorted images.</param>
        /// <param name="numPicture">Identifier prefix used when saving results.</param>
        /// <param name="extension">File extension of the distorted images (with dot).</param>
        /// <param name="imgReference">Undistorted reference image for quality metrics.</param>
        void AllFilters(string path, string numPicture, string extension, Image <Bgr, Byte> imgReference)
        {
            Image <Bgr, Byte> imgFiltered;
            // Image indices excluded from the evaluation run.
            List <int>        rejectedDistortions = new List <int> {
                2, 3, 5, 8, 9, 11, 13, 15, 16, 18, 19, 21, 24
            };

            int sizeMask = (int)numericMinMask.Value;

            // OpenCV filter kernels must have an odd size.
            if (sizeMask % 2 == 0)
            {
                sizeMask++;
            }

            for (int numImage = 1; numImage <= 24; numImage++)
            {
                // BUGFIX: the original advanced numImage with an inner while-loop, which
                // could push it past the loop bound (24 is rejected, so the body then ran
                // with numImage == 25 and tried to load a non-existent file). 'continue'
                // skips rejected indices while respecting the bound.
                if (rejectedDistortions.Contains(numImage))
                {
                    continue;
                }

                // Only the lowest (1) and highest (5) distortion levels are evaluated.
                for (int distortionLvl = 1; distortionLvl <= 5; distortionLvl = distortionLvl + 4)
                {
                    // File names are zero-padded to two digits: "01_1" ... "24_5".
                    string numPath = (numImage < 10 ? "0" : "") + numImage.ToString() + "_" + distortionLvl.ToString();

                    Image <Bgr, Byte> imgToFilter = new Image <Bgr, byte>(path + numPath + extension);
                    imgFiltered = new Image <Bgr, byte>(imgToFilter.Size);

                    // Fills the shared 'row' with the filter name, its parameter columns and
                    // the metrics computed by the last EvaluationOfFilter call, appends the
                    // row to the list view and persists the filtered image via SaveResults.
                    void Record(string filterName, string col1, string col2, string col3, string col4, string col5,
                                string tag, int arg1, int arg2, double arg3, double arg4, float arg5)
                    {
                        row[0]  = filterName;
                        row[1]  = col1;
                        row[2]  = col2;
                        row[3]  = col3;
                        row[4]  = col4;
                        row[5]  = col5;
                        row[6]  = PSNRMSE.ToString();
                        row[7]  = PSNRMSD.ToString();
                        row[8]  = PSNRMED.ToString();
                        row[9]  = Marziliano.ToString();
                        row[10] = timer.Elapsed.TotalMilliseconds.ToString();
                        listViewEval.Items.Add(new ListViewItem(row));
                        SaveResults(imgFiltered, tag, arg1, arg2, arg3, arg4, arg5, row, numPicture + numPath);
                    }

                    // Averaging (box) filter: sweep over mask sizes.
                    for (int i = sizeMask; i <= (int)numericMaxMask.Value; i = i + 2)
                    {
                        timer.Reset();
                        timer.Start();
                        CvInvoke.Blur(imgToFilter, imgFiltered, new Size(i, i), new Point(-1, -1));
                        timer.Stop();
                        EvaluationOfFilter(imgReference, imgFiltered);
                        Record("Filtr uśredniający", i.ToString(), "-", "-", "-", "-", "Avg", i, 0, 0, 0, 0);
                    }

                    // Gaussian filter: grid over mask size and sigmaX.
                    for (int i = sizeMask; i <= (int)numericMaxMask.Value; i = i + 2)
                    {
                        for (int j = (int)numericMinSigmaX.Value; j <= (int)numericMaxSigmaX.Value; j = j + 5)
                        {
                            timer.Reset();
                            timer.Start();
                            CvInvoke.GaussianBlur(imgToFilter, imgFiltered, new Size(i, i), j);
                            timer.Stop();
                            EvaluationOfFilter(imgReference, imgFiltered);
                            Record("Filtr Gaussa", i.ToString(), j.ToString(), "-", "-", "-", "Gauss", i, j, 0, 0, 0);
                        }
                    }

                    // Median filter: sweep over mask sizes.
                    for (int i = sizeMask; i <= (int)numericMaxMask.Value; i = i + 2)
                    {
                        timer.Reset();
                        timer.Start();
                        CvInvoke.MedianBlur(imgToFilter, imgFiltered, i);
                        timer.Stop();
                        EvaluationOfFilter(imgReference, imgFiltered);
                        Record("Filtr medianowy", i.ToString(), "-", "-", "-", "-", "Median", i, 0, 0, 0, 0);
                    }

                    // Bilateral filter: grid over sigmaColor and sigmaSpace.
                    // Diameter -1 lets OpenCV derive the neighbourhood from sigmaSpace.
                    for (double j = (double)numericMinSigmaColor.Value; j <= (double)numericMaxSigmaColor.Value; j = j + 10)
                    {
                        for (double k = (double)numericMinSigmaSpace.Value; k <= (double)numericMaxSigmaSpace.Value; k = k + 10)
                        {
                            timer.Reset();
                            timer.Start();
                            CvInvoke.BilateralFilter(imgToFilter, imgFiltered, -1, j, k);
                            timer.Stop();
                            EvaluationOfFilter(imgReference, imgFiltered);
                            Record("Filtr bilateralny", "-", "-", j.ToString(), k.ToString(), "-", "Bilateral", -1, 0, j, k, 0);
                        }
                    }

                    // Kuwahara filter: sweep over mask sizes.
                    for (int i = sizeMask; i <= (int)numericMaxMask.Value; i = i + 2)
                    {
                        timer.Reset();
                        timer.Start();
                        imgFiltered = KuwaharaFilter(imgToFilter, i);
                        timer.Stop();
                        EvaluationOfFilter(imgReference, imgFiltered);
                        Record("Filtr Kuwahara", i.ToString(), "-", "-", "-", "-", "Kuwahara", i, 0, 0, 0, 0);
                    }

                    // Unsharp masking: grid over mask size and amount (step 0.5).
                    for (int i = sizeMask; i <= (int)numericMaxMask.Value; i = i + 2)
                    {
                        float j = (float)numericUnsharpMaskMin.Value;
                        for (; j <= (float)numericUnsharpMaskMax.Value; j = (float)(j + 0.5))
                        {
                            timer.Reset();
                            timer.Start();
                            imgFiltered = UnsharpMasking(imgToFilter, i, j);
                            timer.Stop();
                            EvaluationOfFilter(imgReference, imgFiltered);
                            Record("Unsharp masking", i.ToString(), "-", "-", "-", "-", "Unsharp", i, 0, 0, 0, j);
                        }
                    }

                    // Histogram equalization (no parameters).
                    timer.Reset();
                    timer.Start();
                    imgFiltered = EqualizeHistogram(imgToFilter);
                    timer.Stop();
                    EvaluationOfFilter(imgReference, imgFiltered);
                    Record("Wyrownanie", "-", "-", "-", "-", "-", "Eq", 0, 0, 0, 0, 0);

                    // Histogram stretching (no parameters).
                    timer.Reset();
                    timer.Start();
                    imgFiltered = StretchHistogram(imgToFilter);
                    timer.Stop();
                    EvaluationOfFilter(imgReference, imgFiltered);
                    Record("Rozciągniecie", "-", "-", "-", "-", "-", "Str", 0, 0, 0, 0, 0);
                }
            }
        }
Ejemplo n.º 22
0
        /// <summary>
        /// Detects the largest convex quadrilateral in the bitmap (assumed to be a
        /// document) and returns it warped to a fronto-parallel rectangle.
        /// </summary>
        /// <param name="bitmap">Source photo; rotated to portrait orientation in place if landscape.</param>
        /// <returns>The perspective-corrected document, scaled to the standard document size.</returns>
        public static Bitmap ExtractDocumentFromBitmap(Bitmap bitmap)
        {
            if (bitmap.Width > bitmap.Height)
            {
                bitmap.RotateFlip(RotateFlipType.Rotate90FlipNone);
            }

            using var image         = new Image <Bgr, byte>(bitmap);
            using var imageGray     = image.Convert <Gray, byte>();
            using var filteredImage = new Image <Bgr, byte>(bitmap);
            using var cannyEdges    = new UMat();
            using var contours      = new VectorOfVectorOfPoint();

            // NOTE(review): BilateralFilter is fed a 1-channel image but writes into a
            // 3-channel destination — confirm Emgu accepts this channel mismatch.
            CvInvoke.BilateralFilter(imageGray, filteredImage, 9, 75, 75);
            CvInvoke.AdaptiveThreshold(filteredImage, filteredImage, 255, AdaptiveThresholdType.GaussianC, ThresholdType.Binary, 115, 4);
            CvInvoke.MedianBlur(filteredImage, filteredImage, 11);
            // A constant border keeps contours that touch the frame edge closed.
            CvInvoke.CopyMakeBorder(filteredImage, filteredImage, 5, 5, 5, 5, BorderType.Constant, new MCvScalar(0, 0, 0));
            CvInvoke.Canny(filteredImage, cannyEdges, 200, 250);
            CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);

            var cannyEdgesHeight = cannyEdges.Bitmap.Height;
            var cannyEdgesWidth  = cannyEdges.Bitmap.Width;
            // Accept contours covering between 50% and 100% of the (border-trimmed) frame.
            var areaContour = (cannyEdgesHeight - 10) * (cannyEdgesWidth - 10);
            var areaCount   = areaContour * 0.5;

            var sourcePointsVector = new VectorOfPoint();
            for (int i = 0; i < contours.Size; i++)
            {
                using (var cont = contours[i])
                {
                    CvInvoke.ApproxPolyDP(cont, cont, CvInvoke.ArcLength(cont, true) * 0.05, true);
                    if (cont.Size == 4 && CvInvoke.IsContourConvex(cont) &&
                        areaCount < CvInvoke.ContourArea(cont) &&
                        CvInvoke.ContourArea(cont) < areaContour)
                    {
                        // BUGFIX: the original assigned 'cont' directly, but the using
                        // block disposes 'cont' on break, leaving sourcePointsVector
                        // pointing at freed native memory. Copy the points instead.
                        sourcePointsVector = new VectorOfPoint(cont.ToArray());
                        sortVector(sourcePointsVector);
                        break;
                    }
                }
            }

            // Called for its possible in-place effect; the return value was never used
            // (the original bound it to an unused local).
            sortVector(sourcePointsVector);
            // NOTE(review): if no document contour was found the vector is empty and the
            // indexing below throws — consider returning null or the input bitmap instead.
            var vectorWithOffset = addOffsetToVector(sourcePointsVector, -5);

            // Output dimensions: longest opposing-edge distances of the quadrilateral.
            var euclideanHeight = new int[] { getEuclideanDistance(vectorWithOffset[0], vectorWithOffset[1]), getEuclideanDistance(vectorWithOffset[2], vectorWithOffset[3]) }.Max();
            var euclideanWidth = new int[] { getEuclideanDistance(vectorWithOffset[0], vectorWithOffset[2]), getEuclideanDistance(vectorWithOffset[1], vectorWithOffset[3]) }.Max();

            VectorOfPoint targetPoints = new VectorOfPoint(new Point[]
            {
                new Point(0, 0),
                new Point(0, euclideanWidth),
                new Point(euclideanHeight, euclideanWidth),
                new Point(euclideanHeight, 0)
            }.ToArray());

            // Map the sorted source corners onto the axis-aligned target rectangle.
            var source = sortVector(vectorWithOffset).ToArray().Select(x => new PointF(x.X, x.Y)).ToArray();
            var target = sortVector(targetPoints).ToArray().Select(x => new PointF(x.X, x.Y)).ToArray();
            var tran   = CvInvoke.GetPerspectiveTransform(source, target);
            CvInvoke.WarpPerspective(image, image, tran, new Size(euclideanHeight, euclideanWidth));

            return(image.ToBitmap((int)standardDocumentWidth * 4, (int)standardDocumentHeight * 4));
        }
Ejemplo n.º 23
0
        /// <summary>
        /// Detects faces in <paramref name="srcImg"/> and returns each one preprocessed:
        /// converted to grey, rotated/scaled so the eyes land at fixed positions,
        /// histogram-equalized, bilaterally smoothed and masked to a face-shaped ellipse.
        /// Draws a green rectangle on <paramref name="srcImg"/> for every processed face.
        /// </summary>
        /// <param name="srcImg">Input frame; modified in place (face rectangles drawn).</param>
        /// <param name="desiredFaceWidth">Width of each output face image.</param>
        /// <param name="desiredFaceHeight">Height of each output face image.</param>
        /// <param name="faceCascade">Cascade used for face detection.</param>
        /// <param name="eyeCascade1">Primary eye cascade.</param>
        /// <param name="eyeCascade2">Fallback eye cascade.</param>
        /// <param name="doLeftAndRightSeparately">Equalize left/right face halves independently.</param>
        /// <param name="isCamera">Whether the frame comes from a live camera.</param>
        /// <returns>One preprocessed Mat per face with both eyes found; empty array if none.</returns>
        public Mat[] GetPreprocessedFace(Mat srcImg,
                                         int desiredFaceWidth, int desiredFaceHeight,
                                         CascadeClassifier faceCascade,
                                         CascadeClassifier eyeCascade1, CascadeClassifier eyeCascade2,
                                         bool doLeftAndRightSeparately, bool isCamera)
        {
            faceRect = null;
            faceRect = odh.DetectManyObjects(srcImg, faceCascade, HelperFeature.DESIRED_CAMERA_WIDTH, isCamera);
            List <Mat> faceData = new List <Mat>();

            if (faceRect == null)
            {
                return(faceData.ToArray());
            }

            for (int i = 0; i < faceRect.Length; i++)
            {
                InitVarible();
                if (faceRect[i].Width > 0)
                {
                    storeFaceRect = faceRect[i];
                    Mat faceImg = new Mat(srcImg, faceRect[i]);


                    // Normalize to a single-channel grey image.
                    Mat gray = new Mat();
                    if (faceImg.NumberOfChannels == 3)
                    {
                        CvInvoke.CvtColor(faceImg, gray, ColorConversion.Bgr2Gray);
                    }
                    else if (faceImg.NumberOfChannels == 4)
                    {
                        // BUGFIX: 4-channel input needs the BGRA conversion code;
                        // Bgr2Gray rejects 4-channel sources.
                        CvInvoke.CvtColor(faceImg, gray, ColorConversion.Bgra2Gray);
                    }
                    else
                    {
                        gray = faceImg;
                    }

                    bool isDefaut = DetectBothEyes(gray, eyeCascade1, eyeCascade2, isCamera);

                    storeLeftEye  = leftEye;
                    storeRightEye = rightEye;
                    // Only process faces where both eyes were located.
                    if (leftEye.X >= 0 && rightEye.X >= 0)
                    {
                        PointF eyesCenter = new PointF((leftEye.X + rightEye.X) * 0.5f, (leftEye.Y + rightEye.Y) * 0.5f);

                        // Angle and distance between the eyes drive rotation and scale.
                        double dy    = (rightEye.Y - leftEye.Y);
                        double dx    = Math.Abs(rightEye.X - leftEye.X);
                        double len   = Math.Sqrt(dx * dx + dy * dy);
                        double angle = Math.Atan2(dy, dx) * 180.0 / Math.PI;
                        if (isDefaut)
                        {
                            angle = 0;
                        }

                        const double DESIRED_RIGHT_EYE_X = (1.0f - HelperFeature.DESIRED_LEFT_EYE_X);

                        // Scale so the eye distance matches the desired eye positions.
                        double desiredLen = (DESIRED_RIGHT_EYE_X - HelperFeature.DESIRED_LEFT_EYE_X) * desiredFaceWidth;
                        double scale      = desiredLen / len;

                        Mat rot_mat = new Mat();
                        CvInvoke.GetRotationMatrix2D(eyesCenter, angle, scale, rot_mat);

                        // Shift the transform so the eye midpoint lands at the desired spot.
                        Image <Gray, double> temp_rot_mat = rot_mat.ToImage <Gray, double>();
                        temp_rot_mat.Data[0, 2, 0] += desiredFaceWidth * 0.5f - eyesCenter.X;
                        temp_rot_mat.Data[1, 2, 0] += desiredFaceHeight * HelperFeature.DESIRED_LEFT_EYE_Y - eyesCenter.Y;

                        rot_mat = temp_rot_mat.Mat;

                        // NOTE(review): the 4th Mat-constructor argument is the channel
                        // count in Emgu, not a fill value — confirm 128 is intended.
                        Mat warped = new Mat(desiredFaceHeight, desiredFaceWidth, DepthType.Cv8U, 128);
                        CvInvoke.WarpAffine(gray, warped, rot_mat, warped.Size);

                        // Reduce lighting variation across the face.
                        if (!doLeftAndRightSeparately)
                        {
                            CvInvoke.EqualizeHist(warped, warped);
                        }
                        else
                        {
                            warped = EqualizeLeftAndRightHalves(warped);
                        }

                        // Light bilateral smoothing to suppress pixel noise while keeping edges.
                        Mat filtered = new Mat(warped.Size, DepthType.Cv8U, 0);
                        CvInvoke.BilateralFilter(warped, filtered, 0, 10, 2.0);

                        // Build an elliptical mask so only the face region is kept.
                        Mat mask = warped;
                        mask = FilterMatContructor(mask);

                        Point faceCenter = new Point(desiredFaceWidth / 2, (int)Math.Round(desiredFaceHeight * HelperFeature.FACE_ELLIPSE_CY));
                        Size  size       = new Size((int)Math.Round(desiredFaceWidth * HelperFeature.FACE_ELLIPSE_W),
                                                    (int)Math.Round(desiredFaceHeight * HelperFeature.FACE_ELLIPSE_H));

                        CvInvoke.Ellipse(mask, faceCenter, size, 0, 0, 360, new MCvScalar(0));
                        mask = FilterEllipse(mask);

                        Mat dstImg = new Mat();
                        filtered.CopyTo(dstImg, mask);

                        faceData.Add(dstImg);

                        CvInvoke.Rectangle(srcImg, faceRect[i], new MCvScalar(0, 255, 0), 1);
                        //CvInvoke.Circle(face, leftEye, 2, new MCvScalar(0, 255, 0), 2);
                        //CvInvoke.Circle(face, rightEye, 2, new MCvScalar(0, 255, 0), 2);
                    }
                }
            }
            return(faceData.ToArray());
        }
Ejemplo n.º 24
0
        /// <summary>
        /// Finds 4-sided contours covering more than 15% of the source area, adds them
        /// to <c>_targets</c>, and returns the consolidated target from CountTargets.
        /// </summary>
        /// <param name="input">BGR frame to search; contours are drawn on it when _showEdged is set.</param>
        /// <returns>The target points produced by CountTargets over all accumulated candidates.</returns>
        private VectorOfPointF FindTarget(Mat input)
        {
            // BUGFIX: the original disposed these Mats manually, leaking all of them if
            // any CvInvoke call threw. Using declarations guarantee disposal.
            using var uImage     = new Mat();
            using var gray       = new Mat();
            using var blurred    = new Mat();
            using var cannyEdges = new Mat();

            // Greyscale -> edge-preserving (bilateral) smoothing -> median blur.
            CvInvoke.CvtColor(input, uImage, ColorConversion.Bgr2Gray);
            CvInvoke.BilateralFilter(uImage, gray, 11, 17, 17);
            CvInvoke.MedianBlur(gray, blurred, 11);

            // Get edged version.
            const double cannyThreshold        = 0.0;
            const double cannyThresholdLinking = 200.0;

            CvInvoke.Canny(blurred, cannyEdges, cannyThreshold, cannyThresholdLinking);

            if (_showEdged)
            {
                CvInvoke.Imshow("Source", cannyEdges);
            }

            // Get contours.
            using (var contours = new VectorOfVectorOfPoint()) {
                CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List,
                                      ChainApproxMethod.ChainApproxSimple);
                var count = contours.Size;
                // Keep only convex-ish quads that are big enough relative to the frame.
                for (var i = 0; i < count; i++)
                {
                    var approxContour = new VectorOfPoint();
                    using var contour = contours[i];
                    CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.02,
                                          true);
                    if (approxContour.Size != 4)
                    {
                        continue;
                    }
                    var cntArea = CvInvoke.ContourArea(approxContour);
                    // Reject candidates smaller than 15% of the source area.
                    if (!(cntArea / _srcArea > .15))
                    {
                        continue;
                    }
                    var pointOut = new VectorOfPointF(SortPoints(approxContour));
                    // NOTE(review): _targets appears to accumulate across calls — confirm
                    // it is cleared elsewhere, otherwise stale candidates build up.
                    _targets.Add(VPointFToVPoint(pointOut));
                }

                if (_showEdged)
                {
                    var color = new MCvScalar(255, 255, 0);
                    CvInvoke.DrawContours(input, contours, -1, color);
                    CvInvoke.Imshow("Edged", cannyEdges);
                }
            }

            return(CountTargets(_targets));
        }
Ejemplo n.º 25
0
        /// <summary>
        /// Runs an edge-detection pipeline on the image at <paramref name="ImageFilePath"/>:
        /// Gaussian + bilateral smoothing, inverted Canny edges, Otsu binarization,
        /// connected-component labeling, and contour tracing. Results are published
        /// through <c>Step1</c> (bilateral), <c>Step2</c> (inverted Canny),
        /// <c>Step3</c> (Otsu) and <c>ContourImage</c>.
        /// </summary>
        /// <param name="ImageFilePath">Path of the image file to process.</param>
        public EdgeAlogorithm(string ImageFilePath)
        {
            // NOTE(review): the class name looks like a typo of "EdgeAlgorithm";
            // kept as-is because renaming would break callers.
            Mat src = CvInvoke.Imread(ImageFilePath, ImreadModes.Color);
            Mat dst = new Mat();            // bilateral-filtered image (-> Step1)

            CvInvoke.GaussianBlur(src, src, new System.Drawing.Size(3, 3), 0, 0); // smoothing
            CvInvoke.BilateralFilter(src, dst, 9, 30, 30);                        // edge-preserving smoothing

            // FIX: outputs below were previously created by re-reading the image
            // from disk; OpenCV (re)allocates output Mats itself, so empty Mats
            // suffice and the redundant disk reads are removed.
            Mat dst1 = new Mat();           // raw Canny edges
            Mat dst2 = new Mat();           // inverted edges (-> Step2)

            CvInvoke.Canny(dst, dst1, 10, 100, 3);                             // Canny edge detection
            CvInvoke.Threshold(dst1, dst2, 128, 255, ThresholdType.BinaryInv); // invert so edges render as black lines

            Mat GraylevelImage = new Mat();
            Mat OtsuImage      = new Mat(); // Otsu-binarized image (-> Step3)

            CvInvoke.CvtColor(src, GraylevelImage, ColorConversion.Rgb2Gray);
            CvInvoke.Threshold(GraylevelImage, OtsuImage, 0, 255, ThresholdType.Otsu);

            Mat labels = new Mat(), stats = new Mat(), centroids = new Mat();
            int nccomps = CvInvoke.ConnectedComponentsWithStats(OtsuImage, labels, stats, centroids);

            // Assign a random color to each sufficiently-large component.
            // BUG FIX: the original constructed a new Random() inside the loop;
            // instances created in quick succession share a time-based seed and
            // can yield identical "random" colors. One instance is enough.
            Random r = new Random();

            var colors = new List <uint[]>();
            colors.Add(new uint[] { 0, 0, 0 });     // label 0 = background, stays black
            for (int loopnum = 1; loopnum < nccomps; loopnum++)
            {
                // stats column 4 is the component area (CC_STAT_AREA);
                // components smaller than 200 px are suppressed (black).
                if (stats.GetData(loopnum, 4)[0] < 200)
                {
                    colors.Add(new uint[3] {
                        0, 0, 0
                    });
                }
                else
                {
                    colors.Add(new uint[3] {
                        (uint)r.Next(0, 256), (uint)r.Next(0, 256), (uint)r.Next(0, 256)
                    });
                }
            }
            // NOTE(review): "colors" is computed but never consumed here —
            // presumably intended for a future component-coloring step; confirm
            // before deleting.

            // (Removed a leftover debug loop that overwrote every pixel of "src"
            // with its column index; "src" is not read past this point, so the
            // loop had no effect on any published output.)

            ImageContour imagecontour = new ImageContour();

            Mat ImageContourImage = imagecontour.ImageContourMethod(OtsuImage);

            Step1        = dst;
            Step2        = dst2;
            Step3        = OtsuImage;
            ContourImage = ImageContourImage;

            // FIX: release intermediates that are not exposed through properties;
            // Mat wraps native memory and must be disposed explicitly.
            src.Dispose();
            dst1.Dispose();
            GraylevelImage.Dispose();
            labels.Dispose();
            stats.Dispose();
            centroids.Dispose();
        }
Ejemplo n.º 26
0
        /// <summary>
        /// From a given cropped image looks for single characters contours.
        /// </summary>
        /// <remarks>
        /// After applying basic bilateral and canny filter it searches the contours of single characters.
        /// Each contour is validated as a possible character using simple total-area
        /// and aspect-ratio (width/height) checks. Contours that meet the requirements
        /// are copied onto a clean white canvas, and a bounding rectangle (with the
        /// measured ratio) is drawn on the input image to simplify further recognition.
        /// </remarks>
        /// <param name="croppedImage">Cropped image on which we want to search characters' contours</param>
        /// <param name="split">Whether the plate should be split into single characters array
        /// (currently unused — TODO: implement or remove)</param>
        /// <returns>White single-channel canvas containing only the accepted character regions.</returns>
        private Mat FindPlateContours(Image <Hsv, byte> croppedImage, bool split = false)
        {
            using (var bilateralMat = new Mat())
                using (var cannyMat = new Mat())
                    using (var contours = new VectorOfVectorOfPoint())
                    {
                        // White canvas that accepted character regions are copied onto.
                        var cleanMat = new Mat(croppedImage.Rows, croppedImage.Cols, DepthType.Cv8U, 1);
                        cleanMat.SetTo(new MCvScalar(255));

                        // BUG FIX: the original computed the automated threshold from
                        // "bilateralMat" BEFORE the bilateral filter ran (i.e. from an
                        // empty Mat) and then fed the UNfiltered croppedImage to Canny,
                        // leaving the bilateral result unused. Filter first, derive the
                        // threshold from the filtered image, and run Canny on it — as
                        // the method documentation describes.
                        CvInvoke.BilateralFilter(
                            croppedImage,
                            bilateralMat,
                            20, 20, 10);

                        var treshold = ImageConverter.GetAutomatedTreshold(bilateralMat.ToImage <Gray, byte>());

                        CvInvoke.Canny(
                            bilateralMat,
                            cannyMat,
                            treshold.Lower,
                            treshold.Upper);

                        CvInvoke.FindContours(
                            cannyMat.ToImage <Gray, byte>(),
                            contours,
                            null,
                            RetrType.External,
                            ChainApproxMethod.ChainApproxSimple);

                        for (var i = 0; i < contours.Size; i++)
                        {
                            var contour = contours[i];
                            // Simplify the contour to ~1% of its perimeter before
                            // deciding whether it could be a character outline.
                            using (var smoothContour = new VectorOfPoint(contour.Size))
                            {
                                CvInvoke.ApproxPolyDP(
                                    contour,
                                    smoothContour,
                                    0.01 * CvInvoke.ArcLength(contour, true), true);

                                if (smoothContour.Size >= 4)
                                {
                                    var    rect  = CvInvoke.BoundingRectangle(smoothContour);
                                    double ratio = (double)rect.Width / rect.Height;
                                    double area  = rect.Width * (double)rect.Height;

                                    // Characters are tall-ish (ratio 0.1..1.5) and not tiny.
                                    if (ratio <= 1.5 &&
                                        ratio >= 0.1 &&
                                        area >= 400)
                                    {
                                        // FIX: dispose the per-contour header Mats; the
                                        // original leaked one pair per accepted contour.
                                        using (Mat ROI = new Mat(cleanMat, rect))
                                            using (Mat potentialCharArea = new Mat(croppedImage.Convert <Gray, byte>().Mat, rect))
                                            {
                                                potentialCharArea.CopyTo(ROI);
                                            }
                                        // For analysis purposes we also draw the ratio value.
                                        CvInvoke.Rectangle(croppedImage, rect, new MCvScalar(0, 250, 0));
                                        CvInvoke.PutText(croppedImage, Math.Round(ratio, 3).ToString(), rect.Location, FontFace.HersheyDuplex, fontScale: 0.5d, new MCvScalar(0, 250, 0));
                                    }
                                }
                            }
                        }

                        return(cleanMat);
                    }
        }