Canny edge detector.

The filter searches for objects' edges by applying the Canny edge detector. The implementation follows Bill Green's Canny edge detection tutorial.

The implemented Canny edge detector differs from the algorithm in the linked tutorial in one respect: the hysteresis step is simplified, which makes it faster. During hysteresis each pixel is compared with two threshold values, HighThreshold and LowThreshold. If a pixel's value is greater than or equal to HighThreshold, it is kept as an edge pixel. If a pixel's value is greater than or equal to LowThreshold, it is kept as an edge pixel only if at least one of its 8 neighbours has a value greater than or equal to HighThreshold; otherwise it is treated as a non-edge pixel. If a pixel's value is less than LowThreshold, it is marked as non-edge immediately. A sketch of this step is shown below.
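For illustration, here is a minimal sketch of this simplified hysteresis step in C#. It assumes the gradient magnitudes remaining after non-maximum suppression are held in a byte[,] array; the names and layout are illustrative only and do not reflect AForge's internal implementation.

// Minimal sketch of the simplified hysteresis described above (illustrative only,
// not AForge's internal code). 'magnitude' holds gradient magnitudes after
// non-maximum suppression; the returned array holds 255 for edge pixels, 0 otherwise.
static byte[,] SimplifiedHysteresis(byte[,] magnitude, byte lowThreshold, byte highThreshold)
{
    int height = magnitude.GetLength(0);
    int width  = magnitude.GetLength(1);
    byte[,] edges = new byte[height, width];

    for (int y = 1; y < height - 1; y++)
    {
        for (int x = 1; x < width - 1; x++)
        {
            byte value = magnitude[y, x];

            if (value >= highThreshold)
            {
                // strong pixel - kept as edge unconditionally
                edges[y, x] = 255;
            }
            else if (value >= lowThreshold)
            {
                // weak pixel - kept only if one of its 8 neighbours is strong
                bool hasStrongNeighbour = false;

                for (int dy = -1; dy <= 1 && !hasStrongNeighbour; dy++)
                {
                    for (int dx = -1; dx <= 1; dx++)
                    {
                        if ((dx != 0 || dy != 0) && magnitude[y + dy, x + dx] >= highThreshold)
                        {
                            hasStrongNeighbour = true;
                            break;
                        }
                    }
                }

                edges[y, x] = hasStrongNeighbour ? (byte)255 : (byte)0;
            }
            // value < lowThreshold: non-edge, left at 0
        }
    }

    return edges;
}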

The filter accepts 8 bpp grayscale images for processing.
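If the source bitmap is not already 8 bpp grayscale, it can be converted first, for example with the library's Grayscale filter (a short sketch; sourceImage is a placeholder name):

// convert to 8 bpp grayscale, then run the detector on the result
Bitmap grayImage = Grayscale.CommonAlgorithms.BT709.Apply( sourceImage );
CannyEdgeDetector canny = new CannyEdgeDetector( );
canny.ApplyInPlace( grayImage );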

Sample usage:

// create filter
CannyEdgeDetector filter = new CannyEdgeDetector( );
// apply the filter
filter.ApplyInPlace( image );

Initial image:

Result image:

Inheritance: BaseUsingCopyPartialFilter
Example #1
        /// <summary>
        /// Applies Canny filter on specified image. (uses AForge implementation)
        /// </summary>
        /// <param name="im">image</param>
        /// <param name="lowThreshold">Low threshold value used for hysteresis</param>
        /// <param name="highThreshold">High threshold value used for hysteresis</param>
        /// <param name="sigma">Gaussian sigma</param>
        /// <param name="gaussianSize">Gaussian filter size</param>
        /// <returns>Processed image with Canny filter</returns>
        public static Gray<byte>[,] Canny(this Gray<byte>[,] im, byte lowThreshold = 20, byte highThreshold = 100, double sigma = 1.4, int gaussianSize = 5)
        {
            CannyEdgeDetector canny = new CannyEdgeDetector(lowThreshold, highThreshold, sigma);
            canny.GaussianSize = gaussianSize;

            return im.ApplyFilter(canny);
        }
Example #2
        static ImageProcessor()
        {
            imageQuantizer = new ColorImageQuantizer(new MedianCutQuantizer());

            grayscale = Grayscale.CommonAlgorithms.BT709;

            edgeDetector = new CannyEdgeDetector();
        }
Example #3
        public static Bitmap DoIt(Bitmap bmp)
        {
            GaussianBlur blur = new GaussianBlur(10, 50);
            Bitmap blurred = blur.Apply(bmp);

            Bitmap ret = new Bitmap(blurred.Width, blurred.Height, blurred.PixelFormat);

            for (int y = 0; y < blurred.Height; y++)
            {
                for (int x = 0; x < blurred.Width; x++)
                {
                    Color blurredColor = blurred.GetPixel(x, y);
                    Color originalColor = bmp.GetPixel(x, y);

                    Color newColor = Color.FromArgb((blurredColor.R + originalColor.R * 2) / 3,
                        (blurredColor.G + originalColor.G * 2) / 3,
                        (blurredColor.B + originalColor.B * 2) / 3);

                    ret.SetPixel(x, y, newColor);
                }
            }

            GammaCorrection gc = new GammaCorrection(.8);
            gc.ApplyInPlace(ret);

            Sharpen sharpen = new Sharpen();
            sharpen.ApplyInPlace(ret);

            Grayscale filter = new Grayscale(0.2125, 0.7154, 0.0721);
            Bitmap gray = filter.Apply(ret);

            CannyEdgeDetector canny = new CannyEdgeDetector();
            gray = canny.Apply(gray);

            for (int y = 0; y < gray.Height; y++)
            {
                for (int x = 0; x < gray.Width; x++)
                {
                    if (gray.GetPixel(x, y).R > 0)
                    {
                        Color retColor = ret.GetPixel(x, y);
                        Color newColor = Color.FromArgb(
                            (int)(retColor.R * .7),
                            (int)(retColor.G * .7),
                            (int)(retColor.B * .7));

                        ret.SetPixel(x, y, newColor);
                    }
                }
            }

            return ret;
        }
Example #4
        /// <summary>
        /// The purpose of this method is to crop the received image so that the view contains only the sheet of paper, and to return it to the UploadFile method
        /// </summary>
        /// <param name="img"></param>
        /// <returns></returns>
        public System.Drawing.Image CropImage(System.Drawing.Image img)
        {
            //DocumentSkewChecker skewChecker = new DocumentSkewChecker();
            //double angle = skewChecker.GetSkewAngle(image);
            //RotateBilinear rotationFilter = new RotateBilinear(-angle);
            //rotationFilter.FillColor = Color.White;
            //Bitmap rotatedImage = rotationFilter.Apply(image);

            Bitmap image = new Bitmap(img);

            UnmanagedImage grayImage = null;

            if (image.PixelFormat == PixelFormat.Format8bppIndexed)
            {
                grayImage = UnmanagedImage.FromManagedImage(image);
            }
            else
            {
                grayImage = UnmanagedImage.Create(image.Width, image.Height,
                    PixelFormat.Format8bppIndexed);
                Grayscale.CommonAlgorithms.BT709.Apply(UnmanagedImage.FromManagedImage(image), grayImage);
            }

            CannyEdgeDetector edgeDetector = new CannyEdgeDetector();
            UnmanagedImage edgesImage = edgeDetector.Apply(grayImage);

            OtsuThreshold thresholdFilter = new OtsuThreshold();
            thresholdFilter.ApplyInPlace(edgesImage);

            Dilatation DilatationFilter = new Dilatation();
            DilatationFilter.ApplyInPlace(edgesImage);

            Opening OpeningFilter = new Opening();
            OpeningFilter.ApplyInPlace(edgesImage);

            BlobCounter blobCounter = new BlobCounter();
            blobCounter.MinHeight = 32;
            blobCounter.MinWidth = 32;
            blobCounter.FilterBlobs = true;
            blobCounter.ObjectsOrder = ObjectsOrder.Size;

            blobCounter.ProcessImage(edgesImage);
            Blob[] blobs = blobCounter.GetObjectsInformation();

            ExtractBiggestBlob BiggestBlob = new ExtractBiggestBlob();
            Bitmap biggestBlobsImage = BiggestBlob.Apply(edgesImage.ToManagedImage());
            AForge.IntPoint BiggestBlobCorners = BiggestBlob.BlobPosition;

            Crop cropFilter = new Crop(new Rectangle(BiggestBlobCorners.X, BiggestBlobCorners.Y, biggestBlobsImage.Width, biggestBlobsImage.Height));
            Bitmap croppedImage = cropFilter.Apply(image);
            return croppedImage;
        }
Example #5
 public static Bitmap canny(Bitmap image)
 {
     try
     {
         Bitmap gris = EscalaGrises(image);
         AForge.Imaging.Filters.CannyEdgeDetector filter = new AForge.Imaging.Filters.CannyEdgeDetector();
         System.Drawing.Bitmap newImage = filter.Apply(gris);
         return(newImage);
     }
     catch (ArgumentException ex)
     {
         MessageBox.Show(ex.ToString(), "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
         return(null);
     }
 }
Example #6
        private WriteableBitmap FindPlate(IEnumerable<Rect> rects, WriteableBitmap image)
        {
            WriteableBitmap bestCandidate = null;
            
            foreach (var rect in rects)
            {
                var croppedImage = image.Crop(rect);
                var edgeFilter = new CannyEdgeDetector();
                var smoothFilter = new Median();
                var grayFilter = new Grayscale(0.2125, 0.7154, 0.0721);
                var blobCounter = new BlobCounter();
                var cutTop = croppedImage.PixelHeight * 0.3;

                croppedImage = croppedImage.Crop(new Rect(0, cutTop, croppedImage.PixelWidth, croppedImage.PixelHeight));

                var bitmap = (Bitmap)croppedImage;
                var grayImage = grayFilter.Apply(bitmap);

                bitmap = smoothFilter.Apply(grayImage);
                edgeFilter.ApplyInPlace(bitmap);
                blobCounter.ProcessImage(bitmap);

                var blobs = blobCounter.GetObjectsInformation();
                var possibleChars = new List<Rectangle>();

                foreach (var blob in blobs)
                {
                    var objRectangle = blob.Rectangle;
                    var ratio = (double)objRectangle.Height / (double)objRectangle.Width;
                    
                    if (ratio >= 1.16d && ratio <= 6.3d)
                    {
                        possibleChars.Add(objRectangle);
                    }
                }

                if (possibleChars.Count == 0)
                {
                    continue;
                }

                bestCandidate = croppedImage;
            }

            return bestCandidate;
        }
Example #7
        // 1. bright pixel / dark pixel
        // 2. lowest gray level
        // 3. highest gray level
        // 4. number of peaks in the x direction
        // 5. number of peaks in the y direction
        public static double[] ExtractFeatures(Bitmap bmp,int i)
        {
            //Apply GrayScale
            GrayscaleBT709 greyScaleFilter = new GrayscaleBT709();
            Bitmap newBmp = greyScaleFilter.Apply((Bitmap)bmp.Clone());

            //Count Blobs
            BlobCounter blobCounter = new BlobCounter();
            blobCounter.BackgroundThreshold = Color.FromArgb(255, 150, 150, 150);
            blobCounter.ProcessImage(newBmp);
            int blobs = (blobCounter.ObjectsCount - 1) * 30;

            //Count Corner
            SusanCornersDetector scd = new SusanCornersDetector();
            scd.DifferenceThreshold = 70;
            scd.GeometricalThreshold = 8;
            int corners = scd.ProcessImage((Bitmap)newBmp.Clone()).Count();

            //Apply Edge Filter
            CannyEdgeDetector filter = new CannyEdgeDetector();
            //newBmp = filter.Apply(newBmp);
            Histogram his = new HorizontalIntensityStatistics(newBmp).Gray;
            Histogram vis = new VerticalIntensityStatistics(newBmp).Gray;

            HoughLineTransformation lineTransform = new HoughLineTransformation();
            // apply Hough line transform
            lineTransform.ProcessImage(filter.Apply(newBmp));
            Bitmap houghLineImage = lineTransform.ToBitmap();
            // get lines using relative intensity
            HoughLine[] lines = lineTransform.GetLinesByRelativeIntensity(1);
            int linesCount = lines.Count() * 30;

            double[] features = new double[13] { blobs, corners, his.Max, his.Min, his.Mean, his.Median, his.StdDev,
                vis.Max, vis.Min, vis.Mean, vis.Median, vis.StdDev,linesCount};

            //double[] features = new double[3] { blobs, corners,lines};

            newBmp.Save(String.Format("test{0}.bmp",i));
            return features;
        }
Example #8
        private Bitmap GetEdgedImage(WriteableBitmap writeableBitmap)
        {
            var edgeFilter = new CannyEdgeDetector(255, 0);
            var smoothFilter = new Median();
            var grayFilter = new Grayscale(0.2125, 0.7154, 0.0721);
            var bitmap = (Bitmap)writeableBitmap;

            bitmap = grayFilter.Apply(bitmap);
            smoothFilter.ApplyInPlace(bitmap);
            edgeFilter.ApplyInPlace(bitmap);

            return bitmap;
        }
Example #9
        public static FoundColorSpaces Find(Bitmap bmp)
        {
            FoundColorSpaces ret = new FoundColorSpaces();

            ret.OriginalColorSpace = bmp;
            ret.GrayColorSpace = Grayscale.CommonAlgorithms.BT709.Apply(ret.OriginalColorSpace);

            CannyEdgeDetector edges = new CannyEdgeDetector();
            Threshold threshold = new Threshold();
            ret.Edges = threshold.Apply(edges.Apply(ret.GrayColorSpace));

            ret.BinaryColorSpace = threshold.Apply(ret.GrayColorSpace);

            //ret.CorrectedRGBColorSpace = new Bitmap(bmp.Width, bmp.Height, PixelFormat.Format24bppRgb); 

            return ret;
        }
Example #10
        Bitmap processImageCenterline(string filename)
        {
            using (Bitmap SampleImage = (Bitmap)System.Drawing.Image.FromFile(filename))
            {
                // We must convert it to grayscale because
                // the filter accepts 8 bpp grayscale images
                Grayscale GF = new Grayscale(0.2125, 0.7154, 0.0721);
                using (Bitmap GSampleImage = GF.Apply(SampleImage))
                {
                    // Saving the grayscale image, so we could see it later
                    // Detecting image edges and saving the result
                    CannyEdgeDetector CED = new CannyEdgeDetector(0, 70);
                    //CED.ApplyInPlace(GSampleImage);
                    //BradleyLocalThresholding bwfilter = new BradleyLocalThresholding();
                    //bwfilter.ApplyInPlace(GSampleImage);
                    // create filter

                    // create filter sequence
                    FiltersSequence filterSequence = new FiltersSequence();

                    // Inverting image to get white image on black background
                    filterSequence.Add(new Invert());
                    filterSequence.Add(new SISThreshold());
                    // Finding skeleton
                    filterSequence.Add(new SimpleSkeletonization());
                    //clean image from scratches
                    short[,] se = new short[,] {
                                                { -1, -1, -1 },
                                                {  0,  1,  0 },
                                                { -1, -1, -1 }};

                    filterSequence.Add(new HitAndMiss(se, HitAndMiss.Modes.Thinning));
                    //filterSequence.Add(new Median( ));
                    //filterSequence.Add(new Dilatation());
                    filterSequence.Add(new Invert());
                    // apply the filter sequence and return the result
                    return filterSequence.Apply(GSampleImage);

                }

            }
        }
Example #11
 /// <summary>
 /// Applies the Canny edge detector to the image after converting it to grayscale.
 /// </summary>
 /// <param name="image"></param>
 /// <returns></returns>
 public static Image CannyEdges(this Image image)
 {
     CannyEdgeDetector cannyEdge = new CannyEdgeDetector();
     return cannyEdge.Apply(BitmapGrayscale(image));
 }
Example #12
        private void button2_Click(object sender, EventArgs e)
        {
            button2.Text = "处理中";
            switch (comboBox4.SelectedIndex)
            {
            case 0:
            {
                Bitmap      temp    = (Bitmap)pictureBox1.Image;
                OilPainting filter3 = new OilPainting(10);
                // apply the filter
                filter3.ApplyInPlace(temp);
                this.pictureBox2.Image = ResizeBitmap(temp);
                break;
            }

            case 1:
            {
                Bitmap temp = (Bitmap)pictureBox1.Image;
                temp = new Grayscale(0.2125, 0.7154, 0.0721).Apply(temp);
                DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector();
                temp = edgeDetector.Apply(temp);
                temp = new Threshold((int)numericUpDown1.Value).Apply(temp);

                //FillHoles filter2 = new FillHoles();
                //filter2.MaxHoleHeight = MinHeight;
                //filter2.MaxHoleWidth = MaxWidth;
                //filter2.CoupledSizeFiltering = false;
                // apply the filter
                //temp = filter2.Apply(temp);
                //HorizontalRunLengthSmoothing hrls = new HorizontalRunLengthSmoothing(40);
                // apply the filter
                //hrls.ApplyInPlace(temp);

                /*AForge.Imaging.Filters.BlobsFiltering filter = new AForge.Imaging.Filters.BlobsFiltering();
                 * // set filter conditions (object width and height at least 70)
                 * filter.CoupledSizeFiltering = true;
                 * filter.MaxWidth = (int)numericUpDown3.Value;
                 * filter.MaxHeight = (int)numericUpDown4.Value;
                 * filter.MinWidth = (int)numericUpDown5.Value;
                 * filter.MinHeight = (int)numericUpDown6.Value;
                 * filter.ApplyInPlace(temp);*/



                BlobCounter blobCounter = new BlobCounter();

                blobCounter.MinHeight    = 32;
                blobCounter.MinWidth     = 32;
                blobCounter.FilterBlobs  = true;
                blobCounter.ObjectsOrder = ObjectsOrder.Size;

                // 4 - find all stand alone blobs
                blobCounter.ProcessImage(temp);
                Blob[]             blobs        = blobCounter.GetObjectsInformation();
                SimpleShapeChecker shapeChecker = new SimpleShapeChecker();

                List <IntPoint> corners  = null;
                List <IntPoint> corners2 = null;
                for (int i = 0, n = blobs.Length; i < n; i++)
                {
                    List <IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);
                    // does it look like a quadrilateral ?
                    if (shapeChecker.IsQuadrilateral(edgePoints, out corners))
                    {
                        // get edge points on the left and on the right side
                        List <IntPoint> leftEdgePoints, rightEdgePoints;
                        blobCounter.GetBlobsLeftAndRightEdges(blobs[i],
                                                              out leftEdgePoints, out rightEdgePoints);
                        listBox1.DataSource = leftEdgePoints;
                        listBox2.DataSource = rightEdgePoints;
                    }
                }
                //listBox1.DataSource = corners;
                //listBox2.DataSource = corners2;
                this.pictureBox1.Image = temp;
                break;
            }

            case 2:
            {
                Bitmap bt2 = new Bitmap(@"D:\TCL条码\截图01.bmp");
                Bitmap bt1 = new Bitmap(@"D:\TCL条码\截图03.bmp");
                //Bitmap bt1 = new Bitmap(pictureBox2.Image);
                ExhaustiveTemplateMatching tm = new ExhaustiveTemplateMatching(0.80f);
                //get matching blocks based on a similarity threshold
                TemplateMatch[] matchings = tm.ProcessImage(bt1, bt2);
                BitmapData      data      = bt1.LockBits(
                    new Rectangle(0, 0, bt1.Width, bt1.Height),
                    ImageLockMode.ReadWrite, bt1.PixelFormat);
                foreach (TemplateMatch m in matchings)
                {
                    Drawing.Rectangle(data, m.Rectangle, Color.Red);
                }
                bt1.UnlockBits(data);
                pictureBox2.Image = bt1;
                break;
            }

            case 3:
            {
                Bitmap bt2 = new Bitmap(@"D:\TCL条码\Canny算法.png");
                AForge.Imaging.Filters.BlobsFiltering filter = new AForge.Imaging.Filters.BlobsFiltering();
                // set filter conditions (object width and height at least 70)
                filter.CoupledSizeFiltering = true;
                filter.MaxWidth             = (int)numericUpDown3.Value;
                filter.MaxHeight            = (int)numericUpDown4.Value;
                filter.MinWidth             = (int)numericUpDown5.Value;
                filter.MinHeight            = (int)numericUpDown6.Value;
                filter.ApplyInPlace(bt2);
                pictureBox1.Image = bt2;
                byte[] RESULT = BitmapToBytes(bt2);
                break;
            }

            case 4:
            {
                Bitmap temp = (Bitmap)pictureBox1.Image;
                temp = new Grayscale(0.2125, 0.7154, 0.0721).Apply(temp);
                AForge.Imaging.Filters.CannyEdgeDetector filter = new AForge.Imaging.Filters.CannyEdgeDetector();
                filter.ApplyInPlace(temp);
                pictureBox2.Image = temp;
                break;
            }
            }

            button2.Text = "处理";
        }
Example #13
 public CannyEdgeDetectProcessor()
     : base()
 {
     m_filter = new CannyEdgeDetector();
 }
Example #14
        // =========================================================
        private void Edge_detectFunc(ref Bitmap frame, int par_int)
        {
            frame = Grayscale.CommonAlgorithms.RMY.Apply(frame);	// Make gray
            switch (par_int) {
                case 1:
                    SobelEdgeDetector SobelFilter = new SobelEdgeDetector();
                    SobelFilter.ApplyInPlace(frame);
                    break;

                case 2:
                    DifferenceEdgeDetector DifferenceFilter = new DifferenceEdgeDetector();
                    DifferenceFilter.ApplyInPlace(frame);
                    break;

                case 3:
                    HomogenityEdgeDetector HomogenityFilter = new HomogenityEdgeDetector();
                    HomogenityFilter.ApplyInPlace(frame);
                    break;

                case 4:
                    // can we not have references to canny in the code. gives me ptsd flashbacks
                    CannyEdgeDetector Nightmare = new CannyEdgeDetector();
                    // apply the filter
                    Nightmare.ApplyInPlace(frame);
                    break;

                default:
                    HomogenityEdgeDetector filter = new HomogenityEdgeDetector();
                    filter.ApplyInPlace(frame);
                    break;
            }
            GrayscaleToRGB RGBfilter = new GrayscaleToRGB();	// back to color format
            frame = RGBfilter.Apply(frame);
        }
Example #15
        public Bitmap Detect(Bitmap bitmap)
        {
            Bitmap grayscaleBitmap = Grayscale.CommonAlgorithms.BT709.Apply(bitmap);

            IFilter smoothingFilter = null;
            switch (_smoothMode)
            {
                case "None": smoothingFilter = null; break;
                case "Mean": smoothingFilter = new Mean(); break;
                case "Median": smoothingFilter = new Median(); break;
                case "Conservative": smoothingFilter = new ConservativeSmoothing(); break;
                case "Adaptive": smoothingFilter = new AdaptiveSmoothing(); break;
                case "Bilateral": smoothingFilter = new BilateralSmoothing(); break;
            }
            Bitmap smoothBitmap = smoothingFilter != null ? smoothingFilter.Apply(grayscaleBitmap) : grayscaleBitmap;

            IFilter edgeFilter = null;
            switch (_edgeMode)
            {
                case "Homogenity": edgeFilter = new HomogenityEdgeDetector(); break;
                case "Difference": edgeFilter = new DifferenceEdgeDetector(); break;
                case "Sobel": edgeFilter = new SobelEdgeDetector(); break;
                case "Canny": edgeFilter = new CannyEdgeDetector(); break;
            }
            Bitmap edgeBitmap = edgeFilter != null ? edgeFilter.Apply(smoothBitmap) : smoothBitmap;

            IFilter thresholdFilter = new Threshold(_threshold);
            Bitmap thresholdBitmap = _threshold == 0 ? edgeBitmap : thresholdFilter.Apply(edgeBitmap);

            BlobCounter blobCounter = new BlobCounter();
            blobCounter.FilterBlobs = true;
            blobCounter.MinHeight = _minHeight;
            blobCounter.MinWidth = _minWidth;
            blobCounter.ProcessImage(thresholdBitmap);
            Blob[] blobs = blobCounter.GetObjectsInformation();

            Bitmap outputBitmap = new Bitmap(thresholdBitmap.Width, thresholdBitmap.Height, PixelFormat.Format24bppRgb);
            Graphics bitmapGraphics = Graphics.FromImage(outputBitmap);
            Bitmap inputBitmap = null;
            switch (_drawMode)
            {
                case "Original": inputBitmap = bitmap; break;
                case "Grayscale": inputBitmap = grayscaleBitmap; break;
                case "Smooth": inputBitmap = smoothBitmap; break;
                case "Edge": inputBitmap = edgeBitmap; break;
                case "Threshold": inputBitmap = thresholdBitmap; break;
            }
            if (inputBitmap != null)
                bitmapGraphics.DrawImage(inputBitmap, 0, 0);

            Pen nonConvexPen = new Pen(Color.Red, 2);
            Pen nonRectPen = new Pen(Color.Orange, 2);
            Pen cardPen = new Pen(Color.Blue, 2);

            SimpleShapeChecker shapeChecker = new SimpleShapeChecker();
            List<IntPoint> cardPositions = new List<IntPoint>();

            for (int i = 0; i < blobs.Length; i++)
            {
                List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);
                List<IntPoint> corners;

                if (shapeChecker.IsConvexPolygon(edgePoints, out corners))
                {
                    PolygonSubType subType = shapeChecker.CheckPolygonSubType(corners);

                    if ((subType == PolygonSubType.Parallelogram || subType == PolygonSubType.Rectangle) && corners.Count == 4)
                    {
                        // Check if it's sideways; if so, rearrange the corners so it's vertical.
                        RearrangeCorners(corners);

                        // Prevent detecting the same card twice by comparing distance against other detected cards.
                        bool sameCard = false;
                        foreach (IntPoint point in cardPositions)
                        {
                            if (corners[0].DistanceTo(point) < _minDistance)
                            {
                                sameCard = true;
                                break;
                            }
                        }
                        if (sameCard)
                            continue;

                        // Hack to prevent it from detecting smaller sections of the card instead of the whole card.
                        if (GetArea(corners) < _minArea)
                            continue;

                        cardPositions.Add(corners[0]);

                        bitmapGraphics.DrawPolygon(cardPen, ToPointsArray(corners));
                    }
                    else
                    {
                        foreach (IntPoint point in edgePoints.Take(300))
                        {
                            bitmapGraphics.DrawEllipse(nonRectPen, point.X, point.Y, 1, 1);
                        }
                    }
                }
                else
                {
                    foreach (IntPoint point in edgePoints.Take(300))
                    {
                        bitmapGraphics.DrawEllipse(nonConvexPen, point.X, point.Y, 1, 1);
                    }
                }
            }

            bitmapGraphics.Dispose();
            nonConvexPen.Dispose();
            nonRectPen.Dispose();
            cardPen.Dispose();

            return outputBitmap;
        }
Example #16
        /// <summary>
        /// <para>Pulls the image</para>
        /// <para>Runs the ocr on it</para>
        /// <para>fills in the blanks</para>
        /// <para>submits the page</para>
        /// </summary>
        /// <param name="challenge"></param>
        /// <param name="cancellationToken"></param>
        /// <param name="answer"></param>
        /// <returns></returns>
        private bool SolveCaptcha( Uri challenge, CancellationToken cancellationToken, out String answer ) {
            answer = null;
            var tesseractEngine = this.TesseractEngine;
            if ( null == tesseractEngine ) {
                return false;
            }

            var captchaData = this.PullCaptchaData( challenge );

            if ( captchaData.ImageUri == null ) {
                captchaData.Status = CaptchaStatus.NoImageFoundToBeSolved;
                this.UpdateCaptchaData( captchaData );
                return false;
            }

            Console.WriteLine( Resources.Uber_SolveCaptcha_Attempting_OCR_on__0_, captchaData.ImageUri.AbsolutePath );

            captchaData.Status = CaptchaStatus.SolvingImage;
            this.UpdateCaptchaData( captchaData );

            var folder = new Folder( Path.GetTempPath() );

            Document document;
            folder.TryGetTempDocument( document: out document, extension: "png" );

            this.PictureBoxChallenge.Image.Save( document.FullPathWithFileName, ImageFormat.Png );

            var aforgeImage = AForge.Imaging.Image.FromFile( document.FullPathWithFileName );

            var smoothing = new ConservativeSmoothing();

            var cannyEdgeDetector = new CannyEdgeDetector();

            // Apply() returns a new bitmap (the detector expects an 8 bpp grayscale source)
            aforgeImage = cannyEdgeDetector.Apply( aforgeImage );

            aforgeImage.Save( document.FullPathWithFileName, ImageFormat.Png );

            this.PictureBoxChallenge.ImageLocation = document.FullPathWithFileName;

            this.PictureBoxChallenge.Load();

            this.Throttle( Seconds.Ten );

            using ( var img = Pix.LoadFromFile( document.FullPathWithFileName ).Deskew() ) {

                using ( var page = tesseractEngine.Process( img, PageSegMode.SingleLine ) ) {

                    answer = page.GetText();

                    var paragraph = new Paragraph( answer );

                    answer = new Sentence( paragraph.ToStrings( " " ) ).ToStrings( " " );

                    FluentTimers.Create( Minutes.One, () => document.Delete() ).AndStart();

                    if ( !String.IsNullOrWhiteSpace( answer ) ) {
                        captchaData.Status = CaptchaStatus.SolvedChallenge;
                        this.UpdateCaptchaData( captchaData );
                        return true;
                    }

                    return false;
                }
            }
        }
Example #17
        // =========================================================
        private void Edge_detectFunc(ref Bitmap frame, int par_int, double par_d, int par_R, int par_G, int par_B)
        {
            frame = Grayscale.CommonAlgorithms.RMY.Apply(frame);	// Make gray
            switch (par_int)
            {
                case 1:
                    SobelEdgeDetector SobelFilter = new SobelEdgeDetector();
                    SobelFilter.ApplyInPlace(frame);
                    break;

                case 2:
                    DifferenceEdgeDetector DifferenceFilter = new DifferenceEdgeDetector();
                    DifferenceFilter.ApplyInPlace(frame);
                    break;

                case 3:
                    HomogenityEdgeDetector HomogenityFilter = new HomogenityEdgeDetector();
                    HomogenityFilter.ApplyInPlace(frame);
                    break;

                case 4:
                    CannyEdgeDetector Cannyfilter = new CannyEdgeDetector();
                    // apply the Canny filter
                    Cannyfilter.ApplyInPlace(frame);
                    break;

                default:
                    HomogenityEdgeDetector filter = new HomogenityEdgeDetector();
                    filter.ApplyInPlace(frame);
                    break;
            }
            GrayscaleToRGB RGBfilter = new GrayscaleToRGB();	// back to color format
            frame = RGBfilter.Apply(frame);
        }
Example #18
        private void videoNewFrame(object sender, NewFrameEventArgs args)
        {
            Bitmap temp = args.Frame.Clone() as Bitmap;

            switch (comboBox3.SelectedIndex)
            {
            case 0:

                break;

            case 1:
                temp = new Grayscale(0.2125, 0.7154, 0.0721).Apply(temp);
                break;

            case 2:
                temp = new Grayscale(0.2125, 0.7154, 0.0721).Apply(temp);
                temp = new Threshold((int)numericUpDown1.Value).Apply(temp);
                break;

            case 3:
                temp = new Grayscale(0.2125, 0.7154, 0.0721).Apply(temp);
                AForge.Imaging.Filters.CannyEdgeDetector filter = new AForge.Imaging.Filters.CannyEdgeDetector();
                filter.ApplyInPlace(temp);
                break;

            case 4:
                for (int i = 0; i < (int)numericUpDown2.Value; i++)
                {
                    temp = new Dilatation().Apply(temp);
                }
                break;

            case 5:
                for (int i = 0; i < (int)numericUpDown2.Value; i++)
                {
                    temp = new Erosion().Apply(temp);
                }
                break;

            case 6:
                temp = new Grayscale(0.2125, 0.7154, 0.0721).Apply(temp);
                AForge.Imaging.Filters.BradleyLocalThresholding filter1 = new AForge.Imaging.Filters.BradleyLocalThresholding();
                filter1.ApplyInPlace(temp);
                break;
            }

            switch (comboBox2.SelectedIndex)
            {
            case 0:
            {
                BarcodeReader reader = new BarcodeReader();
                reader.Options.CharacterSet = "UTF-8";
                Result result = reader.Decode(temp);
                if (result != null)
                {
                    if (wait == false)
                    {
                        MessageBox.Show(result.ToString());
                        wait = true;
                    }
                }
                else
                {
                    wait = false;
                }
                break;
            }

            case 1:
            {
                /*Bitmap pImg = MakeGrayscale3((Bitmap)temp);
                 * using (ZBar.ImageScanner scanner = new ZBar.ImageScanner())
                 * {
                 *  scanner.SetConfiguration(ZBar.SymbolType.None, ZBar.Config.Enable, 0);
                 *  scanner.SetConfiguration(ZBar.SymbolType.CODE39, ZBar.Config.Enable, 1);
                 *  scanner.SetConfiguration(ZBar.SymbolType.CODE128, ZBar.Config.Enable, 1);
                 *
                 *  List<ZBar.Symbol> symbols = new List<ZBar.Symbol>();
                 *  symbols = scanner.Scan((System.Drawing.Image)pImg);
                 *  if (symbols != null && symbols.Count > 0)
                 *  {
                 *      string result = string.Empty;
                 *      symbols.ForEach(s => result += s.Data);
                 *      if (wait == false)
                 *      {
                 *          MessageBox.Show(result);
                 *          wait = true;
                 *      }
                 *  }
                 *  else
                 *      wait = false;
                 * }*/
                break;
            }

            case 2:
            {
                break;
            }

            case 3:
                break;
            }
            this.pictureBox1.Image = ResizeBitmap(temp);
        }