Example No. 1
        ///''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''
        public static void preprocess(Mat imgOriginal, ref Mat imgGrayscale, ref Mat imgThresh)
        {
            imgGrayscale = extractValue(imgOriginal);
            //extract value channel only from original image to get imgGrayscale

            Mat imgMaxContrastGrayscale = imgGrayscale;
            //Mat imgMaxContrastGrayscale = maximizeContrast(imgGrayscale);
            //maximize contrast with top hat and black hat

            Mat imgBlurred = new Mat();

            CvInvoke.GaussianBlur(imgMaxContrastGrayscale, imgBlurred, new Size(GAUSSIAN_BLUR_FILTER_SIZE, GAUSSIAN_BLUR_FILTER_SIZE), 0.6);
            //gaussian blur

            //adaptive threshold to get imgThresh
            CvInvoke.AdaptiveThreshold(imgBlurred, imgThresh, 255.0, AdaptiveThresholdType.GaussianC, ThresholdType.BinaryInv, ADAPTIVE_THRESH_BLOCK_SIZE, ADAPTIVE_THRESH_WEIGHT);

            MCvScalar tempVal = CvInvoke.Mean(imgBlurred);
            double    average = tempVal.V0;
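            // NOTE: average is computed here but never used; the Otsu call below
            // also overwrites the adaptive-threshold result already in imgThresh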

            CvInvoke.Threshold(imgBlurred, imgThresh, 0, 255.0, Emgu.CV.CvEnum.ThresholdType.Otsu);
            CvInvoke.Erode(imgThresh, imgThresh, null, Point.Empty, 1, BorderType.Default, new MCvScalar(0));
            CvInvoke.Dilate(imgThresh, imgThresh, null, Point.Empty, 1, BorderType.Default, new MCvScalar(0));



            CvInvoke.BitwiseNot(imgThresh, imgThresh);
        }
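A minimal, self-contained sketch (not part of the example above) of the CvInvoke.AdaptiveThreshold call that every snippet on this page varies; the file names are placeholders and Emgu.CV 4.x namespaces are assumed.

        using System.Drawing;
        using Emgu.CV;
        using Emgu.CV.CvEnum;

        public static class AdaptiveThresholdSketch
        {
            public static void Run()
            {
                Mat gray   = CvInvoke.Imread("input.png", ImreadModes.Grayscale);
                Mat thresh = new Mat();

                // maxValue (255) is assigned to pixels that pass the test; blockSize (11)
                // must be odd and greater than 1; the constant C (2) is subtracted from the
                // Gaussian-weighted mean of each pixel's 11x11 neighbourhood
                CvInvoke.AdaptiveThreshold(gray, thresh, 255, AdaptiveThresholdType.GaussianC, ThresholdType.BinaryInv, 11, 2);
                CvInvoke.Imwrite("thresh.png", thresh);
            }
        }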
Example No. 2
        private void BubbleDetectBtn_Click(object sender, EventArgs e)
        {
            //Applying Operations on transformed Image
            transformedImage = transformedImage.Resize(400, 400, Emgu.CV.CvEnum.Inter.Linear);
            Image <Bgr, byte> transCopy = transformedImage.Copy();

            Emgu.CV.Util.VectorOfVectorOfPoint qtnVect = new Emgu.CV.Util.VectorOfVectorOfPoint();
            Image <Gray, byte> qtnGray = transCopy.Convert <Gray, byte>();
            Image <Gray, byte> copyG   = qtnGray.Copy();

            CvInvoke.GaussianBlur(qtnGray, qtnGray, new Size(5, 5), 0);
            CvInvoke.AdaptiveThreshold(qtnGray, qtnGray, 255, Emgu.CV.CvEnum.AdaptiveThresholdType.GaussianC, Emgu.CV.CvEnum.ThresholdType.Binary, 55, 9);
            CvInvoke.BitwiseNot(qtnGray, qtnGray);
            CvInvoke.FindContours(qtnGray, qtnVect, null, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple, default);


            //CIRCLE METHOD
            List <CircleF> circList = new List <CircleF>();

            Emgu.CV.Util.VectorOfVectorOfPoint test      = new Emgu.CV.Util.VectorOfVectorOfPoint();
            Emgu.CV.Util.VectorOfPoint         qtnApprox = new Emgu.CV.Util.VectorOfPoint();
            Dictionary <int, double>           qtnDict   = new Dictionary <int, double>();

            if (qtnVect.Size > 0)
            {
                for (int i = 0; i < qtnVect.Size; i++)
                {
                    double area = CvInvoke.ContourArea(qtnVect[i]);
                    if (area > 70)
                    {
                        qtnDict.Add(i, area);
                    }
                }
                var item = qtnDict.OrderByDescending(v => v.Value);  //.Take(1);

                Emgu.CV.Util.VectorOfPoint approxList = new Emgu.CV.Util.VectorOfPoint();

                foreach (var it in item)
                {
                    int    key  = it.Key;  // the dictionary key is already an int
                    double peri = CvInvoke.ArcLength(qtnVect[key], true);
                    CvInvoke.ApproxPolyDP(qtnVect[key], qtnApprox, 0.02 * peri, true);

                    if (qtnApprox.Size > 6)   // more than six vertices: treat the contour as roughly circular
                    {
                        CircleF circle = CvInvoke.MinEnclosingCircle(qtnVect[key]);
                        Point   centre = new Point();
                        centre.X = (int)circle.Center.X;
                        centre.Y = (int)circle.Center.Y;
                        CvInvoke.Circle(transformedImage, centre, (int)circle.Radius, new MCvScalar(0, 255, 0), 2, Emgu.CV.CvEnum.LineType.Filled, 0);
                        //break;
                    }
                }
                MessageBox.Show("Bubbles Detected");
                bubbleImage.Image = transformedImage;
            }
        }
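Example No. 2 accepts any contour whose polygonal approximation has more than six vertices as a bubble. A common alternative is a circularity test, 4·π·area / perimeter², which approaches 1.0 for a perfect circle; a hedged sketch, not taken from the example:

        // Hypothetical helper: true when a contour is nearly circular.
        private static bool IsRoughlyCircular(Emgu.CV.Util.VectorOfPoint contour, double minCircularity = 0.8)
        {
            double area      = CvInvoke.ContourArea(contour);
            double perimeter = CvInvoke.ArcLength(contour, true);

            if (perimeter <= 0)
            {
                return false;
            }
            return 4.0 * Math.PI * area / (perimeter * perimeter) >= minCircularity;
        }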
Example No. 3
        /// <summary>
        /// Pass the image through multiple filters and sort contours
        /// </summary>
        /// <param name="img">The image that will be processed</param>
        /// <returns>A list of Mat ROIs</returns>
        private static List <Mat> ImageProccessing(Mat img)
        {
            //Resize the image for better uniformity throughout the code
            CvInvoke.Resize(img, img, new Size(700, 500));

            Mat imgClone = img.Clone();

            //Convert the image to grayscale
            CvInvoke.CvtColor(img, img, ColorConversion.Bgr2Gray);

            //Blur the image
            CvInvoke.GaussianBlur(img, img, new Size(5, 5), 8, 8);

            //Threshold the image
            CvInvoke.AdaptiveThreshold(img, img, 30, AdaptiveThresholdType.GaussianC, ThresholdType.Binary, 5, 6);

            //Canny the image
            CvInvoke.Canny(img, img, 8, 8);

            //Dilate the canny image
            CvInvoke.Dilate(img, img, null, new Point(-1, -1), 8, BorderType.Constant, new MCvScalar(0, 255, 255));

            //Filter the contours to only find relevant ones
            List <Mat> foundOutput = FindandFilterContours(imgClone, img);

            return(foundOutput);
        }
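FindandFilterContours is not shown with this example. A sketch of what such a filter typically does (an assumption, not the author's implementation), keeping the largest external contours by area; the usual System.Linq and System.Collections.Generic usings are assumed:

        // Assumed stand-in for FindandFilterContours: keep the N largest external contours.
        private static List <Emgu.CV.Util.VectorOfPoint> LargestContours(Mat binary, int keep = 5)
        {
            var contours = new Emgu.CV.Util.VectorOfVectorOfPoint();

            CvInvoke.FindContours(binary, contours, null, RetrType.External, ChainApproxMethod.ChainApproxSimple);

            var found = new List <Emgu.CV.Util.VectorOfPoint>();
            for (int i = 0; i < contours.Size; i++)
            {
                found.Add(contours[i]);
            }
            return found.OrderByDescending(c => CvInvoke.ContourArea(c)).Take(keep).ToList();
        }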
Example No. 4
        public void GridDetection()
        {
            // work on a clone of the original (gray-scale) image
            Mat image = originalImage.Mat.Clone();

            // blur the image
            CvInvoke.GaussianBlur(image, image, new Size(11, 11), 0);

            // threshold the image
            CvInvoke.AdaptiveThreshold(image, image, 255, AdaptiveThresholdType.MeanC, ThresholdType.Binary, 5, 2);
            CvInvoke.BitwiseNot(image, image);
            Mat kernel = new Mat(new Size(3, 3), DepthType.Cv8U, 1);

            Marshal.Copy(new byte[] { 0, 1, 0, 1, 1, 1, 0, 1, 0 }, 0, kernel.DataPointer, 9);
            CvInvoke.Dilate(image, image, kernel, new Point(-1, -1), 1, BorderType.Default, new MCvScalar(255));
            FindOuterGridByFloorFill(image);
            CvInvoke.Erode(image, image, kernel, new Point(-1, -1), 1, BorderType.Default, new MCvScalar(255));
            ImageShowCase.ShowImage(image, "biggest blob");
            VectorOfPointF lines = new VectorOfPointF();

            CvInvoke.HoughLines(image, lines, 1, Math.PI / 180, 200);


            // merging lines
            PointF[] linesArray = lines.ToArray();
            //MergeLines(linesArray, image);
            lines = RemoveUnusedLine(linesArray);

            Mat harrisResponse = new Mat(image.Size, DepthType.Cv8U, 1);

            CvInvoke.CornerHarris(image, harrisResponse, 5);

            DrawLines(lines.ToArray(), image);
            ImageShowCase.ShowImage(image, "corners");
        }
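The 3x3 cross kernel that Example No. 4 builds by hand with Marshal.Copy ({ 0, 1, 0, 1, 1, 1, 0, 1, 0 }) is exactly what GetStructuringElement produces for ElementShape.Cross, so the kernel can equivalently be created in one call:

        Mat kernel = CvInvoke.GetStructuringElement(ElementShape.Cross, new Size(3, 3), new Point(-1, -1));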
Example No. 5
        private void PreProcessImage()
        {
            if (ImagesPaths.Count < 1)
            {
                MessageBox.Show("No Image To Process!");
                return;
            }
            for (int index = 0; index <= nImages - 1; index++)
            {
                Mat GrayImage    = ImagesReadCV[index].Clone();
                Mat BlurredImage = GrayImage.Clone();
                Mat AdaptThresh  = BlurredImage.Clone();

                if (GrayImage.NumberOfChannels != 3)
                {
                    MessageBox.Show("Invalid number of channels in the image!\nExpected 3 but got " + GrayImage.NumberOfChannels);
                    return;
                }
                CvInvoke.CvtColor(ImagesReadCV[index], GrayImage, ColorConversion.Bgr2Gray);
                CvInvoke.GaussianBlur(GrayImage, BlurredImage, new Size(3, 3), 1);
                CvInvoke.AdaptiveThreshold(BlurredImage, AdaptThresh, 255, AdaptiveThresholdType.GaussianC, ThresholdType.BinaryInv, 11, 2);
                ImagesReadCV[index] = AdaptThresh;
            }
            MessageBox.Show("Done!");
            ReloadImageBox();
        }
Example No. 6
    private void HandleGrab(object sender, EventArgs e)
    {
        Mat image = new Mat();

        if (capture.IsOpened)
        {
            capture.Retrieve(image);
        }
        if (image.IsEmpty)
        {
            return;
        }
        Mat grayImg = image.Clone();

        CvInvoke.CvtColor(image, grayImg, ColorConversion.Bgr2Gray);
        CvInvoke.AdaptiveThreshold(grayImg, grayImg, 255, AdaptiveThresholdType.MeanC, ThresholdType.BinaryInv, 21, 11);

        VectorOfInt            ids      = new VectorOfInt();
        VectorOfVectorOfPointF corners  = new VectorOfVectorOfPointF();
        VectorOfVectorOfPointF rejected = new VectorOfVectorOfPointF();

        ArucoInvoke.DetectMarkers(image, dico, corners, ids, arucoParam, rejected);

        if (ids.Size > 0)
        {
            ArucoInvoke.DrawDetectedMarkers(image, corners, ids, new MCvScalar(0, 0, 255));
        }

        CvInvoke.Imshow("Original", image);
        CvInvoke.Imshow("Gray", grayImg);
    }
Example No. 7
        public static Mat PreprocessImageForTesseract(Mat img)
        {
            int scalePercent = 18;
            int newWidth     = img.Width * scalePercent / 100;
            int newHeight    = img.Height * scalePercent / 100;

            CvInvoke.Resize(img, img, new System.Drawing.Size(newWidth, newHeight), interpolation: Inter.Area);
            img = ImageProcessor.ApplyBlur(img, 0, 3);
            Mat output = new Mat(img.Size, DepthType.Cv8U, 3);

            CvInvoke.CvtColor(img, img, ColorConversion.Bgr2Gray);
            //CvInvoke.EqualizeHist(img, img);
            CvInvoke.BitwiseNot(img, img);
            //img = ImageProcessor.CannyEdgeDetection(img, 20, 20);
            //img = ImageProcessor.ApplyErosion(img, 3);
            //CvInvoke.GaussianBlur(img, img, new System.Drawing.Size(3, 3), 0);
            CvInvoke.AdaptiveThreshold(img, img, 255, AdaptiveThresholdType.GaussianC, ThresholdType.Binary, 11, 2);
            CvInvoke.Threshold(img, output, 0, 255, ThresholdType.Otsu);//double ret =
            //output = ImageProcessor.ApplyErosion(output, 3);
            //CvInvoke.Threshold(output, output, ret, 255, ThresholdType.Binary);
            var kernel = CvInvoke.GetStructuringElement(Emgu.CV.CvEnum.ElementShape.Rectangle, new System.Drawing.Size(2, 2), new System.Drawing.Point(-1, -1));

            CvInvoke.Dilate(output, output, kernel, new System.Drawing.Point(-1, -1), 2, Emgu.CV.CvEnum.BorderType.Constant, default(MCvScalar));
            //output = ImageProcessor.ApplyDilation(output, 7);
            //CvInvoke.Invert()

            return(output);
        }
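Note that Example No. 7 feeds the adaptive-threshold result straight into a global Otsu threshold, so the two stages largely cancel each other. A hedged sketch of the more usual either/or choice, assuming gray is a single-channel 8-bit Mat:

        Mat  binary       = new Mat();
        bool evenLighting = false;   // assumption: chosen by the caller for the scene at hand

        if (evenLighting)
        {
            // one global threshold, picked automatically by Otsu's method
            CvInvoke.Threshold(gray, binary, 0, 255, ThresholdType.Otsu);
        }
        else
        {
            // a per-pixel threshold from the local neighbourhood, robust to uneven lighting
            CvInvoke.AdaptiveThreshold(gray, binary, 255, AdaptiveThresholdType.GaussianC, ThresholdType.Binary, 11, 2);
        }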
Example No. 8
        private void ImageThreshold()
        {
            if (grayImg != null)
            {
                try
                {
                    if (cboAdapEn.Checked)
                    {
                        CvInvoke.AdaptiveThreshold(grayImg, binImg, 255, (AdaptiveThresholdType)cboAdapType.SelectedItem,
                                                   (ThresholdType)comboBox1.SelectedItem,
                                                   Convert.ToInt32((2 * tbrMSize.Value) + 1),                   //block size must be an odd number greater than 1
                                                   tbrMValue.Value);
                    }
                    else
                    {
                        CvInvoke.Threshold(grayImg, binImg, th, 255, (Emgu.CV.CvEnum.ThresholdType)comboBox1.SelectedItem);
                    }
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.Message);
                    return;
                }

                pboGray.Image = binImg.ToBitmap(pboGray.Width, pboGray.Height);
            }
        }
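Example No. 8 maps the trackbar value to 2·v + 1, which guarantees the odd block size that AdaptiveThreshold requires. When the value comes from free-form input instead, a small guard (a sketch, not from the example) does the same job:

        // Hypothetical guard: force an odd block size of at least 3.
        private static int ToValidBlockSize(int requested)
        {
            int size = Math.Max(3, requested);

            return (size % 2 == 0) ? size + 1 : size;
        }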
Example No. 9
        public static Image <Gray, byte> TresholdingAdative(Image <Gray, byte> image, int blockSize = 11)
        {
            var tres = image.CopyBlank();

            CvInvoke.AdaptiveThreshold(image, tres, 255, AdaptiveThresholdType.GaussianC, ThresholdType.Binary, blockSize, -1);
            return(tres);
        }
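Example No. 9 passes -1 as the constant C, so each pixel's threshold becomes its neighbourhood mean plus one: only pixels brighter than their surroundings turn white. Assumed usage, where input is an existing Image<Gray, byte>, showing the effect of the block size:

        // smaller blocks pick up fine texture, larger blocks follow broad illumination
        Image <Gray, byte> fine   = TresholdingAdative(input, blockSize: 7);
        Image <Gray, byte> coarse = TresholdingAdative(input, blockSize: 31);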
Example No. 10
    public void HandleWebcamQueryFrame(object sender, EventArgs e)
    {
        if (webcam.IsOpened)
        {
            webcam.Retrieve(image);
        }
        if (image.IsEmpty)
        {
            return;
        }

        imageGray = image.Clone();
        CvInvoke.CvtColor(image, imageGray, ColorConversion.Bgr2Gray);
        if (imageGray.IsEmpty)
        {
            return;
        }

        frontFaces = frontFaceCascadeClassifier.DetectMultiScale(image: imageGray, scaleFactor: 1.1, minNeighbors: 5, minSize: new Size(MIN_FACE_SIZE, MIN_FACE_SIZE), maxSize: new Size(MAX_FACE_SIZE, MAX_FACE_SIZE));
        Debug.Log(frontFaces.Length.ToString());

        for (int i = 0; i < frontFaces.Length; i++)
        {
            CvInvoke.Rectangle(image, frontFaces[i], new MCvScalar(0, 180, 0), 0);
            Debug.Log("i: " + i.ToString());
        }

        //New matrix that focuses on the first detected face
        if (frontFaces.Length > 0)
        {
            image = new Mat(image, frontFaces[0]);
        }
        DisplayFrame(image);

        //Adaptive thresholding
        Mat hierarchy = new Mat();

        CvInvoke.AdaptiveThreshold(imageGray, imageGray, maxValue, AdaptiveThresholdType.MeanC, ThresholdType.Binary, blockSize, diviser);
        CvInvoke.FindContours(imageGray, allContours, hierarchy, RetrType.List, ChainApproxMethod.ChainApproxNone);

        desiredContours.Clear();
        for (int i = 0; i < allContours.Size; i++)
        {
            if (CvInvoke.ContourArea(allContours[i]) > contourSizeMin && CvInvoke.ContourArea(allContours[i]) < contourSizeMax)
            {
                desiredContours.Push(allContours[i]);
            }
        }

        CvInvoke.DrawContours(image, desiredContours, -1, new MCvScalar(200, 100, 200), 2);

        //RotatedRect rotatedRect;
        //rotatedRect = CvInvoke.MinAreaRect(biggestContour);

        //rotatedRect.GetVertices();

        CvInvoke.Imshow("Webcam view Normal", image);
        CvInvoke.Imshow("Webcam view Gray", imageGray);
    }
Example No. 11
        private Image <Gray, byte> preprocess(Image <Gray, byte> input)
        {
            Image <Gray, byte> copy = input.Copy();

            CvInvoke.Blur(copy, copy, new Size(5, 5), new Point(-1, -1));
            CvInvoke.AdaptiveThreshold(copy, copy, 255, Emgu.CV.CvEnum.AdaptiveThresholdType.GaussianC, Emgu.CV.CvEnum.ThresholdType.BinaryInv, 11, 2);
            return(copy);
        }
Example No. 12
        public static Image <Gray, Byte> GetAdaptiveThresholdedFrame(Image <Bgr, Byte> frame)
        {
            var result = new Mat();

            CvInvoke.CvtColor(frame, result, ColorConversion.Bgr2Gray);
            CvInvoke.AdaptiveThreshold(result, result, 255, AdaptiveThresholdType.GaussianC, ThresholdType.Binary, 15, -10);

            return(result.ToImage <Gray, Byte>());
        }
Example No. 13
 private void preencherImagemBinariaSemPlanoDeFundo()
 {
     mCopiaImagemPlanoDeFundo = mImagemDoPlanoDeFundo.Clone();
     CvInvoke.AbsDiff(mImagemColorida, mCopiaImagemPlanoDeFundo, mImagemSemPlanoDeFundo);
     CvInvoke.CvtColor(mImagemSemPlanoDeFundo, mImagemCinzaSemPlanoDeFundo, ColorConversion.Rgb2Gray);
     // CvInvoke.Threshold(mImagemCinzaSemPlanoDeFundo, mImagemBinariaSemPlanoDeFundo, ParametrosConstantes.LimiarTransformacaoParaCinza,
     //ParametrosConstantes.MaximoLimiarTransformacaoParaCinza, ThresholdType.Binary);
     CvInvoke.AdaptiveThreshold(mImagemCinzaSemPlanoDeFundo, mImagemBinariaSemPlanoDeFundo, ParametrosConstantes.MaximoLimiarTransformacaoParaCinza,
                                AdaptiveThresholdType.GaussianC, ThresholdType.Binary, 11, 3);
 }
Example No. 14
        private void button1_Click(object sender, EventArgs e)
        {
            Image <Gray, byte> img = new Bitmap(PbSegmentation.Image).ToImage <Gray, byte>();

            CvInvoke.AdaptiveThreshold(img, img, 255,
                                       Emgu.CV.CvEnum.AdaptiveThresholdType.GaussianC,
                                       Emgu.CV.CvEnum.ThresholdType.Binary, 11, 5);

            PbSegmentation.Image = img.ToBitmap();
            Tools.Histogram(ChartSegmentation, (Bitmap)PbSegmentation.Image);
        }
Example No. 15
        private void button3_Click(object sender, EventArgs e)
        {
            Image <Gray, Byte> imgbuff = new Image <Gray, byte>(imagetest.Width, imagetest.Height);

            CvInvoke.AdaptiveThreshold(imagetest, imgbuff, 255, AdaptiveThresholdType.MeanC, ThresholdType.BinaryInv, 3, 0);


            imagetest = imgbuff;

            imageBox2.Image = imagetest;
        }
Example No. 16
        private string[] ProcessSudokuImg(string fileName)  // --> extract this to a separate class!
        {
            int height = 450;
            int width  = 450;

            string[] digits = new string[81];

            Image <Bgr, byte> image = new Image <Bgr, byte>(fileName);

            image = image.Resize(width, height, Emgu.CV.CvEnum.Inter.Linear);

            Image <Gray, byte> grayImage = image.Convert <Gray, byte>();
            Image <Gray, byte> buffer    = grayImage.Copy();

            CvInvoke.GaussianBlur(grayImage, buffer, new System.Drawing.Size(5, 5), 1);
            grayImage = buffer;
            CvInvoke.AdaptiveThreshold(grayImage, buffer, 255, Emgu.CV.CvEnum.AdaptiveThresholdType.GaussianC, Emgu.CV.CvEnum.ThresholdType.Binary, 5, 2);
            grayImage = buffer;

            // Split image into 81 parts
            Image <Gray, byte>[] fields = new Image <Gray, byte> [81];

            for (int i = 0; i < 9; i++)
            {
                for (int j = 0; j < 9; j++)
                {
                    int border = 5;
                    System.Drawing.Rectangle rect = new System.Drawing.Rectangle(i * (width / 9) + border, j * (height / 9) + border, (width / 9) - 2 * border, (height / 9) - 2 * border);
                    grayImage.ROI = rect;
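                    // NOTE: i indexes columns (x) and j rows (y), so index = i * 9 + j
                    // fills the array column-by-column rather than in reading order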
                    var index = i * 9 + j;
                    fields[index] = grayImage.CopyBlank();
                    grayImage.CopyTo(fields[index]);
                    grayImage.ROI = System.Drawing.Rectangle.Empty;
                }
            }

            // Recognize digits
            using (TesseractEngine engine = new TesseractEngine(@"./tessdata", "eng", EngineMode.Default))
            {
                engine.SetVariable("tessedit_char_whitelist", "0123456789");
                int i = 0;  //iterator
                foreach (var field in fields)
                {
                    Page   page   = engine.Process(field.ToBitmap(), PageSegMode.SingleChar);
                    string result = page.GetText();
                    page.Dispose();
                    digits[i++] = result.Trim();
                    field.Dispose();
                }
            }
            image.Dispose(); grayImage.Dispose(); buffer.Dispose();
            return(digits);
        }
Example No. 17
        public static void preprocess(Mat imgOriginal, Mat imgGrayscale, Mat imgThresh) // rework into a constructor??????
        {
            imgGrayscale = extractValue(imgOriginal);
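            // NOTE: unlike Example No. 1, imgGrayscale is passed by value here,
            // so this assignment never reaches the caller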

            Mat imgMaxContrastGrayscale = maximizeContrast(imgGrayscale);

            Mat imgBlurred = new Mat();

            CvInvoke.GaussianBlur(imgMaxContrastGrayscale, imgBlurred, new Size(GAUSSIAN_BLUR_FILTER_SIZE, GAUSSIAN_BLUR_FILTER_SIZE), 0);

            CvInvoke.AdaptiveThreshold(imgBlurred, imgThresh, 255.0, AdaptiveThresholdType.GaussianC, ThresholdType.BinaryInv, ADAPTIVE_THRESH_BLOCK_SIZE, ADAPTIVE_THRESH_WEIGHT);
        }
Example No. 18
        /// <summary>
        /// Processes the infrared frame:
        /// 1. Thresholds the infrared image
        /// 2. Opens the thresholded image
        /// 3. Tracks reflective markers in the thresholded image.
        /// 4. Shows the infrared/thresholded image if the main window is present
        /// </summary>
        /// <param name="infraredFrameOrg">the InfraredFrame image</param>
        /// <param name="infraredFrameDimension">dimensions of the InfraredFrame image</param>
        /// <param name="depthFrame">the corresponding depth frame</param>
        private void ProcessIRFrame(Mat infraredFrameOrg, FrameDimension infraredFrameDimension, Mat depthFrame)
        {
            // init threshold image variable
            //            Image<Gray, Byte> thresholdImg = new Image<Gray, Byte>(infraredFrameDimension.Width, infraredFrameDimension.Height);


            using (Mat thresholdImg = new Mat(),
                   infraredFrameROI = new Mat(infraredFrameOrg, mask))

            {
                CvInvoke.Normalize(infraredFrameROI, thresholdImg, 0, 255, NormType.MinMax, DepthType.Cv8U);


                CvInvoke.AdaptiveThreshold(thresholdImg, thresholdImg, 255, AdaptiveThresholdType.GaussianC, ThresholdType.Binary, 13, -20);


                // perform opening

                CvInvoke.MorphologyEx(thresholdImg, thresholdImg, MorphOp.Dilate, kernel, new System.Drawing.Point(-1, -1), 2, BorderType.Constant, new MCvScalar(1.0));
                CvInvoke.MorphologyEx(thresholdImg, thresholdImg, MorphOp.Erode, kernel, new System.Drawing.Point(-1, -1), 1, BorderType.Constant, new MCvScalar(1.0));



                // find centroids of reflective surfaces and mark them on the image
                double[][] centroidPoints = GetvisibleData(thresholdImg, depthFrame);
                TrackedData(centroidPoints);



                // only generate writeable bitmap if the mainwindow is shown
                if (this.showWindow)
                {
                    // copy the processed image back into a writeable bitmap and dispose the EMGU image
                    if (thresholdedClicked)
                    {
                        using (DrawTrackedData(thresholdImg))
                        {
                            SetThresholdedInfraredImage(thresholdImg, infraredFrameDimension);
                        }
                    }
                    else
                    {
                        using (Mat colImg = DrawTrackedData(infraredFrameOrg))
                        {
                            //  CvInvoke.Normalize(colImg, colImg, 0, 255, NormType.MinMax, DepthType.Cv8U);
                            SetInfraredImage(colImg, infraredFrameDimension);
                        }
                    }
                }

                // cleanup
            }
        }
Example No. 19
        public static Bitmap getBinarizedBitmap(Bitmap bitmap)
        {
            var image         = new Image <Bgr, byte>(bitmap);
            var uimage        = new UMat();
            var pyrDown       = new UMat();
            var imageBynarize = image.Convert <Gray, Byte>();

            CvInvoke.CvtColor(image, uimage, ColorConversion.Bgr2Gray);
            CvInvoke.PyrDown(uimage, pyrDown);
            CvInvoke.PyrUp(pyrDown, uimage);
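            // NOTE: the pyramid-smoothed uimage is not used below; the threshold
            // runs on imageBynarize, which comes straight from the colour image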
            CvInvoke.AdaptiveThreshold(imageBynarize, imageBynarize, 255, AdaptiveThresholdType.GaussianC, ThresholdType.Binary, 255, 16);
            return(imageBynarize.ToBitmap(bitmap.Width, bitmap.Height));
        }
Example No. 20
        private void GetImageEdges()
        {
            CvInvoke.MedianBlur(workImage, workImage, 3);

            // ThresholdType.Binary, 11, 2
            CvInvoke.AdaptiveThreshold(workImage, workImage, 255, AdaptiveThresholdType.GaussianC, ThresholdType.BinaryInv, 101, 3);

            CvInvoke.MedianBlur(workImage, workImage, 3);

            workImage = workImage.MorphologyEx(MorphOp.Dilate, kernel, new Point(-1, -1), 1, BorderType.Default, new MCvScalar(1.0));

            CvInvoke.Canny(workImage, workImage, 80.0, 120.0);
        }
Example No. 21
    // Update is called once per frame
    void Update()
    {
        fluxVideo.Grab();
        Mat grey = new Mat();

        CvInvoke.CvtColor(image, grey, ColorConversion.Bgr2Gray);
        CvInvoke.AdaptiveThreshold(grey, grey, 255, AdaptiveThresholdType.GaussianC, ThresholdType.BinaryInv, 21, 11);
        CvInvoke.FindContours(grey, contours, m, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
        for (int i = 0; i < contours.Size; i++)
        {
            double perimeter = CvInvoke.ArcLength(contours[i], true);
            CvInvoke.ApproxPolyDP(contours[i], approx, 0.04 * perimeter, true);
            if (approx.Size == 4)
            {
                if (CvInvoke.ContourArea(contours[i]) > 300)
                {
                    var rect = CvInvoke.BoundingRectangle(approx);
                    if (rect.Height > 0.95 * rect.Width && rect.Height < 1.05 * rect.Width)   // keep only near-square candidates
                    {
                        candidates.Push(approx);
                        CvInvoke.DrawContours(image, contours, i, new MCvScalar(0, 255, 0), 4);
                        CvInvoke.Rectangle(image, rect, new MCvScalar(255, 0, 0));
                    }
                }
            }
        }
        for (int i = 0; i < candidates.Size; i++)
        {
            System.Drawing.PointF[] pts = new System.Drawing.PointF[4];
            pts[0] = new System.Drawing.PointF(0, 0);
            pts[1] = new System.Drawing.PointF(64 - 1, 0);
            pts[2] = new System.Drawing.PointF(64 - 1, 64 - 1);
            pts[3] = new System.Drawing.PointF(0, 64 - 1);
            VectorOfPointF perfect = new VectorOfPointF(pts);

            System.Drawing.PointF[] sample_pts = new System.Drawing.PointF[4];
            for (int ii = 0; ii < 4; ii++)
            {
                sample_pts[ii] = new System.Drawing.PointF(candidates[i][ii].X, candidates[i][ii].Y);
            }
            VectorOfPointF sample = new VectorOfPointF(sample_pts);
            var            tf     = CvInvoke.GetPerspectiveTransform(sample, perfect);

            Mat warped = new Mat();
            CvInvoke.WarpPerspective(image, warped, tf, new System.Drawing.Size(64, 64));
            CvInvoke.Imshow("yo", warped);
        }

        CvInvoke.WaitKey(24);
    }
Example No. 22
        public static Image <Gray, byte> AdaptiveThreshold
        (
            this Image <Gray, byte> inImage
            , double maxVal = 250
            , AdaptiveThresholdType adaptiveThresholdType = AdaptiveThresholdType.MeanC
            , ThresholdType thresholdType = ThresholdType.BinaryInv
            , int blockSize = 39
            , double param1 = 4)
        {
            var outImage = inImage.Copy();

            CvInvoke.AdaptiveThreshold
                (GaussBlur(inImage), outImage, maxVal, adaptiveThresholdType, thresholdType, blockSize, param1);
            return(outImage);
        }
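Example No. 22 packages the call as an extension method on Image<Gray, byte> (GaussBlur is an external helper that is not shown here). Assumed usage, where grayInput is an existing Image<Gray, byte>:

        // defaults: mean-based, inverse binary, 39x39 neighbourhood, C = 4, max value 250
        Image <Gray, byte> binary = grayInput.AdaptiveThreshold();
        Image <Gray, byte> custom = grayInput.AdaptiveThreshold(blockSize: 21, param1: 2);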
Example No. 23
 private void Button3_Click(object sender, EventArgs e)
 {
     imgGray = img.Convert <Gray, Byte>();
     //Converted to blurred
     CvInvoke.GaussianBlur(imgGray, imgGray, new Size(5, 5), 0);
     // using adaptive threshold
     CvInvoke.AdaptiveThreshold(imgGray, imgGray, 255, Emgu.CV.CvEnum.AdaptiveThresholdType.GaussianC, Emgu.CV.CvEnum.ThresholdType.BinaryInv, 75, 10);
     CvInvoke.Canny(imgGray, cannyImage, 75, 200);
     cannyImage.ConvertTo(imgGray, Emgu.CV.CvEnum.DepthType.Default, -1, 0);
     Emgu.CV.Util.VectorOfVectorOfPoint vector = new Emgu.CV.Util.VectorOfVectorOfPoint();
     CvInvoke.FindContours(cannyImage, vector, null, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
     CvInvoke.DrawContours(img, vector, -1, new MCvScalar(240, 0, 159), 3);
     MessageBox.Show("Question Part Detected");
     sheetDetectImage.Image = img;
 }
Example No. 24
        private static Image <Gray, byte> getBinarizedImage(Bitmap bitmap)
        {
            var image   = new Image <Bgr, byte>(bitmap);
            var uimage  = new UMat();
            var pyrDown = new UMat();

            CvInvoke.CvtColor(image, uimage, ColorConversion.Bgr2Gray);
            CvInvoke.PyrDown(uimage, pyrDown);
            CvInvoke.PyrUp(pyrDown, uimage);
            var imageBynarize = image.Convert <Gray, Byte>().PyrUp().PyrDown();

            CvInvoke.AdaptiveThreshold(imageBynarize, imageBynarize, 255, AdaptiveThresholdType.MeanC, ThresholdType.Binary, 115, 4);
            CvInvoke.Threshold(image, image, 200, 255, ThresholdType.Otsu); // NOTE: Otsu expects a single-channel image, so this call fails on a Bgr Mat; its result is also never used
            return(imageBynarize);
        }
Example No. 25
        private void binarizeToolStripMenuItem_Click(object sender, EventArgs e)
        {
            imgGray           = imgInput.Convert <Gray, byte>();
            pictureBox1.Image = imgGray.ToBitmap();

            // Binarization
            imgBinarized = new Image <Gray, byte>(imgGray.Width, imgGray.Height, new Gray(0));

            //CvInvoke.Threshold(imgGray, imgBinarized, 50, 255, Emgu.CV.CvEnum.ThresholdType.Binary);
            //pictureBox2.Image = imgBinarized.ToBitmap();

            // adaptive threshold
            CvInvoke.AdaptiveThreshold(imgGray, imgBinarized, 255, Emgu.CV.CvEnum.AdaptiveThresholdType.GaussianC, Emgu.CV.CvEnum.ThresholdType.Binary, 5, 0.0);
            pictureBox2.Image = imgBinarized.ToBitmap();
        }
Example No. 26
        private static Mat Preprocess(Mat image, bool isPerfectShape)
        {
            Mat outerBox = new Mat(image.Size, DepthType.Cv8U, 3);

            CvInvoke.GaussianBlur(image, image, new Size(7, 7), 0);
            CvInvoke.AdaptiveThreshold(image, outerBox, 255, AdaptiveThresholdType.MeanC, ThresholdType.Binary, 5, 2);
            CvInvoke.BitwiseNot(outerBox, outerBox);

            if (!isPerfectShape || true)   // NOTE: the "|| true" makes this branch unconditional, so isPerfectShape is effectively ignored
            {
                var element = CvInvoke.GetStructuringElement(ElementShape.Cross, new Size(3, 3), new Point(-1, -1));
                CvInvoke.Dilate(outerBox, outerBox, element, new Point(-1, -1), 1, BorderType.Default, default(MCvScalar));
                CvInvoke.Erode(outerBox, outerBox, element, new Point(-1, -1), 1, BorderType.Default, default(MCvScalar));
            }
            return(outerBox);
        }
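The dilate-then-erode pair in Example No. 26 is a morphological closing, so with the same cross element it can be collapsed into a single call; a sketch of the equivalent:

        CvInvoke.MorphologyEx(outerBox, outerBox, MorphOp.Close, element, new Point(-1, -1), 1, BorderType.Default, default(MCvScalar));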
Example No. 27
        ///''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''
        public static void preprocess(Mat imgOriginal, ref Mat imgGrayscale, ref Mat imgThresh)
        {
            imgGrayscale = extractValue(imgOriginal);
            //extract value channel only from original image to get imgGrayscale

            Mat imgMaxContrastGrayscale = maximizeContrast(imgGrayscale);
            //maximize contrast with top hat and black hat

            Mat imgBlurred = new Mat();

            CvInvoke.GaussianBlur(imgMaxContrastGrayscale, imgBlurred, new Size(GAUSSIAN_BLUR_FILTER_SIZE, GAUSSIAN_BLUR_FILTER_SIZE), 0);
            //gaussian blur

            //adaptive threshold to get imgThresh
            CvInvoke.AdaptiveThreshold(imgBlurred, imgThresh, 255.0, AdaptiveThresholdType.GaussianC, ThresholdType.BinaryInv, ADAPTIVE_THRESH_BLOCK_SIZE, ADAPTIVE_THRESH_WEIGHT);
        }
Example No. 28
        private void AdaptiveThresholdingBarButtonItem_ItemClick(object sender, DevExpress.XtraBars.ItemClickEventArgs e)
        {
            Directory.CreateDirectory(@"D:\Eighth Semester\HandVeinPattern\RuntimeDirectory");

            Details.adaptivethreshold = new Mat();

            CvInvoke.AdaptiveThreshold(Details.whitebalance, Details.adaptivethreshold, 255, AdaptiveThresholdType.GaussianC, ThresholdType.Binary, 11, -3);

            Step3PictureEdit.Image = Details.adaptivethreshold.Bitmap;

            CvInvoke.Imwrite(@"D:\Eighth Semester\HandVeinPattern\RuntimeDirectory\AdaptiveThreshold.jpg", Details.adaptivethreshold);

            ImageProcessingProgressBarControl.PerformStep();

            ImageProcessingProgressBarControl.Update();
        }
Example No. 29
        public Bitmap AdaptiveBinarization(string imageSavePath, int sizeOfNbPixels, int subFromMean)
        {
            Mat img = CvInvoke.Imread(imageSavePath, 0);   // 0 loads the image as single-channel grayscale

            Mat output = new Mat();

            CvInvoke.AdaptiveThreshold(img, output, 255,
                                       AdaptiveThresholdType.GaussianC, ThresholdType.Binary, sizeOfNbPixels, subFromMean);

            CvInvoke.Imshow("src", img);
            CvInvoke.Imshow("Gaussian", output);

            Bitmap newMap = output.ToBitmap();

            newMap.Save("./Tesseract Ocr/testImage111.png");
            return(newMap);
        }
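Assumed usage of Example No. 29 from within its containing class; the path is a placeholder, sizeOfNbPixels must be odd, and subFromMean is the constant C subtracted from each local mean:

        Bitmap binarized = AdaptiveBinarization(@"C:\temp\scan.png", 15, 4);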
Example No. 30
        /// <summary>
        /// Pass the image through multiple filters and sort contours
        /// </summary>
        /// <param name="img">The image that will be processed</param>
        /// <returns>The processed image</returns>
        private static Mat ImageProccessing(Mat img)
        {
            //Resize the image for better uniformity throughout the code
            CvInvoke.Resize(img, img, new System.Drawing.Size(600, 800));

            Mat imgClone = img.Clone();

            //Convert the image to grayscale
            CvInvoke.CvtColor(img, img, ColorConversion.Bgr2Gray);

            //Blur the image
            CvInvoke.GaussianBlur(img, img, new System.Drawing.Size(3, 3), 4, 4);

            CvInvoke.Imshow("GaussianBlur", img);
            CvInvoke.WaitKey(0);

            //Threshold the image
            CvInvoke.AdaptiveThreshold(img, img, 100, AdaptiveThresholdType.GaussianC, ThresholdType.BinaryInv, 5, 6);

            CvInvoke.Imshow("Thereshold", img);
            CvInvoke.WaitKey(0);

/*            //Canny the image
 *          CvInvoke.Canny(img, img, 75, 100);
 *
 *          CvInvoke.Imshow("Canny", img);
 *          CvInvoke.WaitKey(0);*/

            /*            //Dilate the canny image
             *          CvInvoke.Dilate(img, img, null, new System.Drawing.Point(-1, -1), 8, BorderType.Constant, new MCvScalar(0, 255, 255));
             *
             *          CvInvoke.Imshow("Dilate", img);
             *          CvInvoke.WaitKey(0);*/

            //Filter the contours to only find relevant ones

            /*            List<Mat> foundOutput = FindandFilterContours(imgClone, img);
             *
             *          for (int i = 0; i < foundOutput.Count; i++)
             *          {
             *              CvInvoke.Imshow("Found Output", foundOutput[i]);
             *              CvInvoke.WaitKey(0);
             *          }*/

            return(img);
        }
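Note that Example No. 30 passes 100 as maxValue, so foreground pixels come out dark gray (level 100) rather than white; contour finding still works because it only needs non-zero pixels, but a conventional black-and-white mask would use 255 (a sketch):

        CvInvoke.AdaptiveThreshold(img, img, 255, AdaptiveThresholdType.GaussianC, ThresholdType.BinaryInv, 5, 6);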