Example 1
        private void EdgeDetectImg(ref Bitmap frame)
        {
            frame = Grayscale.CommonAlgorithms.RMY.Apply(frame);    // Make gray
            switch (EdgeDetectValue)
            {
            case 1:
                SobelEdgeDetector SobelFilter = new SobelEdgeDetector();
                SobelFilter.ApplyInPlace(frame);
                break;

            case 2:
                DifferenceEdgeDetector DifferenceFilter = new DifferenceEdgeDetector();
                DifferenceFilter.ApplyInPlace(frame);
                break;

            case 3:
                HomogenityEdgeDetector HomogenityFilter = new HomogenityEdgeDetector();
                HomogenityFilter.ApplyInPlace(frame);
                break;

            case 4:
                CannyEdgeDetector Cannyfilter = new CannyEdgeDetector();
                // apply the Canny filter
                Cannyfilter.ApplyInPlace(frame);
                break;

            default:
                HomogenityEdgeDetector filter = new HomogenityEdgeDetector();
                filter.ApplyInPlace(frame);
                break;
            }
            GrayscaleToRGB RGBfilter = new GrayscaleToRGB();    // back to color format

            frame = RGBfilter.Apply(frame);
        }
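
A minimal standalone sketch of the same pattern used above (grayscale, edge detection, then back to RGB so the result can be shown in a color PictureBox), assuming AForge.Imaging.Filters and a 24 bpp input bitmap; this is an illustration, not part of the original example:

        private static Bitmap DetectEdges(Bitmap source)
        {
            // the edge detectors only accept 8 bpp grayscale images
            Bitmap gray = Grayscale.CommonAlgorithms.BT709.Apply(source);

            // apply Canny in place on the grayscale copy
            new CannyEdgeDetector().ApplyInPlace(gray);

            // convert back to 24 bpp RGB for display
            return new GrayscaleToRGB().Apply(gray);
        }
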
Example 2
        private void timerSampleMaker_Tick(object sender, EventArgs e)
        {
            if (_sourceImage != null)
            {
                try
                {
                    // Clone the current source image
                    Bitmap _sample = (Bitmap)AForge.Imaging.Image.Clone(_sourceImage);

                    // We must convert it to grayscale because
                    // the filter accepts 8 bpp grayscale images

                    Grayscale GF           = new Grayscale(0.2125, 0.7154, 0.0721);
                    Bitmap    GSampleImage = GF.Apply(_sample);

                    // Detecting image edges and saving the result
                    CannyEdgeDetector CED = new CannyEdgeDetector(0, 70);
                    CED.ApplyInPlace(GSampleImage);

                    // use the edge-detected grayscale image directly
                    Bitmap _temp = GSampleImage;
                    pictureBoxSource.Image = _temp;

                    ProcessByImage(_temp);
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.Message);
                }
            }
        }
        /// <summary>
        /// Classify user's image
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void buttonClassify_Click(object sender, EventArgs e)
        {
            //Get the user's image for recognition
            Bitmap image = (Bitmap)(pictureBox1.Image);

            //Create Canny detector for contours
            CannyEdgeDetector filterCanny = new CannyEdgeDetector();

            //Detect contours in the image
            filterCanny.ApplyInPlace(image);

            //Transform the image into a feature vector
            double[] featureVector = (bagOfContourFragments as ITransform <Bitmap, double[]>).Transform(image);

            //The SVM decides which class this image belongs to
            string animal = GetAnimalClass(this.multiSVM.Decide(featureVector));

            //Display this information to the user
            label2.Text = "This is: " + animal + "?";

            //Show buttons so the user can confirm whether the detection was correct
            labelCorrect.Visible     = true;
            buttonCorrectYes.Visible = true;
            buttonCorrectNo.Visible  = true;
        }
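
GetAnimalClass is not shown in this example; a hypothetical sketch of such a helper (the class labels and their order are assumptions) could simply map the SVM class index returned by Decide to a name:

        //Hypothetical helper: map the SVM class index to a label (labels are assumed)
        private string GetAnimalClass(int classIndex)
        {
            string[] labels = { "cat", "dog", "horse" };
            return (classIndex >= 0 && classIndex < labels.Length) ? labels[classIndex] : "unknown";
        }
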
Example 4
        void CannyToolStripMenuItemClick(object sender, EventArgs e)
        {
            //if gambar is empty/null, return immediately
            if (gambar == null)
            {
                return;
            }
            //clone gambar into gambar2 over a Rectangle covering the whole image,
            //converting to the 24bppRgb pixel format
            gambar2 = gambar.Clone(new Rectangle(0, 0, gambar.Width, gambar.Height),
                                   System.Drawing.Imaging.PixelFormat.Format24bppRgb);
            //convert to grayscale first, then apply the Canny filter.
            //initialize the grayscale filter with its coefficients
            Grayscale grayscaleF = new Grayscale(0.2125, 0.7154, 0.0721);

            //apply the grayscale filter to gambar2
            gambar2 = grayscaleF.Apply(gambar2);
            //initialize the Canny filter
            CannyEdgeDetector cannyEdgeDetector = new CannyEdgeDetector();

            //apply the filter to gambar2
            cannyEdgeDetector.ApplyInPlace(gambar2);
            //display the result in pictureBox2
            pictureBox2.Image = gambar2;
        }
        private Bitmap GetEdgedImage(WriteableBitmap writeableBitmap)
        {
            var edgeFilter   = new CannyEdgeDetector(255, 0);
            var smoothFilter = new Median();
            var grayFilter   = new Grayscale(0.2125, 0.7154, 0.0721);
            var bitmap       = (Bitmap)writeableBitmap;

            bitmap = grayFilter.Apply(bitmap);
            smoothFilter.ApplyInPlace(bitmap);
            edgeFilter.ApplyInPlace(bitmap);

            return(bitmap);
        }
Example 6
        public static List <Rectangle> FindRectangles(Bitmap source)
        {
            Bitmap            canny        = Grayscale.CommonAlgorithms.RMY.Apply(source);
            CannyEdgeDetector edgeDetector = new CannyEdgeDetector(5, 20);

            edgeDetector.ApplyInPlace(canny);

            BlobCounter blobCounter = new BlobCounter
            {
                FilterBlobs = true,
                MinWidth    = 5,
                MinHeight   = 5
            };

            blobCounter.ProcessImage(canny);
            Blob[]           blobs      = blobCounter.GetObjectsInformation();
            List <Rectangle> rectangles = new List <Rectangle>();

            SimpleShapeChecker shapeChecker = new SimpleShapeChecker();

            for (int i = 0; i < blobs.Length; i++)
            {
                List <IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);

                if (shapeChecker.IsConvexPolygon(edgePoints, out List <IntPoint> corners))
                {
                    if (blobs[i].Rectangle.Width > canny.Width / 5.3 &&
                        blobs[i].Rectangle.Width < canny.Width / 4.41 &&
                        blobs[i].Rectangle.Height < canny.Height / 10.3 &&
                        blobs[i].Rectangle.Height > canny.Height / 20.5)
                    {
                        rectangles.Add(blobs[i].Rectangle);
                    }
                }
            }

            // sort by area (ascending)
            rectangles.Sort((a, b) =>
            {
                if (a.Width * a.Height > b.Width * b.Height)
                {
                    return(1);
                }
                else if (a.Width * a.Height < b.Width * b.Height)
                {
                    return(-1);
                }
                return(0);
            });

            return(rectangles);
        }
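
A hedged usage sketch for FindRectangles (the file paths are assumptions; the drawing step mirrors the debug output in the variant further below):

        Bitmap source = AForge.Imaging.Image.FromFile("frame.png");   // assumed input path
        List <Rectangle> found = FindRectangles(source);

        using (Graphics g = Graphics.FromImage(source))
        using (Pen red = new Pen(Color.Red, 2))
        {
            foreach (Rectangle rect in found)
            {
                g.DrawRectangle(red, rect);
            }
        }
        source.Save("rectangles.png");   // assumed output path
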
Example 7
        void FinalFrame_NewFrame(object sender, NewFrameEventArgs eventArgs)
        {
            video = (Bitmap)eventArgs.Frame.Clone();
            Bitmap video2 = (Bitmap)eventArgs.Frame.Clone();

            if (mode == 1)
            {
                Grayscale         gray   = new Grayscale(0.2125, 0.7154, 0.0721);
                Bitmap            video3 = gray.Apply(video2);
                CannyEdgeDetector canny  = new CannyEdgeDetector(0, 70);
                canny.ApplyInPlace(video3);
                pictureBox2.Image = video3;
            }
            pictureBox1.Image = video;
        }
        /// <summary>
        /// Called when videoPlayer receives a new frame.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="image"></param>
        private void videoPlayer_NewFrame(object sender, ref Bitmap image)
        {
            // convert image to grayscale
            var gray  = new GrayscaleBT709();
            var frame = gray.Apply(image);

            // threshold image to only keep light pixels
            var threshold = new Threshold(125);

            threshold.ApplyInPlace(frame);

            // blank out everything but the road
            var horizonY = (int)(image.Height * 0.65);
            var fill     = new CanvasFill(new Rectangle(0, 0, image.Width, horizonY), Color.Red);

            fill.ApplyInPlace(frame);

            // detect edges
            var edgeDetector = new CannyEdgeDetector();

            edgeDetector.ApplyInPlace(frame);

            // do a hough line transformation, which will search for straight lines in the frame
            var transform = new HoughLineTransformation();

            transform.ProcessImage(frame);
            var rawLines = transform.GetMostIntensiveLines(50);

            // only keep non-horizontal lines that cross the horizon at the vanishing point
            var lines = from l in rawLines
                        let range = new Range(-75, -65)
                        where range.IsInside(l.Radius) &&
                              (l.Theta <= 85 || l.Theta >= 95)
                        select l;

            // show the edge detection view in the bottom left box
            edgeBox.Image = frame;

            // show the lane detection view in the bottom right box
            var laneImg = new Bitmap(image.Width, image.Height);

            Utility.DrawHoughLines(lines, laneImg, Color.White, 1);
            laneBox.Image = laneImg;

            // draw the lanes on the main camera image too
            Utility.DrawHoughLines(lines, image, Color.LightGreen, 2);
        }
 public bool ApplyCannyEdgeDetector()
 {
     if (currentImage != null)
     {
         try
         {
             CannyEdgeDetector filter = new CannyEdgeDetector();
             filter.ApplyInPlace(currentImage);
             return(true);
         }
         catch (Exception e)
         {
             MessageBox.Show(e.ToString());
         }
     }
     return(false);
 }
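
 ApplyCannyEdgeDetector only succeeds when currentImage is already an 8 bpp grayscale bitmap; otherwise ApplyInPlace throws and the method returns false. A sketch of a conversion step that could run beforehand (currentImage is the class field used above; the check itself is an assumption):

     // Sketch: make sure currentImage is 8 bpp grayscale before running Canny
     if (currentImage != null &&
         currentImage.PixelFormat != System.Drawing.Imaging.PixelFormat.Format8bppIndexed)
     {
         currentImage = Grayscale.CommonAlgorithms.BT709.Apply(currentImage);
     }
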
        private WriteableBitmap FindPlate(IEnumerable <Rect> rects, WriteableBitmap image)
        {
            WriteableBitmap bestCandidate = null;

            foreach (var rect in rects)
            {
                var croppedImage = image.Crop(rect);
                var edgeFilter   = new CannyEdgeDetector();
                var smoothFilter = new Median();
                var grayFilter   = new Grayscale(0.2125, 0.7154, 0.0721);
                var blobCounter  = new BlobCounter();
                var cutTop       = croppedImage.PixelHeight * 0.3;

                croppedImage = croppedImage.Crop(new Rect(0, cutTop, croppedImage.PixelWidth, croppedImage.PixelHeight));

                var bitmap    = (Bitmap)croppedImage;
                var grayImage = grayFilter.Apply(bitmap);

                bitmap = smoothFilter.Apply(grayImage);
                edgeFilter.ApplyInPlace(bitmap);
                blobCounter.ProcessImage(bitmap);

                var blobs         = blobCounter.GetObjectsInformation();
                var possibleChars = new List <Rectangle>();

                foreach (var blob in blobs)
                {
                    var objRectangle = blob.Rectangle;
                    var ratio        = (double)objRectangle.Height / (double)objRectangle.Width;

                    if (ratio >= 1.16d && ratio <= 6.3d)
                    {
                        possibleChars.Add(objRectangle);
                    }
                }

                if (possibleChars.Count == 0)
                {
                    continue;
                }

                bestCandidate = croppedImage;
            }

            return(bestCandidate);
        }
        public void HughCircleTransform()
        {
            Grayscale filterGrayscale = new Grayscale(0.2125, 0.7154, 0.0721);

            ImageBitmap8pp = filterGrayscale.Apply(ImageBitmap);

            Blur filterBlur = new Blur();

            filterBlur.ApplyInPlace(ImageBitmap8pp);

            //ContrastStretch filterContrast = new ContrastStretch();
            //filterContrast.ApplyInPlace(ImageBitmap8pp);

            ImageBitmap8pp.Save("test0.bmp");

            CannyEdgeDetector filter = new CannyEdgeDetector();

            filter.ApplyInPlace(ImageBitmap8pp);

            HoughCircleTransformation circleTransform = new HoughCircleTransformation(35);

            circleTransform.LocalPeakRadius = 300;
            circleTransform.ProcessImage(ImageBitmap8pp);
            Bitmap houghCirlceImage = circleTransform.ToBitmap();

            HoughCircle[] circles = circleTransform.GetCirclesByRelativeIntensity(0.6);

            foreach (HoughCircle circle in circles)
            {
                // dispose the pen and the graphics object after each circle is drawn
                using (Pen redPen = new Pen(System.Drawing.Color.Red, 3))
                using (var graphics = Graphics.FromImage(ImageBitmap))
                {
                    graphics.DrawEllipse(redPen, circle.X - circle.Radius, circle.Y - circle.Radius, circle.Radius * 2, circle.Radius * 2);
                }
            }

            ImageBitmap8pp.Save("test1.bmp");
            houghCirlceImage.Save("test2.bmp");
            ImageBitmap.Save("test3.bmp");
        }
Example 12
        /// <summary>
        /// Detect the highway lane boundaries.
        /// </summary>
        /// <param name="image">The camera frame to process</param>
        /// <returns>The detected lane lines in the frame</returns>
        private HoughLine[] DetectLaneLines(Bitmap image)
        {
            // convert image to grayscale
            var frame = Grayscale.CommonAlgorithms.BT709.Apply(image);

            // threshold image to only keep light pixels
            var threshold = new Threshold(125);

            threshold.ApplyInPlace(frame);

            // blank out everything but the road
            var horizonY = (int)(image.Height * 0.65);
            var fill     = new CanvasFill(new Rectangle(0, 0, image.Width, horizonY), Color.Red);

            fill.ApplyInPlace(frame);

            // detect edges
            var edgeDetector = new CannyEdgeDetector();

            edgeDetector.ApplyInPlace(frame);

            // do a hough line transformation, which will search for straight lines in the frame
            var transform = new HoughLineTransformation();

            transform.ProcessImage(frame);
            var rawLines = transform.GetMostIntensiveLines(50);

            // only keep non-horizontal lines that cross the horizon at the vanishing point
            var lines = from l in rawLines
                        let range = new Range(-75, -65)
                        where range.IsInside(l.Radius) &&
                              (l.Theta <= 85 || l.Theta >= 95)
                        select l;

            // show the edge detection view in the bottom left box
            edgeBox.Image = (Bitmap)frame.Clone();

            // return lines
            return(lines.ToArray());
        }
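
Utility.DrawHoughLines is project-specific and not shown here; a sketch of how HoughLine results can be drawn, following the (radius, theta) conversion pattern from the AForge HoughLineTransformation documentation (the method name, pen, and target bitmap are assumptions):

        private static void DrawDetectedLines(HoughLine[] lines, Bitmap target, Color color, int thickness)
        {
            // AForge Hough coordinates are measured relative to the image center
            int w2 = target.Width / 2;
            int h2 = target.Height / 2;

            using (Graphics g = Graphics.FromImage(target))
            using (Pen pen = new Pen(color, thickness))
            {
                foreach (HoughLine line in lines)
                {
                    double t = line.Theta;
                    double r = line.Radius;

                    if (r < 0)
                    {
                        // line is in the lower half of the image
                        t += 180;
                        r = -r;
                    }
                    t = (t / 180) * Math.PI;    // degrees to radians

                    double x0, x1, y0, y1;
                    if (line.Theta != 0)
                    {
                        // non-vertical line: sweep x across the full width
                        x0 = -w2;
                        x1 = w2;
                        y0 = (-Math.Cos(t) * x0 + r) / Math.Sin(t);
                        y1 = (-Math.Cos(t) * x1 + r) / Math.Sin(t);
                    }
                    else
                    {
                        // vertical line
                        x0 = x1 = line.Radius;
                        y0 = h2;
                        y1 = -h2;
                    }

                    g.DrawLine(pen,
                               (int)x0 + w2, h2 - (int)y0,
                               (int)x1 + w2, h2 - (int)y1);
                }
            }
        }
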
Example 13
        private void btnMatch_Click(object sender, EventArgs e)
        {
            OpenFileDialog _openFile = new OpenFileDialog();

            if (_openFile.ShowDialog() == System.Windows.Forms.DialogResult.OK)
            {
                string   FileName    = _openFile.FileName;
                FileInfo _sourceFile = new FileInfo(FileName);

                // Loading some file
                using (Bitmap SampleImage = (Bitmap)AForge.Imaging.Image.FromFile(FileName))
                {
                    // We must convert it to grayscale because
                    // the filter accepts 8 bpp grayscale images

                    Grayscale GF = new Grayscale(0.2125, 0.7154, 0.0721);
                    using (Bitmap GSampleImage = GF.Apply(SampleImage))
                    {
                        // Detecting image edges and saving the result
                        CannyEdgeDetector CED = new CannyEdgeDetector(0, 70);
                        CED.ApplyInPlace(GSampleImage);
                        GSampleImage.Save("tempEDGED" + _sourceFile.Name, System.Drawing.Imaging.ImageFormat.Jpeg);

                        //Create Template according to specific Size
                        string _sourceImagePath             = Application.StartupPath + "\\TemporaryImage\\";
                        System.Drawing.Bitmap _createdImage = new Bitmap("tempEDGED" + _sourceFile.Name);
                        System.Drawing.Size   _newSize      = new System.Drawing.Size(320, 240);
                        System.Drawing.Bitmap _testImage    = new Bitmap(_createdImage, _newSize);

                        FileInfo _templateFileInfo = new FileInfo(FileName);
                        _testImage.Save(_sourceImagePath + _templateFileInfo.Name, ImageFormat.Jpeg);

                        //Remove Temp Grayscale Image
                        File.Delete("testBW.jpg");

                        Process(_sourceImagePath + "\\" + _templateFileInfo.Name);
                    }
                }
            }
        }
        public Bitmap edgedetection(Bitmap n)
        {
            try
            {
                Bitmap            nn     = GreyImage(n);
                CannyEdgeDetector filter = new CannyEdgeDetector();
                // apply the filter
                filter.ApplyInPlace(nn);

                return(nn);
            }

            catch (AForge.Imaging.UnsupportedImageFormatException)
            {
                // create filter
                CannyEdgeDetector filter = new CannyEdgeDetector();
                // apply the filter
                filter.ApplyInPlace(n);

                return(n);
            }
        }
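
GreyImage is not shown in this example; a hypothetical sketch of such a helper (the name comes from the call above, the coefficients are assumed from the other examples) would be:

        //Hypothetical GreyImage helper: convert a color bitmap to 8 bpp grayscale
        public Bitmap GreyImage(Bitmap source)
        {
            Grayscale grayFilter = new Grayscale(0.2125, 0.7154, 0.0721);
            return grayFilter.Apply(source);
        }
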
Example 15
        public void CreateTemplate(string FileName)
        {
            UseWaitCursor = true;
            FileInfo _SourceImage = new FileInfo(FileName);

            // Loading some file
            using (Bitmap SampleImage = (Bitmap)AForge.Imaging.Image.FromFile(FileName))
            {
                // We must convert it to grayscale because
                // the filter accepts 8 bpp grayscale images
                Grayscale GF = new Grayscale(0.2125, 0.7154, 0.0721);
                using (Bitmap GSampleImage = GF.Apply(SampleImage))
                {
                    // Detecting image edges and saving the result
                    CannyEdgeDetector CED = new CannyEdgeDetector(0, 70);
                    CED.ApplyInPlace(GSampleImage);
                    GSampleImage.Save("testEDGED.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);

                    //Create Template according to specific Size
                    string _templateSource = Application.StartupPath + "\\Template\\";
                    System.Drawing.Bitmap _createdImage = new Bitmap("testEDGED.jpg");
                    System.Drawing.Size   _newSize      = new System.Drawing.Size(320, 240);
                    System.Drawing.Bitmap _testImage    = new Bitmap(_createdImage, _newSize);

                    FileInfo _templateFileInfo = new FileInfo(FileName);
                    _testImage.Save(_templateSource + _templateFileInfo.Name, ImageFormat.Jpeg);

                    lblCreatingStatus.Text   = "New Template Created";
                    pictureBoxTemplate.Image = _testImage;
                    pictureBoxSource.Image   = new Bitmap(FileName);

                    //Remove Temp Grayscale Image
                    File.Delete("testBW.jpg");
                }
            }
            UseWaitCursor = false;
        }
Example 16
        // =========================================================
        private void Edge_detectFunc(ref Bitmap frame, int par_int)
        {
            frame = Grayscale.CommonAlgorithms.RMY.Apply(frame);        // Make gray
            switch (par_int)
            {
            case 1:
                SobelEdgeDetector SobelFilter = new SobelEdgeDetector();
                SobelFilter.ApplyInPlace(frame);
                break;

            case 2:
                DifferenceEdgeDetector DifferenceFilter = new DifferenceEdgeDetector();
                DifferenceFilter.ApplyInPlace(frame);
                break;

            case 3:
                HomogenityEdgeDetector HomogenityFilter = new HomogenityEdgeDetector();
                HomogenityFilter.ApplyInPlace(frame);
                break;

            case 4:
                // can we not have references to canny in the code. gives me ptsd flashbacks
                CannyEdgeDetector Nightmare = new CannyEdgeDetector();
                // apply the filter
                Nightmare.ApplyInPlace(frame);
                break;

            default:
                HomogenityEdgeDetector filter = new HomogenityEdgeDetector();
                filter.ApplyInPlace(frame);
                break;
            }
            GrayscaleToRGB RGBfilter = new GrayscaleToRGB();    // back to color format

            frame = RGBfilter.Apply(frame);
        }
Example 17
        public static List <Rectangle> FindRectangles(Bitmap source)
        {
            string sourcePath = Path.Combine(Directory.GetCurrentDirectory(), "source.png");

            if (File.Exists(sourcePath))
            {
                File.Delete(sourcePath);
            }
#if DEBUG
            source.Save(sourcePath);
#endif


            Bitmap            canny        = Grayscale.CommonAlgorithms.RMY.Apply(source);
            CannyEdgeDetector edgeDetector = new CannyEdgeDetector(0, 20);
            edgeDetector.ApplyInPlace(canny);

            string cannyPath = Path.Combine(Directory.GetCurrentDirectory(), "canny.png");
            if (File.Exists(cannyPath))
            {
                File.Delete(cannyPath);
            }
#if DEBUG
            canny.Save(cannyPath);
#endif

            BlobCounter blobCounter = new BlobCounter
            {
                FilterBlobs = true,
                MinWidth    = 10,
                MinHeight   = 10
            };

            blobCounter.ProcessImage(canny);
            Blob[]           blobs      = blobCounter.GetObjectsInformation();
            List <Rectangle> rectangles = new List <Rectangle>();

            SimpleShapeChecker shapeChecker = new SimpleShapeChecker();

            for (int i = 0; i < blobs.Length; i++)
            {
                List <IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);

                if (shapeChecker.IsConvexPolygon(edgePoints, out List <IntPoint> corners))
                {
                    rectangles.Add(blobs[i].Rectangle);
                }
            }

            // sort by area (ascending)
            rectangles.Sort((a, b) =>
            {
                if (a.Width * a.Height > b.Width * b.Height)
                {
                    return(1);
                }
                else if (a.Width * a.Height < b.Width * b.Height)
                {
                    return(-1);
                }
                return(0);
            });

            Bitmap   output   = new Bitmap(source);
            Graphics graphics = Graphics.FromImage(output);
            Pen      red      = new Pen(Color.Red, 2);

            foreach (var rect in rectangles)
            {
                graphics.DrawRectangle(red, rect);
            }


            string outputPath = Path.Combine(Directory.GetCurrentDirectory(), "output.png");
            if (File.Exists(outputPath))
            {
                File.Delete(outputPath);
            }
#if DEBUG
            output.Save(outputPath);
#endif
            red.Dispose();
            graphics.Dispose();
            output.Dispose();

            return(rectangles);
        }
Example 18
        private void ProcessImage(Bitmap bitmap)
        {
            if (CBApplyCanny.Checked)
            {
                Bitmap            video2 = (Bitmap)bitmap.Clone();
                Grayscale         gray   = new Grayscale(0.2125, 0.7154, 0.0721);
                Bitmap            video3 = gray.Apply(video2);
                CannyEdgeDetector canny  = new CannyEdgeDetector(0, 70);
                canny.ApplyInPlace(video3);
                //PictureViewerEditor.Image = (Bitmap)Image.FromFile(imageFileName);
                PBCanny.Image = (Bitmap)video3.Clone(); // display a clone of the edge-detected frame
                //if (CBFindCircles.Checked)
                //{
                //    //System.Drawing.Image returnImage = null;
                //    //returnImage = (System.Drawing.Image)CaptureBox.Image.Clone();
                //    // Clipboard.SetImage(replacementImage);
                //    Bitmap cannyImage = (Bitmap)PBCapture.Image.Clone();
                //    ProcessImage(cannyImage);
                //    return;
                //}
                GC.Collect();
                //return;
            }
            // lock image
            BitmapData bitmapData = bitmap.LockBits(
                new Rectangle(0, 0, bitmap.Width, bitmap.Height),
                ImageLockMode.ReadWrite, bitmap.PixelFormat);

            // step 1 - turn background to black
            ColorFiltering colorFilter = new ColorFiltering();

            colorFilter.Red              = new IntRange(0, 64);
            colorFilter.Green            = new IntRange(0, 64);
            colorFilter.Blue             = new IntRange(0, 64);
            colorFilter.FillOutsideRange = false;

            colorFilter.ApplyInPlace(bitmapData);

            // step 2 - locating objects
            BlobCounter blobCounter = new BlobCounter();

            blobCounter.FilterBlobs = true;
            blobCounter.MinHeight   = 5;
            blobCounter.MinWidth    = 5;

            blobCounter.ProcessImage(bitmapData);
            Blob[] blobs = blobCounter.GetObjectsInformation();
            bitmap.UnlockBits(bitmapData);
            PBCanny.Image = (Bitmap)bitmap.Clone();
            // step 3 - check objects' type and highlight
            SimpleShapeChecker shapeChecker = new SimpleShapeChecker();

            Graphics g         = Graphics.FromImage(bitmap);
            Pen      yellowPen = new Pen(Color.Yellow, 2); // circles
            Pen      redPen    = new Pen(Color.Red, 2);    // quadrilateral
            Pen      brownPen  = new Pen(Color.Brown, 2);  // quadrilateral with known sub-type
            Pen      greenPen  = new Pen(Color.Green, 2);  // known triangle
            Pen      bluePen   = new Pen(Color.Blue, 2);   // triangle

            for (int i = 0, n = blobs.Length; i < n; i++)
            {
                List <IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);

                AForge.Point center;
                float        radius;

                // is circle ?
                if (shapeChecker.IsCircle(edgePoints, out center, out radius))
                {
                    g.DrawEllipse(yellowPen,
                                  (float)(center.X - radius), (float)(center.Y - radius),
                                  (float)(radius * 2), (float)(radius * 2));
                }
                else
                {
                    List <IntPoint> corners;

                    // is triangle or quadrilateral
                    if (shapeChecker.IsConvexPolygon(edgePoints, out corners))
                    {
                        // get sub-type
                        PolygonSubType subType = shapeChecker.CheckPolygonSubType(corners);

                        Pen pen;

                        if (subType == PolygonSubType.Unknown)
                        {
                            pen = (corners.Count == 4) ? redPen : bluePen;
                        }
                        else
                        {
                            pen = (corners.Count == 4) ? brownPen : greenPen;
                        }

                        g.DrawPolygon(pen, ToPointsArray(corners));
                    }
                }
            }

            yellowPen.Dispose();
            redPen.Dispose();
            greenPen.Dispose();
            bluePen.Dispose();
            brownPen.Dispose();
            g.Dispose();

            // put new image to clipboard
            //Clipboard.SetDataObject(bitmap);
            // and to picture box
            PBAnalysis.Image = bitmap;

            //UpdatePictureBoxPosition();
        }
Example 19
        void ModifiedOutput(object sender, NewFrameEventArgs eventArgs)
        {
            // NOTE: this early return disables the handler; everything below is unreachable as written
            return;

            Pen    blackPen = new Pen(Color.Red, 1);
            Bitmap b        = (Bitmap)eventArgs.Frame;

            if (CBApplyCanny.Checked)
            {
                Bitmap            video2 = (Bitmap)eventArgs.Frame.Clone();
                Grayscale         gray   = new Grayscale(0.2125, 0.7154, 0.0721);
                Bitmap            video3 = gray.Apply(video2);
                CannyEdgeDetector canny  = new CannyEdgeDetector(0, 70);
                canny.ApplyInPlace(video3);
                PBCapture.Image = video3; // display the edge-detected frame
                if (CBFindCircles.Checked)
                {
                    //System.Drawing.Image returnImage = null;
                    //returnImage = (System.Drawing.Image)CaptureBox.Image.Clone();
                    // Clipboard.SetImage(replacementImage);
                    Bitmap cannyImage = (Bitmap)PBCapture.Image.Clone();
                    ProcessImage(cannyImage);
                    return;
                }
                //PBCapture.Image

                // Create a blank bitmap with the same dimensions
                //Bitmap tempBitmap = new Bitmap(video3.Width, video3.Height);

                //// From this bitmap, the graphics can be obtained, because it has the right PixelFormat
                //Rectangle TargetBM = new Rectangle(0, 0, video3.Width, video3.Height);

                //using (var graphics = Graphics.FromImage(tempBitmap))
                //    {
                //        graphics.DrawImage(video3, TargetBM, 0, 0, TargetBM.Width, TargetBM.Height, GraphicsUnit.Pixel);
                //        graphics.DrawLine(blackPen, 0, b.Height / 2, b.Width, b.Height / 2);
                //        graphics.DrawLine(blackPen, b.Width / 2, 0, b.Width / 2, b.Height);
                //        //PBCapture.Image = video;
                //        graphics.Dispose();
                //    }
                //PBCapture.Image = tempBitmap;
                GC.Collect();
                return;
            }
            //Bitmap video = (Bitmap)eventArgs.Frame.Clone();
            //Pen blackPen = new Pen(Color.Red, 1);
            //using (var graphics = Graphics.FromImage(video))
            //{
            //    graphics.DrawLine(blackPen, 0, video.Height / 2, video.Width, video.Height / 2);
            //    graphics.DrawLine(blackPen, video.Width / 2, 0, video.Width / 2, video.Height);
            //    PBOutPut.Image = video;
            //    graphics.Dispose();
            //    GC.Collect();
            //}

            using (var graphics = Graphics.FromImage(b))
            {
                graphics.DrawLine(blackPen, 0, b.Height / 2, b.Width, b.Height / 2);
                graphics.DrawLine(blackPen, b.Width / 2, 0, b.Width / 2, b.Height);
                //PBOutPut.Image = video;
                graphics.Dispose();
                GC.Collect();
            }
            //if (processForShapes) processForShapes(b);
        }
Example 20
        private void skinDetectToolStripMenuItem_Click(object sender, EventArgs e)
        {
            List <List <double> > Listofvectors = new List <List <double> >();

            System.IO.StreamWriter file = new System.IO.StreamWriter(@"C:\Users\gsrip\Documents\MyDocuments\Saarthi AI and IP\Segmented\segmented.txt", append: true);
            String alphabets            = "0ABCDEFGHIJKLMNOPQRSTUVWXYZ";

            for (int index = 1; index <= 26; index++)
            {
                //for each folder select all filenames
                filenames = Directory.GetFiles(dirnames[index - 1]);
                int n = 0;
                foreach (string filename in filenames)
                {
                    //load an image in a bitmap

                    Bitmap bmplocal = new Bitmap(filename);
                    int    height = 300, width = 300;
                    bmp = new Bitmap(bmplocal, width, height);
                    pictureBox1.Image    = new Bitmap(bmp);
                    pictureBox1.SizeMode = PictureBoxSizeMode.StretchImage;
                    using (bmp)
                        using (skinBmp = new Bitmap(bmp.Width, bmp.Height))
                        {
                            //skin detection
                            for (int x = 0; x < bmp.Width; x++)
                            {
                                for (int y = 0; y < bmp.Height; y++)
                                {
                                    Color pixel = bmp.GetPixel(x, y);

                                    int red   = pixel.R;
                                    int blue  = pixel.B;
                                    int green = pixel.G;
                                    int max   = Math.Max(red, Math.Max(green, blue));
                                    int min   = Math.Min(red, Math.Min(green, blue));
                                    int rgdif = red - green;
                                    int abs   = Math.Abs(rgdif);
                                    if (red > 95 && green > 40 && blue > 20 && max - min > 15 && abs > 15 && red > green && red > blue)
                                    {
                                        skinBmp.SetPixel(x, y, pixel);
                                    }
                                }
                            }

                            pictureBox2.Image = new Bitmap(skinBmp);
                            //grayscale filter (BT709)
                            Grayscale filter1   = new Grayscale(0.2125, 0.7154, 0.0721);
                            Bitmap    newImage  = new Bitmap(bmp);
                            Bitmap    grayImage = filter1.Apply(newImage);

                            Threshold filter2 = new Threshold(100);
                            Bitmap    bwImage = filter2.Apply(grayImage);

                            Closing filter5 = new Closing();
                            filter5.ApplyInPlace(bwImage);

                            Opening filter3 = new Opening();
                            filter3.ApplyInPlace(bwImage);

                            ExtractBiggestBlob filter4           = new ExtractBiggestBlob();
                            Bitmap             biggestBlobsImage = filter4.Apply(bwImage);

                            ExtractBiggestBlob filter6            = new ExtractBiggestBlob();
                            Bitmap             biggestBlobsImage1 = filter6.Apply((Bitmap)pictureBox2.Image);

                            Bitmap orgimage  = new Bitmap(biggestBlobsImage1, 300, 300);
                            Bitmap blobimage = new Bitmap(biggestBlobsImage, 300, 300);

                            Bitmap newimage = new Bitmap(300, 300);

                            //AND the two images together pixel by pixel
                            for (int x = 0; x < 300; x++)
                            {
                                for (int y = 0; y < 300; y++)
                                {
                                    Color pixel1 = orgimage.GetPixel(x, y);
                                    Color pixel2 = blobimage.GetPixel(x, y);
                                    int   red1 = pixel1.R, red2 = pixel2.R;
                                    int   blue1 = pixel1.B, blue2 = pixel2.B;
                                    int   green1 = pixel1.G, green2 = pixel2.G;
                                    int   newred, newblue, newgreen;
                                    newred   = red1 & red2;
                                    newblue  = blue1 & blue2;
                                    newgreen = green1 & green2;
                                    Color newpixel = Color.FromArgb(newred, newgreen, newblue);

                                    newimage.SetPixel(x, y, newpixel);
                                }
                            }

                            CannyEdgeDetector filter7 = new CannyEdgeDetector();
                            Grayscale         filter  = new Grayscale(0.2125, 0.7154, 0.0721);
                            Bitmap            edges   = filter.Apply(newimage);
                            filter7.ApplyInPlace(edges);

                            pictureBox3.Image = new Bitmap(edges);
                            String location = "C:\\Users\\gsrip\\Documents\\MyDocuments\\Saarthi AI and IP\\Segmented\\";
                            location = location + alphabets[index].ToString() + "\\image";
                            newimage.Save(@location + (n++).ToString() + ".jpg");

                            List <int> featureVector = new List <int>();
                            for (int i = 0; i < 6; i++)
                            {
                                for (int j = 0; j < 6; j++)
                                {
                                    int count = 0;
                                    for (int x = i * 50; x < (i * 50) + 50; x++)
                                    {
                                        for (int y = j * 50; y < (j * 50) + 50; y++)
                                        {
                                            Color pixel = edges.GetPixel(x, y);
                                            if (pixel.R != 0 && pixel.G != 0 && pixel.B != 0)
                                            {
                                                count++;
                                            }
                                        }
                                    }
                                    featureVector.Add(count);
                                }
                            }

                            int           sumofvector       = featureVector.Sum();
                            List <double> featureVectorNorm = new List <double>();
                            foreach (var d in featureVector)
                            {
                                featureVectorNorm.Add((double)d / sumofvector);
                            }
                            Listofvectors.Add(featureVectorNorm);
                        }//end of using
                } // end of foreach filename
                foreach (var vector in Listofvectors)
                {
                    String line = index.ToString() + ": ";
                    //Console.WriteLine(value);
                    foreach (var obj in vector)
                    {
                        line = line + obj.ToString() + " ";
                        //Console.Write(value);
                    }
                    file.WriteLine(line);
                    //Console.WriteLine();
                }
            } //end of foreach index
            file.Close();
        }     //end of skindetect tool strip
Example 21
        public Bitmap edgeDetection()
        {
            Bitmap          colorImg      = (Bitmap)videoSourcePlayer1.GetCurrentVideoFrame();
            Grayscale       grayfilter    = new Grayscale(cr, cg, cb);
            GaussianBlur    blurFilter    = new GaussianBlur();
            GaussianSharpen sharpenFilter = new GaussianSharpen();
            Bitmap          originalImage;

            if (gausianToggle == 0)
            {
                originalImage = (Bitmap)grayfilter.Apply(colorImg);
            }
            else if (gausianToggle == 1)
            {
                originalImage = sharpenFilter.Apply((Bitmap)colorImg);
                originalImage = (Bitmap)grayfilter.Apply(originalImage);
            }
            else
            {
                originalImage = blurFilter.Apply((Bitmap)colorImg);
                originalImage = (Bitmap)grayfilter.Apply(originalImage);
            }
            switch (caseValue)
            {
            case 1:
                //canny
                scrollableImagePanel1.Image = originalImage;
                CannyEdgeDetector edgeDetector = new CannyEdgeDetector();
                edgeDetector.HighThreshold = (byte)cannyUpperThresholdSlider.Value;
                edgeDetector.LowThreshold  = (byte)cannyLowerThresholdSlider.Value;
                edgeDetector.ApplyInPlace(scrollableImagePanel1.Image);
                return((Bitmap)scrollableImagePanel1.Image);

            case 2:
                //gray scale
                scrollableImagePanel3.Image = originalImage;
                Grayscale customGrayScale = new Grayscale((cr * (graySlider.Value / 100)), (cb * (graySlider.Value / 100)), (cg * (graySlider.Value / 100)));
                originalImage = customGrayScale.Apply(colorImg);
                return(originalImage);

            case 3:
                //Black and White
                scrollableImagePanel2.Image = originalImage;
                Threshold thresholdFilter = new Threshold();
                thresholdFilter.ThresholdValue = hScrollBar1.Value;
                thresholdFilter.ApplyInPlace(scrollableImagePanel2.Image);
                return((Bitmap)scrollableImagePanel2.Image);

            case 4:
                //Mixed Color Edits
                scrollableImagePanel5.Image = colorImg;
                ChannelFiltering colorChannelFilter = new ChannelFiltering();
                colorChannelFilter.Red   = new IntRange(0, redSlider.Value);
                colorChannelFilter.Blue  = new IntRange(0, blueSlider.Value);
                colorChannelFilter.Green = new IntRange(0, greenSlider.Value);
                colorChannelFilter.ApplyInPlace((Bitmap)scrollableImagePanel5.Image);
                return((Bitmap)scrollableImagePanel5.Image);

            case 5:
                //Specific Color edits
                ColorFiltering colorFilter = new ColorFiltering();
                if (colorToggle == 1)
                {
                    Console.WriteLine("Red disabled");
                    colorFilter.Red   = new IntRange(0, 0);
                    colorFilter.Blue  = new IntRange(0, 255);
                    colorFilter.Green = new IntRange(0, 255);
                    colorFilter.Apply(colorImg);
                    originalImage = colorImg;
                    return(originalImage);
                }
                else if (colorToggle == 2)
                {
                    Console.WriteLine("Blue disabled");
                    colorFilter.Red   = new IntRange(0, 255);
                    colorFilter.Blue  = new IntRange(0, 0);
                    colorFilter.Green = new IntRange(0, 255);
                    colorFilter.Apply(colorImg);
                    originalImage = colorImg;
                    return(originalImage);
                }
                else if (colorToggle == 3)
                {
                    Console.WriteLine("Green disabled");
                    colorFilter.Red   = new IntRange(0, 255);
                    colorFilter.Blue  = new IntRange(0, 255);
                    colorFilter.Green = new IntRange(0, 0);
                    colorFilter.Apply(colorImg);
                    originalImage = colorImg;
                    return(originalImage);
                }
                else
                {
                    return(colorImg);
                }
            }
            return(originalImage);
        }
        /// <summary>
        /// This method is for admin use only; it recomputes bagOfContourFragments and the svm
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void buttonCompute_Click(object sender, EventArgs e)
        {
            //Accord.Math.Random.Generator.Seed = 1;

            DirectoryInfo path = new DirectoryInfo(Path.Combine(Application.StartupPath, "Resources/Res"));

            //Create dictionary for training images
            originalTrainImages = new Dictionary <int, Bitmap>();

            int j = 0;

            int k = 0;

            foreach (DirectoryInfo classFolder in path.EnumerateDirectories())
            {
                //Get the name of the class folder
                string name = classFolder.Name;

                //Load all files into an array
                FileInfo[] files = GetFilesByExtensions(classFolder, ".jpg", ".tif").ToArray();

                //Shuffle objects in array
                Vector.Shuffle(files);

                //For each image, perform a few simple operations
                for (int i = 0; i < files.Length; i++)
                {
                    //Use only the training images
                    //(the first 70% of the shuffled files)
                    if ((i / (double)files.Length) < 0.7)
                    {
                        //Add file
                        FileInfo file = files[i];

                        //Create image from file
                        Bitmap image = (Bitmap)Bitmap.FromFile(file.FullName);

                        //Use detector
                        CannyEdgeDetector filterCanny = new CannyEdgeDetector();

                        //Apply changes
                        filterCanny.ApplyInPlace(image);

                        //Record some information about the image
                        string shortName = file.Name;
                        int    imageKey  = j;

                        //Add image to dictionary
                        originalTrainImages.Add(j, image);

                        //Save the correct class key for this image
                        outputsResult[j] = k;
                        j++;
                    }
                }
                //Move on to the next class key
                k++;
            }

            //Create teacher for svm, using Histogram Intersection
            var teacher = new MulticlassSupportVectorLearning <HistogramIntersection>()
            {
                //Set the learner parameters
                Learner = (param) => new SequentialMinimalOptimization <HistogramIntersection>()
                {
                    //Create kernel with optimal params
                    Kernel = new HistogramIntersection(0.25, 1),
                }
            };

            //Create the KModes clustering algorithm
            var kmodes = new KModes <byte>(numberOfContour, new Hamming());

            //Create detector
            var detector = new FastRetinaKeypointDetector();

            //Create bagOfContourFragments
            bagOfContourFragments = new BagOfVisualWords(numberOfContour);

            //Learn bagOfContourFragments from the training images
            bagOfContourFragments.Learn(originalTrainImages.Values.ToArray());

            //For each image, compute its input feature vector
            for (int i = 0; i < originalTrainImages.Count; i++)
            {
                Bitmap image = originalTrainImages[i] as Bitmap;

                inputsInfo[i] = (bagOfContourFragments as ITransform <Bitmap, double[]>).Transform(image);
            }

            //Save the state of bagOfContourFragments
            BinarySave.WriteBinary(bagOfContourFragments);

            //Train the svm
            multiSVM = teacher.Learn(inputsInfo, outputsResult);

            //Save the state of the svm
            BinarySave.WriteBinary(multiSVM);
        }
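
GetFilesByExtensions is not shown; a hypothetical sketch of such a helper (the name comes from the call above, the implementation is an assumption, and it relies on System.Linq) could be:

        //Hypothetical helper: enumerate files whose extension is in the given list
        private static IEnumerable <FileInfo> GetFilesByExtensions(DirectoryInfo dir, params string[] extensions)
        {
            return dir.EnumerateFiles()
                   .Where(f => extensions.Contains(f.Extension, StringComparer.OrdinalIgnoreCase));
        }
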
Example 23
        public BahtinovImage GrabBahtinov()
        {
            var    bahtinovImage = new BahtinovImage();
            Bitmap convertedSource;

            if (originalSource.Format != System.Windows.Media.PixelFormats.Gray8)
            {
                if (originalSource.Format != System.Windows.Media.PixelFormats.Gray16)
                {
                    using (var imgToConvert = ImageUtility.BitmapFromSource(originalSource, System.Drawing.Imaging.PixelFormat.Format48bppRgb)) {
                        convertedSource = new Grayscale(0.2125, 0.7154, 0.0721).Apply(imgToConvert);
                    }
                    convertedSource = ImageUtility.Convert16BppTo8Bpp(ImageUtility.ConvertBitmap(convertedSource, System.Windows.Media.PixelFormats.Gray16));
                }
                else
                {
                    convertedSource = ImageUtility.Convert16BppTo8Bpp(originalSource);
                }
            }
            else
            {
                convertedSource         = ImageUtility.BitmapFromSource(originalSource, System.Drawing.Imaging.PixelFormat.Format8bppIndexed);
                convertedSource.Palette = ImageUtility.GetGrayScalePalette();
            }

            using (var focusEllipsePen = new System.Drawing.Pen(System.Drawing.Brushes.Green, 1)) {
                using (var intersectEllipsePen = new System.Drawing.Pen(System.Drawing.Brushes.Red, 1)) {
                    var mediaColor   = backgroundColor;
                    var drawingColor = System.Drawing.Color.FromArgb(mediaColor.A, mediaColor.R, mediaColor.G, mediaColor.B);
                    using (var linePen = new System.Drawing.Pen(drawingColor, 1)) {
                        using (var bahtinovedBitmap = new Bitmap(convertedSource.Width, convertedSource.Height, System.Drawing.Imaging.PixelFormat.Format24bppRgb)) {
                            Graphics graphics = Graphics.FromImage(bahtinovedBitmap);
                            graphics.DrawImage(convertedSource, 0, 0);

                            /* Apply filters and detection*/
                            CannyEdgeDetector filter = new CannyEdgeDetector();
                            filter.GaussianSize = 10;
                            filter.ApplyInPlace(convertedSource);

                            HoughLineTransformation lineTransform = new HoughLineTransformation();
                            lineTransform.ProcessImage(convertedSource);

                            HoughLine[] lines = lineTransform.GetMostIntensiveLines(6);

                            List <Line> bahtinovLines = new List <Line>();
                            foreach (HoughLine line in lines)
                            {
                                var k = TranslateHughLineToLine(line, bahtinovedBitmap.Width, bahtinovedBitmap.Height);
                                bahtinovLines.Add(k);
                            }

                            float x1, x2, y1, y2;

                            if (bahtinovLines.Count == 6)
                            {
                                var orderedPoints = bahtinovLines.OrderBy(x => 1.0d / x.Slope).ToList();
                                var threeLines    = new List <Line>();

                                for (var i = 0; i < orderedPoints.Count(); i += 2)
                                {
                                    var l1 = orderedPoints[i];
                                    var l2 = orderedPoints[i + 1];

                                    var inter      = (l1.Intercept + l2.Intercept) / 2.0f;
                                    var slope      = (l1.Slope + l2.Slope) / 2.0f;
                                    var centerLine = Line.FromSlopeIntercept(slope, inter);
                                    threeLines.Add(centerLine);

                                    x1 = 0;
                                    x2 = convertedSource.Width;
                                    y1 = double.IsInfinity(centerLine.Slope) ? centerLine.Intercept : centerLine.Slope + centerLine.Intercept;
                                    y2 = double.IsInfinity(centerLine.Slope) ? centerLine.Intercept : (centerLine.Slope * (convertedSource.Width) + centerLine.Intercept);

                                    graphics.DrawLine(
                                        linePen,
                                        new PointF(x1, y1),
                                        new PointF(x2, y2));
                                }

                                /* Intersect outer bahtinov lines */
                                var intersection = threeLines[0].GetIntersectionWith(threeLines[2]);
                                if (intersection.HasValue)
                                {
                                    /* get the orthogonal to the center line through the intersection */
                                    var centerBahtinovLine  = threeLines[1];
                                    var orthogonalSlope     = -1.0f / centerBahtinovLine.Slope;
                                    var orthogonalIntercept = intersection.Value.Y - orthogonalSlope * intersection.Value.X;

                                    var orthogonalCenter = Line.FromSlopeIntercept(orthogonalSlope, orthogonalIntercept);
                                    var intersection2    = centerBahtinovLine.GetIntersectionWith(orthogonalCenter);
                                    if (intersection2.HasValue && !double.IsInfinity(intersection2.Value.X))
                                    {
                                        x1 = intersection.Value.X;
                                        y1 = intersection.Value.Y;
                                        x2 = intersection2.Value.X;
                                        y2 = intersection2.Value.Y;

                                        bahtinovImage.Distance = intersection.Value.DistanceTo(intersection2.Value);

                                        var t  = bahtinovImage.Distance * 4 / bahtinovImage.Distance;
                                        var x3 = (float)((1 - t) * x1 + t * x2);
                                        var y3 = (float)((1 - t) * y1 + t * y2);

                                        var r = 10;
                                        graphics.DrawEllipse(
                                            intersectEllipsePen,
                                            new RectangleF(x3 - r, y3 - r, 2 * r, 2 * r));
                                        graphics.DrawEllipse(
                                            focusEllipsePen,
                                            new RectangleF(x2 - r, y2 - r, 2 * r, 2 * r));

                                        graphics.DrawLine(
                                            intersectEllipsePen,
                                            new PointF(x3, y3),
                                            new PointF(x2, y2));
                                    }
                                }
                            }

                            var img = ImageUtility.ConvertBitmap(bahtinovedBitmap, System.Windows.Media.PixelFormats.Bgr24);
                            convertedSource.Dispose();
                            img.Freeze();
                            bahtinovImage.Image = img;
                            return(bahtinovImage);
                        }
                    }
                }
            }
        }
Example 24
        private void FillPictureBoxes(ref Bitmap image)
        {
            Bitmap tmpImg  = image;
            Bitmap tmpImg2 = image;


            try
            {
                bool hasFilter = false;
                //setup resize and filtersequesce


                //resize img to fit picturebox
                ResizeBicubic resizeFilter = new ResizeBicubic(pbCapture.Width, pbCapture.Height);
                tmpImg = resizeFilter.Apply(tmpImg);

                resizeFilter = new ResizeBicubic(pbShapes.Width, pbShapes.Height);
                tmpImg2      = resizeFilter.Apply(tmpImg2);



                FiltersSequence processingFilter = new FiltersSequence();


                //List all filters
                IFilter ConservativeSmoothingFilter = new AForge.Imaging.Filters.ConservativeSmoothing();
                IFilter InvertFilter          = new AForge.Imaging.Filters.Invert();
                IFilter HSLFilteringFilter    = new AForge.Imaging.Filters.HSLFiltering();
                IFilter SepiaFilter           = new AForge.Imaging.Filters.Sepia();
                IFilter grayscaleFilter       = new AForge.Imaging.Filters.GrayscaleBT709();
                IFilter SkeletonizationFilter = new AForge.Imaging.Filters.SimpleSkeletonization();
                IFilter pixFilter             = new AForge.Imaging.Filters.Pixellate();


                ////apply filter and process img---------------------------------------------



                if (ConservativeSmoothing)
                {
                    processingFilter.Add(ConservativeSmoothingFilter);
                    hasFilter = true;
                }

                if (Invert)
                {
                    processingFilter.Add(InvertFilter);
                    hasFilter = true;
                }

                if (HSLswitch)
                {
                    processingFilter.Add(HSLFilteringFilter);
                    hasFilter = true;
                }

                if (sepiaSwitch)
                {
                    processingFilter.Add(SepiaFilter);
                    hasFilter = true;
                }


                if (Skeletonization)
                {
                    processingFilter.Add(grayscaleFilter);
                    processingFilter.Add(SkeletonizationFilter);
                    hasFilter = true;
                }

                //apply the filter(s) to image
                if (hasFilter)
                {
                    //tmpImg = processingFilter.Apply(tmpImg);
                    tmpImg2 = processingFilter.Apply(tmpImg2);
                }

                processingFilter.Clear();


                if (bwSwitch)
                {
                    switchBandW(ref tmpImg);
                }



                if (CannyEdgeDetector)
                {
                    // create filter
                    CannyEdgeDetector filter = new CannyEdgeDetector();
                    // apply the filter
                    tmpImg = Grayscale.CommonAlgorithms.BT709.Apply(tmpImg);
                    filter.ApplyInPlace(tmpImg);


                    // image = DrawFocusArea(gsImage);
                }
                else
                {
                    // image = DrawFocusArea(image);
                }


                if (DifferenceEdgeDetector)
                {
                    DifferenceEdgeDetector dFilter = new DifferenceEdgeDetector();
                    // apply the filter
                    tmpImg = Grayscale.CommonAlgorithms.BT709.Apply(tmpImg);
                    dFilter.ApplyInPlace(tmpImg);
                }


                if (HomogenityEdgeDetector)
                {
                    HomogenityEdgeDetector hFilter = new HomogenityEdgeDetector();
                    // apply the filter
                    tmpImg = Grayscale.CommonAlgorithms.BT709.Apply(tmpImg);
                    hFilter.ApplyInPlace(tmpImg);
                }


                if (SobelEdgeDetector)
                {
                    SobelEdgeDetector hFilter = new SobelEdgeDetector();
                    // apply the filter
                    tmpImg = Grayscale.CommonAlgorithms.BT709.Apply(tmpImg);
                    hFilter.ApplyInPlace(tmpImg);

                    BlobCounter bc    = new BlobCounter(tmpImg);
                    Rectangle[] brecs = bc.GetObjectsRectangles();


                    //Graphics pg = Graphics.FromImage(tmpImg);
                    //Pen p = new Pen(Color.White, 2);

                    //foreach (Rectangle r in brecs)
                    //{
                    //    pg.DrawRectangle(p, r);
                    //}
                }



                if (findShapes)
                {
                    tmpImg = FindShapes(tmpImg, ref tmpImg2);
                    //ProcessImage(image);
                }
                else
                {
                    pbCapture.Image = tmpImg;  //set picturebox image----------------
                    pbShapes.Image  = tmpImg2; //set picturebox image----------------
                }



                // Graphics g = Graphics.FromImage(tmpImg);
                // Pen p = new Pen(Color.Red, 2);

                // Rectangle lr = new Rectangle(100, 120, 80, 40);
                //// Rectangle rr = new Rectangle(360, 220, 80, 40);

                // g.DrawRectangle(p, lr);
                // //g.DrawRectangle(p, rr);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }


            //  pbCapture.Image = tmpImg;//set picturebox image----------------
            //   pbShapes.Image = tmpImg2;//set picturebox image----------------
        }
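
FillPictureBoxes repeats the same two-step pattern for every detector: convert the frame to 8 bpp grayscale (AForge edge detectors only accept grayscale input), then run the detector. A minimal sketch of that step factored into a helper (not part of the original form; it assumes the AForge.Imaging.Filters and System.Drawing namespaces are imported):

        // Convert to grayscale, then run any AForge edge detector on the copy.
        private static Bitmap ApplyEdgeDetector(Bitmap source, IFilter edgeDetector)
        {
            // AForge edge detectors require 8 bpp grayscale images
            Bitmap gray = Grayscale.CommonAlgorithms.BT709.Apply(source);
            return edgeDetector.Apply(gray);
        }

With such a helper, each branch above would reduce to a single call, e.g. tmpImg = ApplyEdgeDetector(tmpImg, new CannyEdgeDetector());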
Esempio n. 25
0
        public static Bitmap SmoothingFilter(this Bitmap sourceBitmap, MainForm form,
                                             SmoothingFilterType smoothFilter =
                                             SmoothingFilterType.Nenhum)
        {
            Bitmap inputBitmap = null;

            //Progress bar
            form.algorithmProgress = 0;

            switch (smoothFilter)
            {
            case SmoothingFilterType.Nenhum:
            {
                inputBitmap = sourceBitmap;
            } break;

            case SmoothingFilterType.Gaussiano3x3:
            {
                inputBitmap = sourceBitmap.ConvolutionFilter(
                    Matrix.Gaussian3x3, 1.0 / 16.0, 0);
            } break;

            case SmoothingFilterType.Gaussiano5x5:
            {
                inputBitmap = sourceBitmap.ConvolutionFilter(
                    Matrix.Gaussian5x5, 1.0 / 159.0, 0);
            } break;

            case SmoothingFilterType.Gaussiano7x7:
            {
                inputBitmap = sourceBitmap.ConvolutionFilter(
                    Matrix.Gaussian7x7, 1.0 / 136.0, 0);
            } break;

            case SmoothingFilterType.Mediano3x3:
            {
                inputBitmap = sourceBitmap.MedianFilter(3);
            } break;

            case SmoothingFilterType.Mediano5x5:
            {
                inputBitmap = sourceBitmap.MedianFilter(5);
            } break;

            case SmoothingFilterType.Mediano7x7:
            {
                inputBitmap = sourceBitmap.MedianFilter(7);
            } break;

            case SmoothingFilterType.Mediano9x9:
            {
                inputBitmap = sourceBitmap.MedianFilter(9);
            } break;

            case SmoothingFilterType.Mean3x3:
            {
                inputBitmap = sourceBitmap.ConvolutionFilter(
                    Matrix.Mean3x3, 1.0 / 9.0, 0);
            } break;

            case SmoothingFilterType.Mean5x5:
            {
                inputBitmap = sourceBitmap.ConvolutionFilter(
                    Matrix.Mean5x5, 1.0 / 25.0, 0);
            } break;

            case SmoothingFilterType.LowPass3x3:
            {
                inputBitmap = sourceBitmap.ConvolutionFilter(
                    Matrix.LowPass3x3, 1.0 / 16.0, 0);
            } break;

            case SmoothingFilterType.LowPass5x5:
            {
                inputBitmap = sourceBitmap.ConvolutionFilter(
                    Matrix.LowPass5x5, 1.0 / 60.0, 0);
            } break;

            case SmoothingFilterType.Sharpen3x3:
            {
                inputBitmap = sourceBitmap.ConvolutionFilter(
                    Matrix.Sharpen3x3, 1.0 / 8.0, 0);
            } break;
            }

            //Progress bar
            form.algorithmProgress = 20;


            // START additional filters ADDED BY GABRIEL
            inputBitmap = AForge.Imaging.Image.Clone(inputBitmap, PixelFormat.Format24bppRgb); //Accepted format
            Bilateral           filterB = new Bilateral();
            Grayscale           filterG = new Grayscale(0.2125, 0.7154, 0.0721);               //BT709 luminance coefficients
            CannyEdgeDetector   filterE = new CannyEdgeDetector();
            ColorImageQuantizer filterC = new ColorImageQuantizer(new MedianCutQuantizer());
            Dilatation          filterD = new Dilatation();

            //Bilateral filter as present in the article
            filterB.KernelSize    = form.kernelValue;
            filterB.SpatialFactor = form.spatialFactor;
            filterB.ColorFactor   = form.colorFactor;
            filterB.ColorPower    = form.colorPower;
            filterB.ApplyInPlace(inputBitmap);

            form.algorithmProgress = 40;

            /* GENERATING BORDERS */
            //Generate a grayscale version for edge detection
            Bitmap edges = filterG.Apply(inputBitmap);

            filterE.HighThreshold = form.highThreshold;
            filterE.LowThreshold  = form.lowThreshold;
            filterE.ApplyInPlace(edges); // edges now holds the Canny-detected edges of inputBitmap
            //Dilatation filter
            edges = filterD.Apply(edges);
            generateBorder(edges);
            //Making bg transparent
            edges.MakeTransparent(Color.White);


            form.algorithmProgress = 70;


            // Color reduction as present in the article
            inputBitmap = filterC.ReduceColors(inputBitmap, form.colorReductionFactor);         // reduce the palette to colorReductionFactor colors

            inputBitmap = AForge.Imaging.Image.Clone(inputBitmap, PixelFormat.Format32bppArgb); //Accepted format

            // images merge
            Bitmap   bitmapResult = new Bitmap(inputBitmap.Width, inputBitmap.Height, inputBitmap.PixelFormat);
            Graphics g            = Graphics.FromImage(bitmapResult);

            g.DrawImage(inputBitmap, 0, 0, inputBitmap.Width, inputBitmap.Height);
            g.DrawImage(edges, 0, 0, inputBitmap.Width, inputBitmap.Height);
            // END additional filters ADDED BY GABRIEL

            form.algorithmProgress = 100;

            return(bitmapResult); // note: previously returned inputBitmap
        }
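
A minimal call-site sketch for the extension above (the control names are hypothetical, and it assumes the handler lives inside MainForm, since the pipeline reads its thresholds and factors from the form):

        // Hypothetical button handler inside MainForm: run the cartoon-style
        // pipeline with a 5x5 Gaussian pre-smoothing pass.
        private void buttonCartoonize_Click(object sender, EventArgs e)
        {
            Bitmap source = (Bitmap)pictureBoxSource.Image;   // pictureBoxSource is an assumed control
            Bitmap result = source.SmoothingFilter(this, SmoothingFilterType.Gaussiano5x5);
            pictureBoxResult.Image = result;                  // pictureBoxResult is an assumed control
        }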