Example 1
        /// <summary>
        /// Called by SearchImage. Merges the colour masks together for visual output to the form and gathers the edge points of the wall and line rectangles.
        /// </summary>
        protected void MergeImages()
        {
            redColourBitmap = ConvertImageFormat(redColourBitmap);
            redColourBitmap = ApplyColour(redColourBitmap, outputImage, System.Drawing.Color.Red);

            greenColourBitmap = ConvertImageFormat(greenColourBitmap);
            greenColourBitmap = ApplyColour(greenColourBitmap, outputImage, System.Drawing.Color.LightGreen);

            blueColourBitmap = ConvertImageFormat(blueColourBitmap);
            blueColourBitmap = ApplyColour(blueColourBitmap, outputImage, System.Drawing.Color.LightBlue);

            whiteColourBitmap = ConvertImageFormat(whiteColourBitmap);
            whiteColourBitmap = ApplyColour(whiteColourBitmap, outputImage, System.Drawing.Color.FromArgb(100, System.Drawing.Color.White));

            yellowColourBitmap = ConvertImageFormat(yellowColourBitmap);
            yellowColourBitmap = ApplyColour(yellowColourBitmap, outputImage, System.Drawing.Color.FromArgb(100, System.Drawing.Color.Yellow));

            // Find the Y positions within each wall and line search rectangle to give the edge points.
            yellowTopWallPoints    = GetWallPoints(yellowColourBitmap, yellowWallRectangleTop, false);
            yellowBottomWallPoints = GetWallPoints(yellowColourBitmap, yellowWallRectangleBottom, false);

            whiteTopWallPoints    = GetWallPoints(whiteColourBitmap, whiteWallRectangleTop, false);
            whiteBottomWallPoints = GetWallPoints(whiteColourBitmap, whiteWallRectangleBottom, false);

            blueLinePoints       = GetWallPoints(blueColourBitmap, blueLineRectangle, false);
            secondBlueLinePoints = GetWallPoints(blueColourBitmap, secondBlueLineRectangle, false);


            FindBlueLineThickness();
            outputImage = Greyscale(outputImage);
            outputImage = ConvertImageFormat(outputImage);


            // Merge all of the single-colour-on-black images together.
            Bitmap mergedColourImages;

            AForge.Imaging.Filters.Merge mFilter;

            mFilter            = new AForge.Imaging.Filters.Merge(greenColourBitmap);
            mergedColourImages = mFilter.Apply(blueColourBitmap);

            mFilter            = new AForge.Imaging.Filters.Merge(mergedColourImages);
            mergedColourImages = mFilter.Apply(redColourBitmap);

            mFilter            = new AForge.Imaging.Filters.Merge(mergedColourImages);
            mergedColourImages = mFilter.Apply(yellowColourBitmap);

            mFilter            = new AForge.Imaging.Filters.Merge(mergedColourImages);
            mergedColourImages = mFilter.Apply(whiteColourBitmap);


            // Set the merged colour image on top of greyscale camera image to see what is segmented.
            mFilter     = new AForge.Imaging.Filters.Merge(mergedColourImages);
            outputImage = mFilter.Apply(outputImage);
        }
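
Note: AForge's Merge filter takes the per-pixel (per-channel) maximum of its source and overlay images, and the two images must share the same size and pixel format, which is presumably why each colour mask is passed through ConvertImageFormat first. A minimal sketch of the same overlay pattern in isolation, using a hypothetical helper and two same-format bitmaps:

        // Minimal sketch (hypothetical helper): overlay one bitmap onto another with
        // AForge's Merge filter. Both bitmaps must share size and pixel format.
        protected Bitmap OverlayWithMerge(Bitmap baseBitmap, Bitmap overlayBitmap)
        {
            // Merge keeps the per-channel maximum of the two images at every pixel,
            // so brighter overlay pixels show through on top of the base image.
            AForge.Imaging.Filters.Merge merge = new AForge.Imaging.Filters.Merge(overlayBitmap);
            return merge.Apply(baseBitmap);
        }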
Example 2
        /// <summary>
        /// Merges mergeBitmap onto currentBitmap at offset (x, y) using AForge's Merge filter (per-pixel maximum).
        /// </summary>
        public Bitmap ApplyMerge(Bitmap currentBitmap, Bitmap mergeBitmap, int x, int y)
        {
            // Clone the region of currentBitmap that mergeBitmap will cover.
            System.Drawing.Rectangle           cloneRect = new System.Drawing.Rectangle(x, y, mergeBitmap.Width, mergeBitmap.Height);
            System.Drawing.Imaging.PixelFormat format    = mergeBitmap.PixelFormat;

            using (Bitmap cloneBitmap = currentBitmap.Clone(cloneRect, format))
            {
                // Merge keeps the per-pixel maximum of mergeBitmap and the cloned region.
                AForge.Imaging.Filters.Merge filter = new AForge.Imaging.Filters.Merge(cloneBitmap);

                using (Bitmap resultImage = filter.Apply(mergeBitmap))
                using (Graphics graphics = Graphics.FromImage(currentBitmap))
                {
                    // Draw the merged region back onto currentBitmap at the requested offset.
                    graphics.DrawImage(resultImage, x, y, resultImage.Width, resultImage.Height);
                }
            }

            return currentBitmap;
        }
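
A hedged usage sketch for ApplyMerge (the bitmaps and offset below are made up for illustration): paste a 100x100 overlay into a 640x480 frame at (10, 20). Both bitmaps use the same pixel format so that the Clone/Merge pair inside ApplyMerge succeeds.

            // Hypothetical usage of ApplyMerge: paste a small overlay into a larger frame at (10, 20).
            Bitmap frame   = new Bitmap(640, 480, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
            Bitmap overlay = new Bitmap(100, 100, System.Drawing.Imaging.PixelFormat.Format24bppRgb);

            // ApplyMerge is an instance method, so this is called on the class that defines it.
            Bitmap combined = ApplyMerge(frame, overlay, 10, 20);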
Example 3
        unsafe static void Main()
        {
            Test test = new Test();

            test.TestRunningWeightedVariance();
            return; // Comment this line out to run the rest of the demo below.

            var resourceDir = Path.Combine(Directory.GetParent(Directory.GetCurrentDirectory()).FullName, "Resources");
            var imgColor    = Bitmap.FromFile(Path.Combine(resourceDir, "testColorBig.jpg")).ToImage <Bgr, byte>();

            imgColor = imgColor.CorrectContrast(105);

            /*var bmp1 = (System.Drawing.Bitmap)System.Drawing.Bitmap.FromFile("nature-spring.jpg");
             * var image1 = bmp1.ToImage<Gray, float>();
             *
             * var res1 = ResizeNearsetNeighbur.Resize(image1, new Size(640, 480));
             * ImageBox.Show("Interpolated image", res1.ToBitmap());
             *
             * var res = new Image<Bgr, float>(320, 200);
             * image1.GetRectSubPix(new PointF(1.9f, 1.9f), res);
             * ImageBox.Show("Interpolated image", res.ToBitmap());*/

            test.TestLKFlow();


            return; // Comment this line out to execute the functions below.

            var            bmp   = (System.Drawing.Bitmap)System.Drawing.Bitmap.FromFile("nature-spring.jpg");
            var            image = bmp.ToImage <Bgr, byte>();
            UnmanagedImage uIm   = UnmanagedImage.FromManagedImage(bmp);

            /********************** Bitmap <-> Image<,> *************************/
            //from Bitmap...
            IImage            bmpImg1 = bmp.ToImage();                                                     //generic image (destination format depends on the bmp pixel format) (in this case: <Color3, byte>)
            Image <Bgr, byte> bmpImg2 = bmp.ToImage <Bgr, byte>();                                         //an additional cast is performed (<Color3, byte> => <Bgr, byte>), no data conversion
            //to Bitmap...
            Bitmap bmpFromImg = bmpImg2.ToBitmap(copyAlways: false /*do not copy if you do not have to*/); //<Bgr, byte> can be cast to <Color3, byte>, so the data is shared between the Bitmap and bmpImg2

            /********************** UnmanagedImage <-> Image<,> *************************/
            //from UnmanagedImage...
            var im1FromUIm = uIm.AsImage();             //generic image (destination format depends on the source pixel format)
            var im2FromUIm = uIm.ToImage <Bgr, byte>(); //an additional cast is performed (<Color3, byte> => <Bgr, byte>), no data conversion
            //to UnmanagedImage...
            var uIm2 = im1FromUIm.ToAForgeImage(copyAlways: false, failIfCannotCast: false);

            /******************* some AForge filter recreation... ***********************/

            /********************** Array <-> Image<,> (also eliminates need for Matrix, UnmanagedImage converters) ********************************/
            int[,] arr = new int[480, 640];

            //from Array...
            var image1FromArray      = arr.ToImage(); //supported for all 2D/3D arrays
            var castedImageFromArray = arr.AsImage(); //supported only on 2D arrays (data is shared)

            //to Array ...
            var arrFromIm = image1FromArray.ToArray(); //output is 2D or 3D array (see function overloads)


            /**************** channel rotate *******************/
            //Image<,> => flexible
            var channels = image.SplitChannels();
            var dest     = new Image <Bgr, byte>(new Image <Gray, byte>[] { channels[1], channels[0], channels[2] });

            //AForge
            AForge.Imaging.Filters.RotateChannels rc = new AForge.Imaging.Filters.RotateChannels();
            rc.Apply(uIm);

            /**************** channel extract *******************/
            //Image<,> => simple
            var ch = image[0];

            //AForge
            AForge.Imaging.Filters.ExtractChannel ec = new AForge.Imaging.Filters.ExtractChannel(0);
            ec.Apply(uIm);

            /**************** Max (see Min also) *******************/
            //Image<,>
            image.Max(image, inPlace: true);

            //AForge
            AForge.Imaging.Filters.Merge m = new AForge.Imaging.Filters.Merge(uIm);
            m.Apply(uIm);

            /**************** Sobel *******************/
            var bmpSquareGray = (System.Drawing.Bitmap)System.Drawing.Bitmap.FromFile("square.bmp");
            var grayIm        = bmpSquareGray.ToImage <Gray, float>(); //currently there are no overloads for magnitude for <byte>, will be fixed later

            //Image<,> => flexible
            var sobelX = grayIm.Sobel(1, 0, 3);
            var sobelY = grayIm.Sobel(0, 1, 3);
            var mag    = sobelX.Magnitude(sobelY); //should use Threshold for values > 255 (not implemented yet)

            //mag.ToBitmap().Save("bla.bmp");  //img.Save(..) is available for <IColor3, byte> and <Gray, byte> (Bitmap compatible formats) should change ?
            //var mag = sobelX.Abs().Add(sobelY.Abs()).Scale(0, 255).Convert<Gray, byte>(); //should work later (it is not implemented)

            //AForge
            AForge.Imaging.Filters.SobelEdgeDetector sobel = new AForge.Imaging.Filters.SobelEdgeDetector();
            var destSobel = sobel.Apply(grayIm.ToAForgeImage());

            //destSobel.ToManagedImage().Save("sobelAForge.bmp");
            return;
        }
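
One note on the AForge calls in the demo above: Apply(UnmanagedImage) returns a new image rather than modifying its argument, so the results of rc.Apply(uIm), ec.Apply(uIm), and m.Apply(uIm) are discarded. A small sketch that keeps the filtered outputs (uIm as defined above):

            // Capture the results of the AForge filters instead of discarding them.
            UnmanagedImage rotated  = new AForge.Imaging.Filters.RotateChannels().Apply(uIm);
            UnmanagedImage channel0 = new AForge.Imaging.Filters.ExtractChannel(0).Apply(uIm);
            UnmanagedImage merged   = new AForge.Imaging.Filters.Merge(uIm).Apply(uIm); // per-pixel max of uIm with itself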
Example 4
        /// <summary>
        /// Segments obstacles, prey, and wall lines in the current camera frame, builds the merged
        /// debug image, and updates the tracking state before issuing robot commands.
        /// </summary>
        public void PerformPredatorActions()
        {
            // Grab the latest camera frame and segment the coloured regions of interest.
            cameraImage = Camera.Image;
            obstacleRectangle = DetectObstacle(greenFilter, cameraImage, new Vector2(35), ref greenColourBitmap);
            preyRectangle = DetectObstacle(redFilter, cameraImage, new Vector2(10), ref redColourBitmap);
            wallLineRectangle = DetectObstacle(whiteFilter, cameraImage, new Vector2(100, 0), ref whiteColourBitmap);

            // Recolour each mask so it can be overlaid on the camera image.
            redColourBitmap = ConvertImageFormat(redColourBitmap);
            redColourBitmap = ApplyColour(redColourBitmap, cameraImage, System.Drawing.Color.Red);
            greenColourBitmap = ConvertImageFormat(greenColourBitmap);
            greenColourBitmap = ApplyColour(greenColourBitmap, cameraImage, System.Drawing.Color.LightGreen);
            whiteColourBitmap = ApplyColour(whiteColourBitmap, cameraImage, System.Drawing.Color.Cyan);
            whiteColourBitmap = ConvertImageFormat(whiteColourBitmap);

            // Sample the wall line at its left edge, right edge, and centre column.
            wallLeftPoint = GetPoint(whiteColourBitmap, wallLineRectangle.X, wallLineRectangle);
            wallRightPoint = GetPoint(whiteColourBitmap, wallLineRectangle.X + wallLineRectangle.Width - 1, wallLineRectangle);
            System.Drawing.Point p = GetPoint(whiteColourBitmap, wallLineRectangle.X + wallLineRectangle.Width - (wallLineRectangle.Width / 2), wallLineRectangle);

            cameraImage = Greyscale(cameraImage);
            cameraImage = ConvertImageFormat(cameraImage);

            AForge.Imaging.Filters.SimplePosterization jFilter = new AForge.Imaging.Filters.SimplePosterization();
            //cameraImage = jFilter.Apply(cameraImage);

            // Merge the colour masks together, then overlay them on the greyscale camera image.
            AForge.Imaging.Filters.Merge mFilter = new AForge.Imaging.Filters.Merge(greenColourBitmap);
            Bitmap mergedColourImages = mFilter.Apply(whiteColourBitmap);

            mFilter = new AForge.Imaging.Filters.Merge(mergedColourImages);
            mergedColourImages = mFilter.Apply(redColourBitmap);

            mFilter = new AForge.Imaging.Filters.Merge(cameraImage);
            cameraImage = mFilter.Apply(mergedColourImages);

            //cameraImage = whiteColourBitmap;

            // Decide the tracking state: an empty prey rectangle means no prey was detected.
            if (preyRectangle != System.Drawing.Rectangle.Empty)
                preyScreenPosition = preyRectangle;

            if (preyRectangle == System.Drawing.Rectangle.Empty && searchingRotationCount < 8)
                trackingState = Tracking.Searching;
            else if (searchingRotationCount >= 8 && trackingState != Tracking.OnScreen)
                trackingState = Tracking.Roaming;
            else
                trackingState = Tracking.OnScreen;
            RobotCommands();
        }