Example #1
1
        /// <summary>
        /// Detects blocks sitting on a table in the given image. The background is
        /// thresholded out (Otsu), the largest contour is taken to be the table,
        /// everything off the table is masked away, and the remaining contours are
        /// treated as blocks whose center colors are sampled from the original image.
        /// </summary>
        /// <param name="inputImg">Source image, converted to BGRA internally.</param>
        /// <param name="width">Overwritten with the image width. NOTE(review): passed by value, so the caller never sees the assignment — was <c>out</c>/<c>ref</c> intended? TODO confirm.</param>
        /// <param name="height">Overwritten with the image height (same caveat as <paramref name="width"/>).</param>
        /// <returns>One BlockData per detected block, from FindColorAtCenters.</returns>
        public List <BlockData> DetectBlocks(Bitmap inputImg, int width, int height)
        {
            Image <Bgra, Byte> img = new Image <Bgra, Byte>(inputImg);

            width  = img.Width;
            height = img.Height;

            // Threshold out background. Otsu chooses the threshold itself; the
            // returned threshold value is not needed, so it is no longer captured.
            Image <Gray, Byte> grayImg        = img.Convert <Gray, Byte>();
            Image <Gray, Byte> backgroundMask = new Image <Gray, Byte>(width, height);
            CvInvoke.Threshold(grayImg, backgroundMask, 0, 255, ThresholdType.Otsu);

            // NOTE(review): filledBackground is computed but never used below —
            // FindContours was probably meant to run on it instead of
            // backgroundMask. Confirm intent before changing behavior.
            Image <Gray, Byte> filledBackground = this.FillMask(backgroundMask);

            VectorOfVectorOfPoint allContours = new VectorOfVectorOfPoint();

            CvInvoke.FindContours(backgroundMask, allContours, null, RetrType.External, ChainApproxMethod.ChainApproxSimple);

            // Keep only the largest contour (assumed to be the table), drawn filled.
            int tableContourIdx          = this.FindLargestContourIdx(allContours);
            Image <Gray, Byte> tableMask = new Image <Gray, Byte>(width, height);
            const int fillInterior       = -1; // negative thickness => fill the contour interior

            CvInvoke.DrawContours(tableMask, allContours, tableContourIdx, new MCvScalar(255), fillInterior);

            // Shrink the table mask slightly so the table's own edge pixels are excluded.
            IInputArray structElem = CvInvoke.GetStructuringElement(ElementShape.Rectangle, STRUCT_ELEM_SIZE, STRUCT_ELEM_ANCHOR);

            CvInvoke.Erode(tableMask, tableMask, structElem, new Point(-1, -1), 1, BorderType.Constant, new MCvScalar(255));

            // Foreground = everything that is not background.
            Image <Gray, Byte> foregroundMask = new Image <Gray, Byte>(width, height);

            CvInvoke.BitwiseNot(backgroundMask, foregroundMask);

            // Restrict the foreground to the table area only.
            Image <Gray, Byte> tableForegroundMask = new Image <Gray, Byte>(width, height);

            CvInvoke.BitwiseAnd(foregroundMask, tableMask, tableForegroundMask);

            // Copy of the original image restricted to the table, used for annotation below.
            Image <Bgra, Byte> processedImg = new Image <Bgra, Byte>(width, height);

            CvInvoke.BitwiseOr(img, processedImg, processedImg, tableMask);

            // Contours of candidate blocks on the table, with tiny noise contours removed.
            VectorOfVectorOfPoint possibleBlocks = new VectorOfVectorOfPoint();

            CvInvoke.FindContours(tableForegroundMask, possibleBlocks, null, RetrType.External, ChainApproxMethod.ChainApproxSimple);
            VectorOfVectorOfPoint filteredBlocks = this.FilterSmallAreaContours(possibleBlocks);

            // Mark the block centers on the annotated image and sample colors there.
            Point[] blockCenters = this.FindAndMarkContourCenters(filteredBlocks, processedImg);
            return(this.FindColorAtCenters(blockCenters, img));
        }
    /// <summary>
    /// Renders this.body as a white-on-black RGB mask, optionally overlaying the
    /// handle images recolored red.
    /// </summary>
    /// <param name="withhandle">When true, handle pixels are merged in red on top of the body.</param>
    /// <returns>A (width x height) RGB image with the body drawn in white.</returns>
    public Image <Rgb, byte> GetBodyImage(bool withhandle = false)
    {
        Image <Rgb, byte> body_img = new Image <Rgb, byte>(width, height);

        body_img.SetZero();
        foreach (Vector2 p in this.body)
        {
            // Emgu image indexers are [row, column], i.e. [y, x].
            body_img[(int)p.y, (int)p.x] = new Rgb(255, 255, 255);
        }

        MaskParser.FillGap(body_img);
        //MaskParser.FillHole(body_img);

        if (withhandle)
        {
            List <Image <Rgb, byte> > handles = this.GetHandleImage();
            foreach (Image <Rgb, byte> hi in handles)
            {
                // Recolor every non-black handle pixel (grayscale used as mask),
                // then merge the handle into the body image.
                hi.SetValue(new Rgb(255, 0, 0), hi.Convert <Gray, byte>());
                CvInvoke.BitwiseOr(body_img, hi, body_img);
            }
            MaskParser.FillGap(body_img, 5);
            // Removed leftover debug dump: body_img.Save("xxx.png");
        }


        //if (label == Label.Cylinder)
        //{
        //    body_img = MaskParser.FillBodyWithHandle(body_img, this.GetHandleImage());
        //}
        return(body_img);
    }
Example #3
0
        /// <summary>
        /// Builds maskFinal from the current frame (_frame). When findPayload is
        /// set, the final mask is the union of red, blue, and yellow masks;
        /// otherwise only the red range is used. Each color is isolated in HSV,
        /// Gaussian-smoothed, and re-binarized.
        /// </summary>
        public void generateMask()
        {
            // Convert BGR to HSV once — both branches need it (was duplicated).
            CvInvoke.CvtColor(_frame, hsv, ColorConversion.Bgr2Hsv);

            if (findPayload)
            {
                // Isolate each color range of interest, smooth, and convert to b/w.
                //Red
                CvInvoke.InRange(hsv, new ScalarArray(lowerBoundRed), new ScalarArray(upperBoundRed), maskRed);
                CvInvoke.GaussianBlur(maskRed, filteredRed, new Size(25, 25), 0.0);
                CvInvoke.Threshold(filteredRed, maskRed, 150.0, 255.0, ThresholdType.Binary);

                //Blue
                CvInvoke.InRange(hsv, new ScalarArray(lowerBoundBlue), new ScalarArray(upperBoundBlue), maskBlue);
                CvInvoke.GaussianBlur(maskBlue, filteredBlue, new Size(25, 25), 0.0);
                CvInvoke.Threshold(filteredBlue, maskBlue, 150.0, 255.0, ThresholdType.Binary);

                //Yellow
                CvInvoke.InRange(hsv, new ScalarArray(lowerBoundYellow), new ScalarArray(upperBoundYellow), maskYellow);
                CvInvoke.GaussianBlur(maskYellow, filteredYellow, new Size(25, 25), 0.0);
                CvInvoke.Threshold(filteredYellow, maskYellow, 150.0, 255.0, ThresholdType.Binary);

                // Union of all three color masks.
                CvInvoke.BitwiseOr(maskRed, maskBlue, maskTemp);
                CvInvoke.BitwiseOr(maskTemp, maskYellow, maskFinal);
            }
            else
            {
                // Red only; the threshold writes straight into the final mask.
                CvInvoke.InRange(hsv, new ScalarArray(lowerBoundRed), new ScalarArray(upperBoundRed), maskRed);
                CvInvoke.GaussianBlur(maskRed, filteredRed, new Size(25, 25), 0.0);
                CvInvoke.Threshold(filteredRed, maskFinal, 150.0, 255.0, ThresholdType.Binary);
            }
        }
Example #4
0
        /// <summary>
        /// Computes the morphological skeleton of a bitmap. The input is inverted
        /// (so the foreground becomes white), binarized, then repeatedly opened;
        /// the union of the per-iteration residues is the skeleton.
        /// </summary>
        /// <param name="image">Source bitmap, treated as grayscale.</param>
        /// <returns>The skeleton as a bitmap (white skeleton on black).</returns>
        public static Bitmap Skelatanize(Bitmap image)
        {
            Image <Gray, byte> imgOld = new Image <Gray, byte>(image);
            // Invert: 255 - pixel, so dark strokes become white foreground.
            Image <Gray, byte> img2   = (new Image <Gray, byte>(imgOld.Width, imgOld.Height, new Gray(255))).Sub(imgOld);
            Image <Gray, byte> eroded = new Image <Gray, byte>(img2.Size);
            Image <Gray, byte> temp   = new Image <Gray, byte>(img2.Size);
            Image <Gray, byte> skel   = new Image <Gray, byte>(img2.Size);

            skel.SetValue(0);
            // Fixed: the max value for an 8-bit binary threshold is 255, not 256
            // (now consistent with the other Skelatanize overload in this file).
            CvInvoke.Threshold(img2, img2, 127, 255, 0);
            var  element = CvInvoke.GetStructuringElement(ElementShape.Cross, new Size(3, 3), new Point(-1, -1));
            bool done    = false;

            while (!done)
            {
                // One morphological opening; the difference between the image and
                // its opening is this iteration's contribution to the skeleton.
                CvInvoke.Erode(img2, eroded, element, new Point(-1, -1), 1, BorderType.Reflect, default(MCvScalar));
                CvInvoke.Dilate(eroded, temp, element, new Point(-1, -1), 1, BorderType.Reflect, default(MCvScalar));
                CvInvoke.Subtract(img2, temp, temp);
                CvInvoke.BitwiseOr(skel, temp, skel);
                eroded.CopyTo(img2);
                if (CvInvoke.CountNonZero(img2) == 0)
                {
                    done = true;
                }
            }

            Bitmap result = skel.Bitmap;

            // Release intermediates (Emgu images wrap unmanaged memory).
            // skel itself is NOT disposed: the returned Bitmap shares its data.
            imgOld.Dispose();
            img2.Dispose();
            eroded.Dispose();
            temp.Dispose();

            return(result);
        }
Example #5
0
        /// <summary>
        /// Fills interior holes in a binary mask. Any black region reachable from
        /// the image border is true background; black regions NOT reachable from
        /// the border are holes and are painted white in the returned copy.
        /// </summary>
        /// <param name="input">Binary mask (0 = background, 255 = foreground).</param>
        /// <returns>A new image: the input with interior holes filled white.</returns>
        public Image <Gray, Byte> FillMask(Image <Gray, Byte> input)
        {
            // Flood-fill parameters are identical for every seed point, so they
            // are created once instead of once per loop iteration.
            MCvScalar fillValue = new MCvScalar(255);
            MCvScalar minDiff   = new MCvScalar(0);
            MCvScalar maxDiff   = new MCvScalar(255);
            Rectangle boundingBox;

            // Flood-fill white from every black border pixel, so all background
            // reachable from the edges becomes white.
            Image <Gray, Byte> reachableBackground = input.Clone();

            for (int x = 0; x < input.Width; x++)
            {
                // Top edge pixel of this column.
                if (reachableBackground.Data[0, x, 0] == 0)
                {
                    CvInvoke.FloodFill(reachableBackground, null, new Point(x, 0), fillValue, out boundingBox, minDiff, maxDiff);
                }

                // Bottom edge pixel of this column.
                if (reachableBackground.Data[input.Height - 1, x, 0] == 0)
                {
                    CvInvoke.FloodFill(reachableBackground, null, new Point(x, input.Height - 1), fillValue, out boundingBox, minDiff, maxDiff);
                }
            }

            for (int y = 0; y < input.Height; y++)
            {
                // Left edge pixel of this row.
                if (reachableBackground.Data[y, 0, 0] == 0)
                {
                    CvInvoke.FloodFill(reachableBackground, null, new Point(0, y), fillValue, out boundingBox, minDiff, maxDiff);
                }

                // Right edge pixel of this row.
                if (reachableBackground.Data[y, input.Width - 1, 0] == 0)
                {
                    CvInvoke.FloodFill(reachableBackground, null, new Point(input.Width - 1, y), fillValue, out boundingBox, minDiff, maxDiff);
                }
            }

            // Whatever is still black was unreachable from the border: the holes.
            Image <Gray, Byte> holesToFill = reachableBackground.Clone();

            CvInvoke.BitwiseNot(reachableBackground, holesToFill);

            // Paint the holes white onto a copy of the original mask.
            Image <Gray, Byte> filledImg = input.Clone();

            CvInvoke.BitwiseOr(input, holesToFill, filledImg);
            return(filledImg);
        }
Example #6
0
        /// <summary>
        /// Worker thread: chroma-keys a constant overlay image onto each frame of
        /// a video and shows the composite in an OpenCV window.
        /// NOTE(review): the hard-coded paths and the unused VideoWriter are
        /// leftover test fixtures — confirm before shipping.
        /// </summary>
        public void ThreadMain()
        {
            VideoWriter writer = new VideoWriter("video.mp4", 60, new Size(1280, 720), true);

            int     frame   = 0;
            Capture cap     = new Emgu.CV.Capture(@"C:\Users\Peter Husman\Downloads\Wildlife.wmv");
            Mat     minions = new Capture(@"C:\Users\Peter Husman\Downloads\maxresdefault.jpg").QueryFrame();

            Mat data   = new Mat();
            Mat chroma = new Mat();

            Mat threshold = new Mat();
            Mat bNot      = new Mat();

            Mat minionsMask = new Mat();
            Mat vidMask     = new Mat();

            var filter = new BackgroundSubtractorMOG();

            // The key mask depends only on the constant overlay image, so compute
            // it once instead of recomputing identical masks every frame.
            CvInvoke.InRange(minions, new ScalarArray(new Emgu.CV.Structure.MCvScalar(0, 206, 0)), new ScalarArray(new Emgu.CV.Structure.MCvScalar(129, 255, 164)), threshold);
            threshold.CopyTo(bNot);
            CvInvoke.BitwiseNot(bNot, bNot);
            Mask(minions, bNot, minionsMask);

            while (true)
            {
                try
                {
                    cap.Grab();
                    bool grabbed = cap.Retrieve(data);
                    if (!grabbed)
                    {
                        // Fixed: the retrieve result was ignored, so the loop spun
                        // forever on a stale frame after the video ended.
                        break;
                    }

                    // Overlay pixels where the key color matched, video elsewhere.
                    Mask(data, threshold, vidMask);

                    CvInvoke.BitwiseOr(minionsMask, vidMask, chroma);

                    //writer.Write(chroma);
                    CvInvoke.Imshow("Window", chroma);
                    CvInvoke.WaitKey(1);
                    frame++;
                }
                catch (Exception ex)
                {
                    // Fixed: was an empty catch silently swallowing all errors.
                    Console.Error.WriteLine(ex);
                }
            }
        }
Example #7
0
        /// <summary>
        /// Recomputes the output mask (matOut) from the configured BGR color
        /// ranges and refreshes the preview image. With no grid rows, a single
        /// range taken from the UI controls is used; otherwise row 0 seeds the
        /// mask and every further row is combined via "Union" (OR) or subtracted.
        /// </summary>
        private void filter()
        {
            if (!imageSelected)
            {
                return;
            }

            int r1, g1, b1, r2, g2, b2;

            getValuesFromControls(out r1, out g1, out b1, out r2, out g2, out b2);

            if (grid.RowCount == 0)
            {
                CvInvoke.InRange(matIn,
                                 new ScalarArray(new MCvScalar(b1, g1, r1)),
                                 new ScalarArray(new MCvScalar(b2, g2, r2)),
                                 matOut);
            }
            else
            {
                // Scratch mat for each row's range. Fixed: was never disposed.
                using (Mat temp = new Mat())
                {
                    // Seed the mask with the first row's range.
                    getValuesFromGrid(out r1, out g1, out b1, out r2, out g2, out b2, 0);

                    CvInvoke.InRange(matIn,
                                     new ScalarArray(new MCvScalar(b1, g1, r1)),
                                     new ScalarArray(new MCvScalar(b2, g2, r2)),
                                     matOut);

                    // Apply the remaining rows.
                    for (int i = 1; i < grid.RowCount; i++)
                    {
                        String type = grid["colType", i].Value.ToString();

                        getValuesFromGrid(out r1, out g1, out b1, out r2, out g2, out b2, i);

                        CvInvoke.InRange(matIn,
                                         new ScalarArray(new MCvScalar(b1, g1, r1)),
                                         new ScalarArray(new MCvScalar(b2, g2, r2)),
                                         temp);

                        // Apply operator: Union or Subtract.
                        if (type.Equals("Union"))
                        {
                            CvInvoke.BitwiseOr(temp, matOut, matOut);
                        }
                        else
                        {
                            // Subtract trick: BitwiseNot(temp, matOut, matOut)
                            // writes ~temp only where matOut is currently set,
                            // which for 0/255 binary masks equals matOut &= ~temp.
                            CvInvoke.BitwiseNot(temp, matOut, matOut);
                        }
                    }
                }
            }

            img2.Image = matOut.ToImage <Bgr, Byte>();
        }
        /// <summary>
        /// Computes the morphological skeleton of a bitmap. A copy of the input
        /// is inverted pixel-by-pixel, skeletonized by repeated opening, and a
        /// copy of the skeleton is returned.
        /// </summary>
        /// <param name="image2">Source bitmap (not modified).</param>
        /// <returns>Skeleton bitmap (white skeleton on black).</returns>
        public static Bitmap Skelatanize(Bitmap image2)
        {
            Bitmap image = new Bitmap(image2);

            // Invert the working copy so dark strokes become white foreground.
            for (int y = 0; (y <= (image.Height - 1)); y++)
            {
                for (int x = 0; (x <= (image.Width - 1)); x++)
                {
                    Color inv = image.GetPixel(x, y);
                    inv = Color.FromArgb(255, (255 - inv.R), (255 - inv.G), (255 - inv.B));
                    image.SetPixel(x, y, inv);
                }
            }

            Image <Gray, byte> imgOld = new Image <Gray, byte>(image);
            Image <Gray, byte> img2   = (new Image <Gray, byte>(imgOld.Width, imgOld.Height, new Gray(255))).Sub(imgOld);
            Image <Gray, byte> eroded = new Image <Gray, byte>(img2.Size);
            Image <Gray, byte> temp   = new Image <Gray, byte>(img2.Size);
            Image <Gray, byte> skel   = new Image <Gray, byte>(img2.Size);

            skel.SetValue(0);
            CvInvoke.Threshold(img2, img2, 127, 255, 0);
            var  element = CvInvoke.GetStructuringElement(ElementShape.Cross, new Size(3, 3), new Point(-1, -1));
            bool done    = false;

            while (!done)
            {
                // One morphological opening; the residue (image minus opening) is
                // this iteration's contribution to the skeleton.
                CvInvoke.Erode(img2, eroded, element, new Point(-1, -1), 1, BorderType.Reflect, default(MCvScalar));
                CvInvoke.Dilate(eroded, temp, element, new Point(-1, -1), 1, BorderType.Reflect, default(MCvScalar));
                CvInvoke.Subtract(img2, temp, temp);
                CvInvoke.BitwiseOr(skel, temp, skel);
                eroded.CopyTo(img2);
                if (CvInvoke.CountNonZero(img2) == 0)
                {
                    done = true;
                }
            }

            Bitmap image3 = new Bitmap(skel.Bitmap);

            // Removed dead code: a second inversion loop mutated the local
            // `image` and then discarded it. NOTE(review): it may have been
            // intended to invert `image3` back instead — confirm before restoring.

            // Release intermediates (Emgu images wrap unmanaged memory). image3
            // is an independent copy, so disposing skel here is safe.
            imgOld.Dispose();
            img2.Dispose();
            eroded.Dispose();
            temp.Dispose();
            skel.Dispose();
            image.Dispose();

            return(image3);
        }
Example #9
0
        /// <summary>
        /// Unions the in-range masks of every color range in the profile into
        /// dstFrame, then binarizes the result. Each range's minimum bound is
        /// brightness-adapted via AdaptMinimum before use.
        /// </summary>
        public static void ColorFilter(IInputArray hsvFrame, Image <Gray, byte> dstFrame, Mat tmpFrame, ColorProfile colorProfile, double brightness)
        {
            var ranges = colorProfile.ColorRanges;

            // The first range seeds the destination mask directly.
            var first     = ranges[0];
            var firstLow  = new ScalarArray(AdaptMinimum(first.Minimum.ToMCvScalar(), brightness));
            var firstHigh = new ScalarArray(first.Maximum.ToMCvScalar());

            CvInvoke.InRange(hsvFrame, firstLow, firstHigh, dstFrame);

            // Each remaining range goes through the scratch frame and is OR-ed in.
            for (var idx = 1; idx < ranges.Length; idx++)
            {
                var current = ranges[idx];
                var low     = new ScalarArray(AdaptMinimum(current.Minimum.ToMCvScalar(), brightness));
                var high    = new ScalarArray(current.Maximum.ToMCvScalar());

                CvInvoke.InRange(hsvFrame, low, high, tmpFrame);
                CvInvoke.BitwiseOr(tmpFrame, dstFrame, dstFrame);
            }

            // Force a clean 0/255 binary mask.
            CvInvoke.Threshold(dstFrame, dstFrame, 10, 255, ThresholdType.Binary);
        }
Example #10
0
        /// <summary>
        /// Unions the in-range masks of every range in the color profile into
        /// dstFrame and binarizes the result. Minimum bounds are adapted to the
        /// base brightness via AdaptMinimum.
        /// </summary>
        public static void ColorFilter(Image <Hsv, byte> srcFrame, Image <Gray, byte> dstFrame, Mat tmpFrame, ColorProfile colorProfile, double baseBrightness)
        {
            var allRanges = colorProfile.Ranges;

            // First range writes straight into the destination mask.
            var seed = allRanges[0];

            CvInvoke.InRange(
                srcFrame,
                new ScalarArray(AdaptMinimum(seed.Min.CvScalar, baseBrightness)),
                new ScalarArray(seed.Max.CvScalar),
                dstFrame);

            // Remaining ranges are computed into the scratch mat and merged in.
            for (var j = 1; j < allRanges.Length; j++)
            {
                var r = allRanges[j];

                CvInvoke.InRange(
                    srcFrame,
                    new ScalarArray(AdaptMinimum(r.Min.CvScalar, baseBrightness)),
                    new ScalarArray(r.Max.CvScalar),
                    tmpFrame);
                CvInvoke.BitwiseOr(tmpFrame, dstFrame, dstFrame);
            }

            // Clean up to a strict 0/255 binary mask.
            CvInvoke.Threshold(dstFrame, dstFrame, 10, 255, ThresholdType.Binary);
        }
Example #11
0
        /// <summary>
        /// Builds a mask of the pixels in the current layer that have at least
        /// one exposed face, minimizing the pixels that need full 6-direction
        /// neighbor checking. A pixel qualifies when it lies on an outer contour
        /// of the layer, or is present in this layer but absent above or below.
        /// </summary>
        /// <param name="curLayer">Current layer image.</param>
        /// <param name="layerAbove">Layer above, or null (treated as blank).</param>
        /// <param name="layerBelow">Layer below, or null (treated as blank).</param>
        /// <returns>Mask of exposed-face pixels; caller owns the returned Mat.</returns>
        public static Mat BuildVoxelLayerImage(Mat curLayer, Mat layerAbove = null, Mat layerBelow = null)
        {
            /* the outer contours of the current layer should always be checked, they by definition should have an exposed face */
            using var contours = curLayer.FindContours(RetrType.Tree);
            var onlyContours = curLayer.NewBlank();

            CvInvoke.DrawContours(onlyContours, contours, -1, EmguExtensions.WhiteColor, 1);

            bool needAboveDispose = layerAbove is null;
            bool needBelowDispose = layerBelow is null;

            layerAbove ??= curLayer.NewBlank();
            layerBelow ??= curLayer.NewBlank();

            /* anything that is in the current layer but is not in the layer above, by definition has an exposed face */
            using Mat upperSubtract = new Mat(); // fixed: was never disposed

            CvInvoke.Subtract(curLayer, layerAbove, upperSubtract);

            /* anything that is in the current layer but is not in the layer below, by definition has an exposed face */
            using Mat lowerSubtract = new Mat(); // fixed: was never disposed

            CvInvoke.Subtract(curLayer, layerBelow, lowerSubtract);

            /* Or all of these together to get the list of pixels that have exposed face(s) */
            var voxelLayer = curLayer.NewBlank();

            CvInvoke.BitwiseOr(onlyContours, voxelLayer, voxelLayer);
            CvInvoke.BitwiseOr(upperSubtract, voxelLayer, voxelLayer);
            CvInvoke.BitwiseOr(lowerSubtract, voxelLayer, voxelLayer);

            /* dispose of the layerAbove/layerBelow if they were allocated here */
            if (needAboveDispose)
            {
                layerAbove.Dispose();
            }
            if (needBelowDispose)
            {
                layerBelow.Dispose();
            }
            onlyContours.Dispose();

            return(voxelLayer);
        }
    /// <summary>
    /// Builds a boundary image: merges the body and handle masks, runs Canny
    /// edge detection, dilates the edges, and paints them with Rgb(0, 0, 255)
    /// on a black background.
    /// </summary>
    public Image <Rgb, byte> GetBoundaryImage()
    {
        Image <Rgb, byte> result = this.GetBodyImage();

        // Merge every handle mask into the body mask.
        foreach (Image <Rgb, byte> handle in this.GetHandleImage())
        {
            CvInvoke.BitwiseOr(result, handle, result);
        }

        // Edge-detect the merged mask, then thicken the edges slightly.
        Image <Gray, Byte> edges  = result.Canny(60, 100);
        var                kernel = CvInvoke.GetStructuringElement(ElementShape.Cross, new Size(3, 3), new Point(-1, -1));

        CvInvoke.Dilate(edges, edges, kernel, new Point(-1, -1), 1, BorderType.Reflect, default(MCvScalar));

        // Repaint: black canvas with the dilated edge pixels colored.
        result.SetZero();
        result.SetValue(new Rgb(0, 0, 255), edges);

        return(result);
    }
        /// <summary>
        /// Segments skin-colored pixels using hue thresholds in the HSV color
        /// space: skin hues sit at both ends of the hue wheel (0-30 and 160-180),
        /// so two in-range bands are computed and merged.
        /// </summary>
        /// <param name="inputImage">A standard BGR image.</param>
        /// <returns>Grayscale mask with skin-containing pixels set white.</returns>
        private static Image <Gray, byte> HsvSegment(Image <Bgr, byte> inputImage)
        {
            var hsvImage    = inputImage.Copy().Convert <Hsv, byte>();
            var outputImage = new Image <Gray, byte>(hsvImage.Size);

            // Low band: hue 0-30; high band: hue 160-180 (full S and V ranges).
            Image <Gray, byte> lowBand  = hsvImage.InRange(new Hsv(0, 0, 0), new Hsv(30, 255, 255));
            Image <Gray, byte> highBand = hsvImage.InRange(new Hsv(160, 0, 0), new Hsv(180, 255, 255));

            // The skin mask is the union of both hue bands.
            CvInvoke.BitwiseOr(lowBand, highBand, outputImage);

            hsvImage.Dispose();
            lowBand.Dispose();
            highBand.Dispose();
            return(outputImage);
        }
Example #14
0
        /// <summary>
        /// Renders a 16-bit depth map as a false-color image: the depth value
        /// becomes the hue channel (with saturation and value forced to 255),
        /// then the HSV image is converted to BGR for display.
        /// </summary>
        private void DisplayDepthHSV(Mat depthMat16U)
        {
            if (depthMat16U == null)
            {
                return;
            }

            using (Mat depth8U = new Mat(depthMat16U.Size, DepthType.Cv8U, 1))
            using (Mat gray8U3 = new Mat(depthMat16U.Size, DepthType.Cv8U, 3))
            using (Mat hsv8U3 = new Mat(depthMat16U.Size, DepthType.Cv8U, 3))
            using (Mat satValFill = new Mat(depthMat16U.Size, DepthType.Cv8U, 3))
            {
                // Scale 16-bit depth down to 8 bits, then replicate to 3 channels.
                depthMat16U.ConvertTo(depth8U, DepthType.Cv8U, 1 / 256d);
                CvInvoke.CvtColor(depth8U, gray8U3, ColorConversion.Gray2Bgr);

                // OR with (0, 255, 255): channel 0 keeps the depth (hue), while
                // channels 1 and 2 (saturation, value) are forced to 255.
                satValFill.SetTo(new MCvScalar(0, 255, 255));
                CvInvoke.BitwiseOr(gray8U3, satValFill, hsv8U3);

                // Interpret the result as HSV and convert to BGR for display.
                CvInvoke.CvtColor(hsv8U3, hsv8U3, ColorConversion.Hsv2Bgr);

                DisplayMatOnBitmap(hsv8U3, this.bitmap1);
                InspectDepthPixel(depthMat16U);
            }
        }
Example #15
0
    /// <summary>
    /// Paints or erases on WallMask at the given point. BRUSH and ERASER draw a
    /// filled circle directly; FLOODFILL flood-fills the camera image from the
    /// point (bounded by EdgeMap), turns the filled region into a mask, and
    /// paints it with the currently selected color. Saves undo state first.
    /// </summary>
    /// <param name="point">Seed/brush position in image coordinates.</param>
    /// <param name="pm">Painting mode: brush, eraser, or flood fill.</param>
    public void GrabCut3(Vector2 point, PAINTING_MODE pm)
    {
        SaveToUndoState();



        if (pm == PAINTING_MODE.BRUSH)
        {
            // Paint a filled white circle (radius 30) and refresh the output texture.
            CvInvoke.Circle(WallMask, point.toPoint(), 30, White, -1);

            WallMask.ApplyToTexture2D(OutputMaskTexture);

            return;
        }



        if (pm == PAINTING_MODE.ERASER)
        {
            // Erase with a filled black circle (radius 30).
            CvInvoke.Circle(WallMask, point.toPoint(), 30, Black, -1);

            WallMask.ApplyToTexture2D(OutputMaskTexture);

            return;
        }



        Mat img2 = CameraMat24.Clone();

        Mat imgTarget = CameraMat24.Clone(); // Fill color (red for the first color, green for the second, blue for the third)

        switch (SelectedColorIndex)
        {
        case 0: imgTarget.SetTo(Red); break;

        case 1: imgTarget.SetTo(Green); break;

        case 2: imgTarget.SetTo(Blue); break;
        }



        Mat imgref = CameraMat24.Clone(); // Reference color (pure pink) used to recognize flood-filled pixels

        imgref.SetTo(Pink);



        // Clone of the edge map so FloodFill does not mutate the shared one.
        Mat maskEdges2 = EdgeMap.Clone();



        Mat mask1 = new Mat(img2.Rows, img2.Cols, DepthType.Cv8U, 1);

        Mat mask3 = new Mat(img2.Rows, img2.Cols, DepthType.Cv8U, 3);

        mask3.SetTo(Black);



        Point PixelPoint = point.toPoint();

        // Sample the clicked pixel's color from the raw byte buffer.
        // NOTE(review): assumes CameraColors24 is 3 bytes per pixel in R,G,B
        // order, row-major with no padding — TODO confirm against the producer.
        int Offset = (PixelPoint.Y * img2.Width + PixelPoint.X) * 3;

        int R = CameraColors24[Offset];

        int G = CameraColors24[Offset + 1];

        int B = CameraColors24[Offset + 2];



        // Flood-fill tolerances, clamped so the fill bounds stay within 0..255.
        MCvScalar LoDiff = new MCvScalar(Mathf.Min(DeltaColor2, B), Mathf.Min(DeltaColor2, G), Mathf.Min(DeltaColor2, R));

        MCvScalar UpDiff = new MCvScalar(Mathf.Min(DeltaColor2, 255 - B), Mathf.Min(DeltaColor2, 255 - G), Mathf.Min(DeltaColor2, 255 - R));



        Rectangle rect = new Rectangle();



        switch (pm)
        {
        case PAINTING_MODE.FLOODFILL:

            // Fill the clicked region with pink, constrained by the edge map.
            CvInvoke.FloodFill(img2, maskEdges2, PixelPoint, Pink, out rect, LoDiff, UpDiff, Connectivity.FourConnected);

            CvInvoke.Compare(img2, imgref, mask3, CmpType.Equal);     // All must be 3-channel; mask3 receives the region to paint

            // Grow and soften the mask edges slightly.
            CvInvoke.Dilate(mask3, mask3, null, Anchor, 2, BorderType.Default, Black);

            CvInvoke.Blur(mask3, mask3, new Size(4, 4), Anchor);



            CvInvoke.ExtractChannel(mask3, mask1, 0); // Extract one channel to use as the mask

            CvInvoke.BitwiseNot(mask3, mask3);        // Negative of the area to paint, used to un-paint before painting



            CvInvoke.BitwiseAnd(WallMask, mask3, WallMask);           // Un-paint first

            CvInvoke.BitwiseOr(WallMask, imgTarget, WallMask, mask1); // Paint the target color (red, green, or blue) where the mask is set



            break;
        }



        WallMask.ApplyToTexture2D(OutputMaskTexture);



        img2.Dispose();

        imgref.Dispose();

        imgTarget.Dispose();

        mask1.Dispose();

        mask3.Dispose();

        maskEdges2.Dispose();
    }
Example #16
0
    // DetectEdges: builds EdgeMap, a 1-channel edge mask two pixels larger than
    // the camera frame in each dimension (flood-fill masks require a 1-pixel
    // border all around the image).

    public void DetectEdges()
    {
        // Stretch intensities to 1..254 in place.
        // NOTE(review): this mutates the shared CameraMat24 frame — confirm intended.
        CvInvoke.Normalize(CameraMat24, CameraMat24, 1, 254, NormType.MinMax);

        Debug.Log("Kauel: DetectEdges()");

        PauseCamera();

        if (EdgeMap != null)
        {
            EdgeMap.Dispose();
        }

        EdgeMap = new Mat(CameraMat24.Rows + 2, CameraMat24.Cols + 2, DepthType.Cv8U, 1);

        Rectangle roi = new Rectangle(1, 1, CameraMat24.Width, CameraMat24.Height);

        // View into the interior of EdgeMap (shares memory with EdgeMap).
        Mat EdgeMapCenter = new Mat(EdgeMap, roi);



        Mat img1 = CameraMat24.Clone();

        Mat img2 = img1.Clone();

        Mat img3 = img1.Clone();



        CvInvoke.FastNlMeansDenoising(img1, img1);            // Removes noise.

        CvInvoke.GaussianBlur(img1, img2, new Size(9, 9), 9); // Blur

        // Unsharp mask: img1 = 1.5*img1 - 0.5*blurred, sharpening the edges.
        CvInvoke.AddWeighted(img1, 1.5, img2, -0.5, 0, img1, DepthType.Cv8U);

        //img1.Save("C:/dnn/Filter.png");



        Mat imgCanny = img1.Clone();

        CvInvoke.Canny(img1, imgCanny, CannyLow, CannyHigh, CannyAperture);



        CvInvoke.CvtColor(img1, img1, ColorConversion.Bgr2Gray); // Grayscale for the gradient operators below.



        // Horizontal and vertical Sobel gradients.
        CvInvoke.Sobel(img1, img2, DepthType.Cv32F, 1, 0, BorderAperture, 1);

        CvInvoke.Sobel(img1, img3, DepthType.Cv32F, 0, 1, BorderAperture, 1);



        // Gradient magnitude approximation |dx| + |dy|, converted back to 8 bits.
        CvInvoke.ConvertScaleAbs(img2, img2, 1, 0);

        CvInvoke.ConvertScaleAbs(img3, img3, 1, 0);

        CvInvoke.AddWeighted(img2, 1, img3, 1, 0, img3);

        img3.ConvertTo(img3, DepthType.Cv8U);



        //img3.Save("C:/dnn/SobelEroded.png");

        // Binarize the gradient magnitude with a locally adaptive threshold.
        CvInvoke.AdaptiveThreshold(img3, img3, 255, AdaptiveThresholdType.MeanC, ThresholdType.Binary, ContrastAperture, -Contrast);

        //img3.Save("C:/dnn/Adaptive.png");

        // Union of Canny edges and adaptive-threshold edges.
        CvInvoke.BitwiseOr(imgCanny, img3, img3);

        img3.CopyTo(img2);

        // Reinforce straight edges: detect line segments and redraw them on img3.
        LineSegment2D[] lines = CvInvoke.HoughLinesP(img2, HoughLineRho, HoughLineAngle, HoughLineThreshold, HoughLineMinLineLength, HoughLineMaxGap);

        //img2.SetTo(Black);

        for (int i = 0; i < lines.Length; i++)
        {
            CvInvoke.Line(img3, lines[i].P1, lines[i].P2, White, 1);
        }

        lines = null;

        // Copy the finished edge image into the interior of EdgeMap.
        img3.CopyTo(EdgeMapCenter);



        img1.Dispose();

        img2.Dispose();

        img3.Dispose();

        imgCanny.Dispose();

        // Disposes only the ROI wrapper; EdgeMap still owns the pixel data.
        EdgeMapCenter.Dispose();
    }
    /// <summary>
    /// Fills the gap between a body mask and its handle masks: merges all handle
    /// images, dilates them so they overlap the nearby body, intersects with the
    /// body, gap-fills that overlap region, and merges it back into the body.
    /// Mutates and returns <paramref name="body_img"/>.
    /// </summary>
    /// <param name="body_img">Body mask; modified in place and returned.</param>
    /// <param name="handle_imgs">Handle masks; returned unchanged body if empty.</param>
    /// <returns>The (possibly modified) body image.</returns>
    public static Image <Rgb, byte> FillBodyWithHandle(Image <Rgb, byte> body_img, List <Image <Rgb, byte> > handle_imgs)
    {
        int padding = (int)NOT_CONNECTED_DIS_THRES;

        if (handle_imgs.Count == 0)
        {
            return(body_img);
        }

        // Merge every handle mask into a single image.
        Image <Rgb, byte> handle_img_merge = body_img.CopyBlank();

        handle_img_merge.SetZero();
        foreach (Image <Rgb, byte> img in handle_imgs)
        {
            CvInvoke.BitwiseOr(img, handle_img_merge, handle_img_merge);
        }

        // Grow the merged handles so they overlap the nearby body region.
        var element = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(3, 3), new Point(-1, -1));

        CvInvoke.Dilate(handle_img_merge, handle_img_merge, element, new Point(-1, -1), padding, BorderType.Reflect, default(MCvScalar));

        // Restrict to body pixels near a handle, fill gaps there, merge back.
        Image <Rgb, byte> to_fill = body_img.CopyBlank();

        CvInvoke.BitwiseAnd(body_img, handle_img_merge, to_fill);
        FillGap(to_fill, padding * 3);
        CvInvoke.BitwiseOr(body_img, to_fill, body_img);

        // Fixed: intermediates were never disposed (Emgu images wrap unmanaged memory).
        handle_img_merge.Dispose();
        to_fill.Dispose();

        // (Removed the large commented-out "old solution" region — dead code.)
        return(body_img);
    }
Example #18
0
        //##############################################################################################################################################################################################

        /// <summary>
        /// Extract all pieces from the source image(s).
        /// </summary>
        /// <remarks>
        /// Collects all supported image files from <see cref="PuzzlePiecesFolderPath"/> (which may be a folder
        /// or a single file), generates a foreground mask per image using the first enabled
        /// <see cref="PluginGroupInputImageMask"/> plugin, detects blobs in the mask and creates one
        /// <see cref="Piece"/> per blob that satisfies the configured minimum piece size.
        /// Results are published via <see cref="Pieces"/> and <see cref="InputImages"/>; progress via
        /// <see cref="CurrentSolverStepPercentageFinished"/>. Cancellation and errors are reported through
        /// <see cref="CurrentSolverState"/>.
        /// </remarks>
        private void extract_pieces()
        {
            try
            {
                CurrentSolverState = PuzzleSolverState.INIT_PIECES;
                Piece.NextPieceID  = 0;
                CurrentSolverStepPercentageFinished = 0;
                _logHandle.Report(new LogEventInfo("Extracting Pieces"));
                NumberPuzzlePieces = 0;

                Pieces.Clear();
                InputImages.Clear();

                List <string> imageExtensions = new List <string>()
                {
                    ".jpg", ".png", ".bmp", ".tiff"
                };
                FileAttributes  attr           = File.GetAttributes(PuzzlePiecesFolderPath);
                List <FileInfo> imageFilesInfo = new List <FileInfo>();
                if (attr.HasFlag(FileAttributes.Directory))      // detect whether the path is a directory or a single file
                {
                    DirectoryInfo folderInfo = new DirectoryInfo(PuzzlePiecesFolderPath);
                    imageFilesInfo = folderInfo.GetFiles().ToList();
                }
                else
                {
                    imageFilesInfo.Add(new FileInfo(PuzzlePiecesFolderPath));
                }

                // BUGFIX: compare extensions case-insensitively so files like "IMG_0001.JPG" are not silently skipped.
                imageFilesInfo = imageFilesInfo.Where(f => imageExtensions.Contains(f.Extension, StringComparer.OrdinalIgnoreCase)).ToList();

                int loopCount = 0;

                ParallelOptions parallelOptions = new ParallelOptions
                {
                    CancellationToken      = _cancelToken,
                    MaxDegreeOfParallelism = (PluginFactory.GetGeneralSettingsPlugin().UseParallelLoops ? Environment.ProcessorCount : 1)
                };
                // For each input image (possibly processed in parallel, see MaxDegreeOfParallelism above)
                Parallel.For(0, imageFilesInfo.Count, parallelOptions, (i) =>
                {
                    using (Image <Rgba, byte> sourceImg = new Image <Rgba, byte>(imageFilesInfo[i].FullName)) //.LimitImageSize(1000, 1000))
                    {
                        CvInvoke.MedianBlur(sourceImg, sourceImg, 5);   // suppress sensor noise before mask generation

                        // Get the (first) enabled Plugin for input image mask generation
                        PluginGroupInputImageMask pluginInputImageMask = PluginFactory.GetEnabledPluginsOfGroupType <PluginGroupInputImageMask>().FirstOrDefault();

                        // BUGFIX: FirstOrDefault() returns null when no mask plugin is enabled; fail with a clear
                        // message instead of a NullReferenceException (reported by the generic catch below).
                        if (pluginInputImageMask == null)
                        {
                            throw new InvalidOperationException("No input image mask plugin is enabled.");
                        }

                        using (Image <Gray, byte> mask = pluginInputImageMask.GetMask(sourceImg))
                        {
                            _logHandle.Report(new LogEventInfo("Extracting Pieces from source image " + i.ToString()));
                            if (PluginFactory.GetGeneralSettingsPlugin().SolverShowDebugResults)
                            {
                                _logHandle.Report(new LogEventImage("Source image " + i.ToString(), sourceImg.Bitmap));
                                _logHandle.Report(new LogEventImage("Mask " + i.ToString(), mask.Bitmap));
                            }

                            // Find all blobs in the mask image, extract them and add them to the list of pieces.
                            // BUGFIX: using-blocks guarantee disposal of the detector/blobs even on exceptions
                            // (the old manual Dispose() calls were skipped when an exception was thrown).
                            using (CvBlobDetector blobDetector = new CvBlobDetector())
                            using (CvBlobs blobs = new CvBlobs())
                            {
                                blobDetector.Detect(mask, blobs);

                                int minPieceSize = PluginFactory.GetGeneralSettingsPlugin().PuzzleMinPieceSize;
                                foreach (CvBlob blob in blobs.Values.Where(b => b.BoundingBox.Width >= minPieceSize && b.BoundingBox.Height >= minPieceSize))
                                {
                                    if (_cancelToken.IsCancellationRequested)
                                    {
                                        _cancelToken.ThrowIfCancellationRequested();
                                    }

                                    extract_piece_from_blob(sourceImg, mask, blob, imageFilesInfo[i].FullName);
                                }
                            }

                            // BUGFIX: use the return value of the atomic increment for the percentage instead of
                            // re-reading loopCount afterwards (that read raced with other tasks' increments).
                            int finishedImages = Interlocked.Increment(ref loopCount);
                            CurrentSolverStepPercentageFinished = (finishedImages / (double)imageFilesInfo.Count) * 100;

                            if (PluginFactory.GetGeneralSettingsPlugin().SolverShowDebugResults)
                            {
                                _logHandle.Report(new LogEventImage("Source Img " + i.ToString() + " Pieces", sourceImg.Bitmap));
                            }

                            // BUGFIX: InputImages is shared between the parallel tasks; guard it with the same
                            // lock that protects Pieces (it was previously added to without synchronization).
                            ImageDescribedLight inputImage = new ImageDescribedLight(Path.GetFileName(imageFilesInfo[i].FullName), PuzzlePiecesFolderPath + @"\Results\InputImagesMarked\" + Path.GetFileName(imageFilesInfo[i].FullName), sourceImg.Bitmap); //sourceImg.LimitImageSize(1000, 1000).Bitmap));
                            lock (_piecesLock) { InputImages.Add(inputImage); }
                        }
                    }

                    // Nudge the GC once per processed image: the large Emgu images create significant memory
                    // pressure that would otherwise accumulate across the parallel loop. (The previous per-blob
                    // GC.Collect() calls were removed — they only added overhead inside the hot loop.)
                    GC.Collect();
                    GC.WaitForPendingFinalizers();
                    GC.Collect();
                });

                Pieces.Sort(p => ((Piece)p).PieceIndex, null);
            }
            catch (OperationCanceledException)
            {
                _logHandle.Report(new LogEventWarning("The operation was canceled. Step: " + CurrentSolverState.ToString()));
                CurrentSolverState = PuzzleSolverState.UNSOLVED;
            }
            catch (Exception ex)
            {
                _logHandle.Report(new LogEventError("The following error occured in step " + CurrentSolverState.ToString() + ":\n" + ex.Message));
                CurrentSolverState = PuzzleSolverState.ERROR;
                CurrentSolverStepPercentageFinished = 100;
            }
        }

        /// <summary>
        /// Extract a single puzzle piece from one blob: crop the piece from the source image, mask out its
        /// background, create the <see cref="Piece"/>, add it to <see cref="Pieces"/> and annotate the source
        /// image with the piece's bounding box and number.
        /// </summary>
        /// <param name="sourceImg">Full source image the blob was detected in (annotated in place).</param>
        /// <param name="mask">Foreground mask belonging to <paramref name="sourceImg"/>.</param>
        /// <param name="blob">The detected blob describing the piece's bounding box.</param>
        /// <param name="sourceFileName">Full path of the source image file (stored with the piece).</param>
        private void extract_piece_from_blob(Image <Rgba, byte> sourceImg, Image <Gray, byte> mask, CvBlob blob, string sourceFileName)
        {
            Rectangle roi = blob.BoundingBox;

            Image <Rgba, byte> pieceSourceImg             = null;
            Image <Gray, byte> pieceMask                  = null;
            Image <Rgba, byte> pieceSourceImageForeground = null;
            Image <Gray, byte> pieceMaskInverted          = null;
            Image <Rgba, byte> background                 = null;
            Image <Rgba, byte> pieceSourceImageBackground = null;
            Image <Rgba, byte> pieceSourceImgMasked       = null;

            try
            {
                // Add a 2 px border around the piece when the source image is large enough for it.
                if (sourceImg.Height > roi.Height + 4 && sourceImg.Width > roi.Width + 4)
                {
                    roi.Inflate(2, 2);
                }

                try
                {
                    pieceSourceImg = sourceImg.Copy(roi);
                    pieceMask      = mask.Copy(roi);
                }
                catch (Exception)
                {
                    // The inflated ROI may fall outside the image; retry with the exact bounding box.
                    roi            = blob.BoundingBox;
                    pieceSourceImg = sourceImg.Copy(roi);
                    pieceMask      = mask.Copy(roi);
                }

                // Mask out the background of the piece: foreground pixels from the source image ...
                pieceSourceImageForeground = new Image <Rgba, byte>(pieceSourceImg.Size);
                CvInvoke.BitwiseOr(pieceSourceImg, pieceSourceImg, pieceSourceImageForeground, pieceMask);

                // ... combined with transparent white wherever the inverted mask marks background.
                // (Plain Copy() replaces the old self-masked pieceMask.Copy(pieceMask), which was equivalent.)
                pieceMaskInverted = pieceMask.Copy();
                pieceMaskInverted._Not();
                background = new Image <Rgba, byte>(pieceSourceImg.Size);
                background.SetValue(new Rgba(255, 255, 255, 0));
                pieceSourceImageBackground = new Image <Rgba, byte>(pieceSourceImg.Size);
                CvInvoke.BitwiseOr(background, background, pieceSourceImageBackground, pieceMaskInverted);

                pieceSourceImgMasked = new Image <Rgba, byte>(pieceSourceImg.Size);
                CvInvoke.BitwiseOr(pieceSourceImageForeground, pieceSourceImageBackground, pieceSourceImgMasked);

                Piece p = new Piece(pieceSourceImgMasked, pieceMask, sourceFileName, roi.Location, _logHandle, _cancelToken);

                // BUGFIX: NumberPuzzlePieces++ is a non-atomic read-modify-write; do it under the same lock
                // as the Pieces collection so concurrent blob extractions cannot lose increments.
                lock (_piecesLock)
                {
                    Pieces.Add(p);
                    NumberPuzzlePieces++;
                }

                // Annotate the source image with the piece's location and number (red rectangle + label).
                sourceImg.Draw(roi, new Rgba(255, 0, 0, 1), 2);
                string pieceNumber = p.PieceID.Replace("Piece", "");
                int baseLine       = 0;
                Size textSize      = CvInvoke.GetTextSize(pieceNumber, FontFace.HersheyDuplex, 3, 2, ref baseLine);
                CvInvoke.PutText(sourceImg, pieceNumber, Point.Add(roi.Location, new Size(0, textSize.Height + 10)), FontFace.HersheyDuplex, 3, new MCvScalar(255, 0, 0), 2);
            }
            finally
            {
                // BUGFIX: dispose every intermediate image even when extraction fails part-way —
                // the previous code only disposed them on the success path and leaked on exceptions.
                pieceSourceImg?.Dispose();
                pieceMask?.Dispose();
                pieceSourceImageForeground?.Dispose();
                pieceMaskInverted?.Dispose();
                background?.Dispose();
                pieceSourceImageBackground?.Dispose();
                pieceSourceImgMasked?.Dispose();
            }
        }