private Mat find_ball()
        {
            // HSV threshold bounds for the ball colour. NOTE(review): these are much
            // wider than the commented originals — presumably tuned empirically; confirm.
            MCvScalar orangeMin = new MCvScalar(0, 0, 212);     //10 120 100
            MCvScalar orangeMax = new MCvScalar(131, 255, 255); //70 255 255

            Mat img    = _frame;
            Mat hsvImg = new Mat();

            // Convert to HSV and threshold to a binary mask of candidate pixels.
            CvInvoke.CvtColor(img, hsvImg, ColorConversion.Bgr2Hsv);
            CvInvoke.InRange(hsvImg, new ScalarArray(orangeMin), new ScalarArray(orangeMax),
                             hsvImg);
            //CvInvoke.MorphologyEx(hsvImg, hsvImg, MorphOp.Close, new Mat(), new System.Drawing.Point(-1, -1), 5, BorderType.Default, new MCvScalar());

            // Blob detector filtered by area only; all shape/colour filters disabled.
            SimpleBlobDetectorParams param = new SimpleBlobDetectorParams();

            param.FilterByCircularity = false;
            param.FilterByConvexity   = false;
            param.FilterByInertia     = false;
            param.FilterByColor       = false;
            param.MinArea             = 800;
            param.MaxArea             = 5000;
            SimpleBlobDetector detector = new SimpleBlobDetector(param);

            MKeyPoint[] keypoints = detector.Detect(hsvImg);
            Features2DToolbox.DrawKeypoints(img, new VectorOfKeyPoint(keypoints), img, new
                                            Bgr(255, 0, 0), Features2DToolbox.KeypointDrawType.DrawRichKeypoints);

            // BUG FIX: the original loop re-evaluated every keypoint, so a later
            // out-of-range keypoint could reset centerX/centerY even after an
            // in-range one had been found. Use the first in-range keypoint instead.
            // (Also removed the unused "Mat arr" local.)
            bool found = false;
            foreach (var item in keypoints)
            {
                int px = (int)item.Point.X;
                int py = (int)item.Point.Y;
                if (px > x_min && px < x_max && py > y_min && py < y_max)
                {
                    centerX = px;
                    centerY = py;
                    found   = true;
                    break;
                }
            }

            if (!found)
            {
                // No usable detection: fall back to the defaults and reset the
                // accumulated error terms (presumably PID state — confirm).
                centerX = dX;
                centerY = dY;

                total_error_x = 0;

                total_error_y = 0;
            }

            lbl_x.Content = "Center X: " + centerX;
            lbl_y.Content = "Center Y: " + centerY;

            // Returns the input frame with rich keypoints drawn onto it.
            return(img);
        }
Пример #2
0
        private PointF GetLocation()
        {
            // Estimate the location of the target as the centre of the contour's
            // bounding box, then snap it to a nearby detected blob keypoint.
            // BUG FIX (perf): CvInvoke.BoundingRectangle(contour) was recomputed
            // four times; compute it once.
            Rectangle bounds = CvInvoke.BoundingRectangle(contour);
            int    x         = bounds.Right - bounds.Width / 2;   // horizontal centre
            int    y         = bounds.Bottom - bounds.Height / 2; // vertical centre
            PointF p         = new PointF(x, y);
            Mat    usefulMat = new Mat();

            if (noteType > 2)
            {
                // Erode with a cross kernel, dilate with an ellipse kernel, then
                // invert — cleans up the blob mask before detection.
                CvInvoke.Erode(blobMat, usefulMat,
                               CvInvoke.GetStructuringElement(ElementShape.Cross, new Size(27, 27), new Point(13, 13)),
                               new Point(1, 1), 1, BorderType.Default, new MCvScalar(1));
                CvInvoke.Dilate(usefulMat, usefulMat,
                                CvInvoke.GetStructuringElement(ElementShape.Ellipse, new Size(27, 27), new Point(13, 13)),
                                new Point(1, 1), 1, BorderType.Default, new MCvScalar(1));
                CvInvoke.BitwiseNot(usefulMat, usefulMat);
            }
            else
            {
                usefulMat = blobMat;
            }

            VectorOfKeyPoint keyPoints = new VectorOfKeyPoint(detector.Detect(usefulMat));

            // Snap p to the first keypoint whose Y lies within +/- width of p.Y.
            for (int i = 0; i < keyPoints.Size; i++)
            {
                if (keyPoints[i].Point.Y - width < p.Y && keyPoints[i].Point.Y + width > p.Y)
                {
                    p.X = keyPoints[i].Point.X;
                    p.Y = keyPoints[i].Point.Y;
                    break;
                }
            }
            return(p);
        }
Пример #3
0
        private MKeyPoint[] detector(Image <Gray, Double> src)
        {
            // Detect blobs in a double-precision grayscale image.
            // The image is first converted to 8-bit grayscale, which the
            // SimpleBlobDetector requires.
            Image <Gray, byte> _image = new Image <Gray, byte>(src.Bitmap);

            // Area filter only: a blob must cover at least half the template image.
            SimpleBlobDetectorParams simpleBlobDetectorParams = new SimpleBlobDetectorParams();

            simpleBlobDetectorParams.MinArea = _imageTemple.Width * _imageTemple.Height / 2;
            //simpleBlobDetectorParams.MaxArea = 100000000;
            simpleBlobDetectorParams.ThresholdStep       = 10;
            // BUG FIX: MinThreshold was assigned twice (50 both times); one
            // assignment kept, grouped with MaxThreshold.
            simpleBlobDetectorParams.MinThreshold        = 50;
            simpleBlobDetectorParams.MaxThreshold        = 255;
            simpleBlobDetectorParams.FilterByArea        = true;
            simpleBlobDetectorParams.FilterByCircularity = false;
            simpleBlobDetectorParams.FilterByColor       = false;
            simpleBlobDetectorParams.FilterByConvexity   = false;
            simpleBlobDetectorParams.FilterByInertia     = false;

            SimpleBlobDetector simpleBlobDetector = new SimpleBlobDetector(simpleBlobDetectorParams);

            // Returns the detected blob keypoints (may be empty).
            return(simpleBlobDetector.Detect(_image));
        }
Пример #4
0
        public void TestSimpleBlobDetector()
        {
            // Smoke test: constructing a SimpleBlobDetector with default
            // parameters and running it over a sample image must not throw.
            Mat image = EmguAssert.LoadMat("box.png");

            SimpleBlobDetector blobDetector = new SimpleBlobDetector(new SimpleBlobDetectorParams());

            MKeyPoint[] detected = blobDetector.Detect(image);
        }
Пример #5
0
        private void button2_Click(object sender, EventArgs e)
        {
            // Load the selected file, detect roughly circular blobs in its
            // grayscale version, and display the image with the blobs boxed.
            img = new Image <Bgr, byte>(open.FileName);
            Image <Gray, byte> imgGray = img.Convert <Gray, byte>();
            /* blob detector */
            //Gray Gavg = imgGray.GetAverage();
            double minValue = 255;
            double maxValue = 0;
            Point  minLoc   = new Point();
            Point  maxLoc   = new Point();

            // Use the image's intensity extremes to bound the threshold sweep.
            CvInvoke.MinMaxLoc(imgGray, ref minValue, ref maxValue, ref minLoc, ref maxLoc);

            SimpleBlobDetectorParams blobparams = new SimpleBlobDetectorParams();

            blobparams.FilterByArea        = true;                // filter blobs by area
            blobparams.MinArea             = 2000;                // minimum blob area
            blobparams.MaxArea             = 300000;              // maximum blob area
            blobparams.MinThreshold        = (float)minValue + 1; // start of binarization threshold sweep (T1)
            blobparams.MaxThreshold        = (float)maxValue;     // end of binarization threshold sweep (T2)
            blobparams.FilterByCircularity = true;                // filter blobs by circularity (off by default)
            blobparams.MinCircularity      = (float)0.5;          // minimum circularity
            blobparams.MaxCircularity      = 1;                   // maximum circularity
            blobparams.FilterByConvexity   = true;                // filter blobs by convexity
            blobparams.MinConvexity        = (float)0.8;          // minimum convexity
            blobparams.MaxConvexity        = 10;                  // maximum convexity
            blobparams.FilterByInertia     = true;                // filter blobs by inertia ratio
            blobparams.MinInertiaRatio     = (float)0.4;          // minimum inertia ratio
            blobparams.MaxInertiaRatio     = 1;                   // maximum inertia ratio
            blobparams.FilterByColor       = false;               // do not filter blobs by color
            blobparams.blobColor           = 255;                 // blob color (unused while FilterByColor is false)
            blobparams.ThresholdStep       = 135;                 // binarization threshold step (t)
            blobparams.MinRepeatability    = new IntPtr(2);       // a blob must appear in at least this many binarized images to count as a keypoint
            SimpleBlobDetector detector = new SimpleBlobDetector(blobparams);

            MKeyPoint[]       keypoints = detector.Detect(imgGray);
            Image <Bgr, byte> imgBgr    = img.Copy();

            foreach (MKeyPoint keypoint in keypoints)
            {
                // Draw a square centred on the keypoint, sized by the blob diameter.
                imgBgr.Draw(new Rectangle((int)(keypoint.Point.X - keypoint.Size / 2), (int)(keypoint.Point.Y - keypoint.Size / 2), (int)keypoint.Size, (int)keypoint.Size), new Bgr(255, 0, 0), 1);
            }
            // BUG FIX: the image box was reassigned on every loop iteration; assign
            // once after all rectangles are drawn. (This now also shows the image
            // when no blobs are detected.)
            imageBox2.Image = imgBgr;
        }
Пример #6
0
        // find circles/dots using blob detection
        // Runs a capture loop: grabs frames from `cap`, perspective-corrects and
        // grayscales them, detects blobs, draws a green circle per blob onto the
        // shared `gray` image, and shows it in `winScr`. Loops until ESC (27) is
        // pressed in an OpenCV window.
        private static void FindBlob(CvCapture cap, CvWindow winScr)
        {
            SimpleBlobDetector.Params blobParameters = new SimpleBlobDetector.Params();

            // threshold (gray value)
            blobParameters.MinThreshold = blobMinThreshold;
            blobParameters.MaxThreshold = blobMaxThreshold;
            // area (pixel count)
            blobParameters.FilterByArea = true;
            blobParameters.MinArea      = blobMinArea;
            blobParameters.MaxArea      = blobMaxArea;
            // circularity
            blobParameters.FilterByCircularity = true;
            blobParameters.MinCircularity      = blobMinCircularity;
            // convexity - probably not needed - maybe eleminates false positives
            blobParameters.FilterByConvexity = true;
            blobParameters.MinConvexity      = blobMinConvexity;
            //// inertia - what does the values mean exactly
            //blobParameters.FilterByInertia = true;
            //blobParameters.MinInertiaRatio =

            SimpleBlobDetector blobDetector = new SimpleBlobDetector(blobParameters);

            // One grayscale buffer reused for every frame (shared static field).
            gray = new IplImage(cap.QueryFrame().Size, BitDepth.U8, 1);

            while (CvWindow.WaitKey(10) != 27)
            {
                IplImage iplImage = PerspectiveCorretoin.GetCorrectedImage(cap.QueryFrame());
                Cv.CvtColor(iplImage, gray, ColorConversion.RgbToGray);

                Mat mat = new Mat(gray);
                // NOTE(review): Mat.PyrDown returns a NEW Mat; the result here is
                // discarded, so detection below still runs on the full-resolution
                // image. If downsampling was intended, assign the return value
                // (mat = mat.PyrDown(...)) — TODO confirm intent before changing,
                // since the drawing below uses full-resolution coordinates.
                mat.PyrDown(new Size(mat.Width / 2, mat.Height / 2));

                KeyPoint[] keypoints = blobDetector.Detect(mat);

                foreach (KeyPoint item in keypoints)
                {
                    // Circle radius is 3x the blob size for visibility.
                    Cv.DrawCircle(gray, new CvPoint2D32f(item.Pt.X, item.Pt.Y), (int)(item.Size * 3), CvColor.Green);
                    Console.WriteLine("Found blob | size = " + item.Size);
                }
                winScr.Image = gray;
            }
        }
Пример #7
0
        public override void ActionExcute()
        {
            // Detect blobs in the previous action's result image (optionally
            // restricted to an AOI), draw a marker circle on each blob, and
            // store the annotated image as this action's result.

            // NOTE(review): the stopwatch is started/stopped but its elapsed time
            // is never read here — presumably for debugging; confirm before removing.
            Stopwatch sw = new Stopwatch();

            sw.Start();
            _imageInput = VisionManage.listScene[VisionManage.iCurrSceneIndex].listAction[actionData.imageSrc - 1].imageResult.Clone();


            Image <Gray, byte> _image = imageInput.Clone();

            // Restrict processing to the configured AOI when one is set.
            if (0 != actionSimpleBlobData.InputAOIWidth && 0 != actionSimpleBlobData.InputAOIHeight)
            {
                _image.ROI = new Rectangle(actionSimpleBlobData.InputAOIX, actionSimpleBlobData.InputAOIY, actionSimpleBlobData.InputAOIWidth, actionSimpleBlobData.InputAOIHeight);
            }

            // BUG FIX: removed two unused locals ("Mat mask" and
            // "VectorOfKeyPoint vectorOfKeyPoint") that were allocated but never read.
            MKeyPoint[] keyPoint = sBDetector.Detect(_image);

            // Mark each detected blob with a fixed-radius circle.
            for (int i = 0; i < keyPoint.Length; i++)
            {
                CircleF cf1 = new CircleF(keyPoint[i].Point, 6);
                _image.Draw(cf1, new Gray(100), 2, Emgu.CV.CvEnum.LineType.EightConnected, 0);
            }


            _imageResult = _imageInput.Clone();

            _image.CopyTo(_imageResult);
            if (actionSimpleBlobData.bROIReset)
            {
                CvInvoke.cvResetImageROI(_imageResult);
            }
            CvInvoke.cvResetImageROI(_imageInput);
            sw.Stop();
        }
Пример #8
0
        public void FindBlobs(bool draw, bool undistort)
        {
            // Detect blobs in the search image, optionally draw them, cache their
            // centres in _points, optionally undistort the centres back into pixel
            // coordinates, and raise OnBlobDetected. Does nothing when no blobs
            // are found.
            _mKeyPoints = _blobDetector.Detect(_searchMat);

            if (_mKeyPoints.Length == 0)
            {
                return; // no blobs: no points, no event
            }

            VectorOfKeyPoint keyPointVector = new VectorOfKeyPoint(_mKeyPoints);

            if (draw)
            {
                Features2DToolbox.DrawKeypoints(_searchMat, keyPointVector, _searchMat, _dColor);
            }

            // Collect the keypoint centres.
            int count = keyPointVector.Size;
            _points = new PointF[count];
            for (int idx = 0; idx < count; idx++)
            {
                _points[idx] = keyPointVector[idx].Point;
            }

            if (undistort)
            {
                // Undistort to normalized coordinates, then project back to pixel
                // space with the focal lengths and principal point.
                VectorOfPointF distorted  = new VectorOfPointF(_points);
                VectorOfPointF normalized = new VectorOfPointF();

                CvInvoke.UndistortPoints(distorted, normalized, _cameraMatrix, _distCoeffs);
                PointF[] norm = normalized.ToArray();

                for (int idx = 0; idx < norm.Length; idx++)
                {
                    _points[idx].X = norm[idx].X * (float)_fx + (float)_cx;
                    _points[idx].Y = norm[idx].Y * (float)_fy + (float)_cy;
                }
            }

            OnBlobDetected?.Invoke(new BlobDetectorEventArgs(_points, _deviceNum));
        }
        static void Main(string[] args)
        {
            // Demo: isolate orange regions of fruits.jpg via an HSV threshold,
            // detect blobs in the mask, and display them as rich keypoints.
            String windowName = "Orange Detector"; //The name of the window

            CvInvoke.NamedWindow(windowName);      //Create the window using the specific name

            // HSV range covering orange hues.
            MCvScalar lowerOrange = new MCvScalar(10, 211, 140);
            MCvScalar upperOrange = new MCvScalar(18, 255, 255);

            Mat source = new Mat("fruits.jpg", ImreadModes.AnyColor);
            Mat mask   = new Mat();

            // Build the binary mask: BGR -> HSV, range threshold, then a closing
            // pass to fill small holes.
            CvInvoke.CvtColor(source, mask, ColorConversion.Bgr2Hsv);
            CvInvoke.InRange(mask, new ScalarArray(lowerOrange), new ScalarArray(upperOrange), mask);
            CvInvoke.MorphologyEx(mask, mask, MorphOp.Close, new Mat(), new Point(-1, -1), 5, BorderType.Default, new MCvScalar());

            // Area filter only; all shape/colour filters disabled.
            SimpleBlobDetectorParams blobParams = new SimpleBlobDetectorParams
            {
                FilterByCircularity = false,
                FilterByConvexity   = false,
                FilterByInertia     = false,
                FilterByColor       = false,
                MinArea             = 1000,
                MaxArea             = 50000
            };

            SimpleBlobDetector blobDetector = new SimpleBlobDetector(blobParams);

            MKeyPoint[] found = blobDetector.Detect(mask);
            Features2DToolbox.DrawKeypoints(source, new VectorOfKeyPoint(found), source, new Bgr(255, 0, 0), Features2DToolbox.KeypointDrawType.DrawRichKeypoints);

            CvInvoke.Imshow(windowName, source); //Show image
            CvInvoke.WaitKey(0);                 //Wait for key press before executing next line
            CvInvoke.DestroyWindow(windowName);
        }
Пример #10
0
        // Per-frame entry point of the spell-trace state machine.
        // When a valid trace was detected on a previous frame, this either saves it
        // (captureSpell) or classifies it with the spell AI, then plays a timed
        // effect sequence (trace -> crossfade -> art). Otherwise it runs background
        // subtraction + blob detection on the raw infrared frame to build up a trace.
        // Returns 0 while in the effect phase, else the number of blob keypoints.
        // NOTE(review): frameData must point to at least
        // infraredFrameDescription.Width * Height ushorts; infraredFrameDataSize is
        // accepted but not validated here — confirm callers guarantee this.
        internal unsafe int ProcessFrame(ushort *frameData, uint infraredFrameDataSize, FrameDescription infraredFrameDescription, bool captureSpell, string spellName)
        {
            // If Valid Trace has been detected, we either need to process it or complete the effect that follows it.
            if (validTraceDetected)
            {
                // Process the traceFinal produced during the last frame.
                // The trace actually ended last frame, but instead of processing it right away,
                // we store that trace away until the next frame so the user can see their finished
                // trace before the CPU is plugged up with processing.
                if (!validTraceProcessed)
                {
                    if (captureSpell)
                    {
                        // Save the trace to the first unused "<spellName>_<n>.png" path.
                        bool   path_found = false;
                        string path       = "";
                        int    counter    = 0;
                        while (!path_found)
                        {
                            path = Path.Combine(SAVE_PREFIX, $"{spellName}_{counter}.png");
                            if (!File.Exists(path))
                            {
                                path_found = true;
                            }
                            else
                            {
                                counter++;
                            }
                        }
                        Cv2.ImWrite(path, traceFinal);
                    }
                    else
                    {
                        // Starting the image as a larger image and then dilating/downsizing seems to produce better results than directly drawing the spell small.
                        Mat kernel = new Mat(5, 5, MatType.CV_8UC1);
                        kernel.SetTo(new Scalar(1));
                        Mat squeezed = new Mat();
                        Cv2.Dilate(traceFinal, squeezed, kernel, iterations: 2);
                        Cv2.Resize(squeezed, squeezed, new Size(SpellAI.TRACE_AI_SIZE, SpellAI.TRACE_AI_SIZE));
                        // Flatten the downsized trace into a float sample for the classifier.
                        int     pixels = SpellAI.TRACE_AI_SIZE * SpellAI.TRACE_AI_SIZE;
                        float[] sample = new float[pixels];
                        byte *  data   = (byte *)squeezed.Data;
                        for (int i = 0; i < pixels; i++)
                        {
                            sample[i] = (float)data[i];
                        }
                        var result = spellAI.Identify(sample);
                        // Fire the game effect asynchronously so this frame handler is not blocked.
                        Task.Run(() => gameController.TriggerSpell(result));
                        spellArt = new Mat();
                        Cv2.ImRead($"{ART_PREFIX}{result}.png", ImreadModes.Grayscale).ConvertTo(spellArt, MatType.CV_32FC1, 1 / 256.0);
                        //Cv2.PutText(traceCanvas, result.ToString(), new Point(5, traceCanvas.Height-5), HersheyFonts.HersheySimplex, 1.5, Scalar.White);
                    }
                    validTraceProcessed = true;
                }
                //traceCanvas.SetTo(new Scalar(0));
                // Seconds since the valid-trace effect began; drives the phase selection below.
                var current_effect_time = (DateTime.Now - traceDetectedEffectStart).TotalSeconds;
                //Cv2.Circle(traceCanvas,
                //    new Point(infraredFrameDescription.Width / 2, infraredFrameDescription.Height / 2),
                //    (int)(infraredFrameDescription.Width * (current_effect_time / VALID_TRACE_EFFECT_DURATION)),
                //    Scalar.White,
                //    thickness: 5);
                if (current_effect_time <= EFFECT_TRACE_DURATION)
                {
                    // Do nothing. traceCanvas is set to the preview right as soon as it is created (in EndTrace),
                    // so we don't need to update it here.
                }
                else if (current_effect_time <= EFFECT_TRACE_DURATION + EFFECT_TRANSITION_DURATION && !captureSpell)
                {
                    // Crossfade from the user's trace to the spell art.
                    var ratio = (current_effect_time - EFFECT_TRACE_DURATION) / EFFECT_TRANSITION_DURATION;
                    Cv2.AddWeighted(spellTrace, 1 - ratio, spellArt, ratio, 0, traceCanvas);
                }
                else if (current_effect_time <= EFFECT_TRACE_DURATION + EFFECT_TRANSITION_DURATION + EFFECT_ART_DURATION && !captureSpell)
                {
                    //Yes, this will be repeated a whole bunch of times for no reason, but I don't care enough to fix it. So.
                    spellArt.CopyTo(traceCanvas);
                }
                else
                {
                    // Effect finished: reset the state machine for the next trace.
                    validTraceDetected  = false;
                    validTraceProcessed = false;
                }

                return(0);
            }
            else
            {
                //If ValidTraceDetected is false, then we need to work on detecting a new one.
                // Wrap the raw 16-bit IR buffer in a Mat (no copy) and scale it down to 8-bit.
                var input     = new Mat(infraredFrameDescription.Height, infraredFrameDescription.Width, MatType.CV_16U, (IntPtr)frameData);
                Mat converted = new Mat();
                input.ConvertTo(converted, MatType.CV_8U, 1.0 / 256.0);

                // Foreground mask from the MOG background subtractor; blobs in the
                // mask are the moving tip being traced.
                Mat mask = new Mat();
                mog.Apply(converted, mask);

                var keypoints = blobby.Detect(mask);
                if (!TraceDetected()) // Show the user's beautiful face while no spell is being drawn.
                {
                    //traceCanvas.SetTo(new Scalar(0));
                    //Cv2.BitwiseAnd(converted, mask, converted);
                    foreach (var keypoint in keypoints)
                    {
                        Cv2.Circle(converted, (Point)keypoint.Pt, 10 /*(int)keypoint.Size*/, Scalar.White, 2);
                    }
                    converted.ConvertTo(traceCanvas, MatType.CV_32F, 1.0 / 256.0);
                }

                // This function call produces the traceFinal image, which gets saved or processed by ML.
                // However, it does not do anything with that image; we intentionally wait a frame so that the user has a spell to look at before clogging up the CPU.
                ProcessKeypoints(keypoints);
                converted.Dispose();
                mask.Dispose();
                input.Dispose();
                return(keypoints.Count());
            }
        }
Пример #11
0
    private void DemoIRBlobTrack()
    {
        // Track bright IR blobs from the Kinect: threshold the IR image, detect
        // blobs, move the tracker objects to the blob positions (in IR space and
        // mapped colour space), then render the annotated IR image back to the
        // Kinect texture when overrideIRTexture is set.
        int IRWidth  = kinectManager.IRWidth;
        int IRHeight = kinectManager.IRHeight;

        //get image and convert to threshold image
        Mat irImage = new Mat(IRHeight, IRWidth, MatType.CV_8UC4, kinectManager.IRRawData);              //rows=height, cols=width
        Mat ir8Bit  = new Mat();

        Cv2.CvtColor(irImage, ir8Bit, ColorConversionCodes.RGBA2GRAY);
        Cv2.Threshold(ir8Bit, ir8Bit, thresh: 200, maxval: 255, type: ThresholdTypes.Binary);

        //Find blobs
        SimpleBlobDetector.Params detectorParams = new SimpleBlobDetector.Params
        {
            //MinDistBetweenBlobs = 10, // 10 pixels between blobs
            //MinRepeatability = 1,

            //MinThreshold = 100,
            //MaxThreshold = 255,
            //ThresholdStep = 5,

            FilterByArea = false,
            //FilterByArea = true,
            //MinArea = 0.001f, // 10 pixels squared
            //MaxArea = 500,

            FilterByCircularity = false,
            //FilterByCircularity = true,
            //MinCircularity = 0.001f,

            FilterByConvexity = false,
            //FilterByConvexity = true,
            //MinConvexity = 0.001f,
            //MaxConvexity = 10,

            FilterByInertia = false,
            //FilterByInertia = true,
            //MinInertiaRatio = 0.001f,

            FilterByColor = false
                            //FilterByColor = true,
                            //BlobColor = 255 // to extract light blobs
        };

        SimpleBlobDetector simpleBlobDetector = SimpleBlobDetector.Create(detectorParams);

        KeyPoint[] blobs = simpleBlobDetector.Detect(ir8Bit);


        foreach (KeyPoint kp in blobs)
        {
            Vector2 blobPt = new Vector2(kp.Pt.X, kp.Pt.Y);

            //transform ir point to unity world space
            Vector2 irDimensions = new Vector2(kinectManager.IRWidth, kinectManager.IRHeight);
            irTrack.transform.localPosition = KinectCVUtilities.TransformTextureToUnity(irPlane, irDimensions, blobPt) + irOffset;


            //transform ir point to color space, then world space
            DepthSpacePoint depthPt = new DepthSpacePoint();
            depthPt.X = blobPt.x;
            depthPt.Y = blobPt.y;
            double          depth         = GetAvg(kinectManager.DepthData, (int)depthPt.X, (int)depthPt.Y, kinectManager.DepthWidth, kinectManager.DepthHeight);
            ColorSpacePoint colorMappedPt = kinectManager.Sensor.CoordinateMapper.MapDepthPointToColorSpace(depthPt, (ushort)depth);

            Vector2 colorDimensions = new Vector2(kinectManager.ColorWidth, kinectManager.ColorHeight);
            Vector2 colorPt         = new Vector2(colorMappedPt.X, colorMappedPt.Y);
            colorTrack.transform.localPosition = KinectCVUtilities.TransformTextureToUnity(colorPlane, colorDimensions, colorPt) + colorOffset;
        }


        //convert back to unity texture, add nice debug drawings
        Mat irImageKeyPoints = new Mat();

        Cv2.DrawKeypoints(ir8Bit, blobs, irImageKeyPoints, color: Scalar.FromRgb(255, 0, 0),
                          flags: DrawMatchesFlags.DrawRichKeypoints);

        //Convert back to RGBA32
        // BUG FIX: the Mat constructor takes rows (height) first — the original
        // passed (IRWidth, IRHeight), transposing the output image. Use the same
        // (rows, cols) order as the irImage construction above.
        Mat irImageOut = new Mat(IRHeight, IRWidth, MatType.CV_8UC4);

        Cv2.CvtColor(irImageKeyPoints, irImageOut, ColorConversionCodes.BGR2RGBA);      //OpenCV is weird and has it in BGR format

        //load onto texture
        byte[] rawTextureData = KinectCVUtilities.ConvertMatToBytes(irImageOut);

        if (overrideIRTexture)
        {
            kinectManager.IRTexture.LoadRawTextureData(rawTextureData);
            kinectManager.IRTexture.Apply();
        }
    }
Пример #12
0
        public List <Box> DetectObjects(DateTime timestamp, Mat image, int frameIndex, out Mat fg)
        {
            // Detect moving objects in one frame via background subtraction:
            // blur -> MOG2 foreground -> shadow removal + smoothing -> blob
            // detection. Returns one Box per blob (or null if no foreground mask
            // was produced); `fg` receives the final processed foreground mask.
            if (regionOfInterest != null)
            {
                bgs.SetRegionOfInterest(regionOfInterest);
            }

            Cv2.GaussianBlur(image, blurredFrame, Size.Zero, PRE_BGS_BLUR_SIGMA);

            // fgMask is the original foreground bitmap returned by opencv MOG2
            fgMask = bgs.DetectForeground(blurredFrame, frameIndex);
            fg     = fgMask;
            if (fgMask == null)
            {
                return(null);
            }

            // pre-processing: drop MOG2 shadow pixels (value < 200), then smooth
            // and re-binarize the mask.
            Cv2.Threshold(fgMask, fgWOShadows, 200, 255, ThresholdTypes.Binary);
            Cv2.MedianBlur(fgWOShadows, fgSmoothedMask2, MEDIAN_BLUR_SIZE);
            Cv2.GaussianBlur(fgSmoothedMask2, fgSmoothedMask3, Size.Zero, GAUSSIAN_BLUR_SIGMA);
            Cv2.Threshold(fgSmoothedMask3, fgSmoothedMask4, GAUSSIAN_BLUR_THRESHOLD, 255, ThresholdTypes.Binary);

            fg = fgSmoothedMask4;

            // BUG FIX: removed the unused "CvBlobs blobs" allocation left over from
            // the commented-out blob-filtering experiment below.
            KeyPoint[] points = _blobDetector.Detect(fgSmoothedMask4);
            //blobs.FilterByArea(MIN_BLOB_SIZE, int.MaxValue);

            //// filter overlapping blobs
            //HashSet<uint> blobIdsToRemove = new HashSet<uint>();
            //foreach (var b0 in blobs)
            //    foreach (var b1 in blobs)
            //    {
            //        if (b0.Key == b1.Key) continue;
            //        if (b0.Value.BoundingBox.Contains(b1.Value.BoundingBox))
            //            blobIdsToRemove.Add(b1.Key);
            //    }
            //foreach (uint blobid in blobIdsToRemove)
            //    blobs.Remove(blobid);

            // adding text to boxes and foreground frame
            List <Box> newBlobs = new List <Box>();
            uint       id       = 0;

            foreach (var point in points)
            {
                // Box is a square of side 2*Size centred on the keypoint.
                int x    = (int)point.Pt.X;
                int y    = (int)point.Pt.Y;
                int size = (int)point.Size;
                Box box  = new Box("", x - size, x + size, y - size, y + size, frameIndex, id);
                id++;
                newBlobs.Add(box);

                Cv2.Rectangle(fgSmoothedMask4, new OpenCvSharp.Point(x - size, y - size), new OpenCvSharp.Point(x + size, y + size), new Scalar(255), 1);
                Cv2.PutText(fgSmoothedMask4, box.ID.ToString(), new OpenCvSharp.Point(x, y - size), HersheyFonts.HersheyPlain, 1.0, new Scalar(255.0, 255.0, 255.0));
            }
            Cv2.PutText(fgSmoothedMask4, "frame: " + frameIndex, new OpenCvSharp.Point(10, 10), HersheyFonts.HersheyPlain, 1, new Scalar(255, 255, 255));

            newBlobs.ForEach(b => b.Time      = timestamp);
            newBlobs.ForEach(b => b.Timestamp = frameIndex);
            return(newBlobs);
        }
Пример #13
0
        Vector3 triangulate(int j, HyperMegaStuff.HyperMegaLines drawer = null)
        {
            // Triangulate a 3D point from the stereo pair of calibration device j:
            // find the largest bright blob in each camera's background-subtracted
            // image, cast a ray through it, and intersect the two rays. Returns
            // Vector3.zero when either ray is missing or the rays don't agree
            // within 5 mm.
            Ray[] rays         = new Ray[2];
            Mat   workingImage = new Mat(calibrationDevices[j].webcam.leftImage.Height,
                                         calibrationDevices[j].webcam.leftImage.Width,
                                         calibrationDevices[j].webcam.leftImage.Type(), 0);

            for (int i = 0; i < 2; i++)
            {
                Mat curMat = i == 0 ? calibrationDevices[j].webcam.leftImage :
                             calibrationDevices[j].webcam.rightImage;

                if (calibrationDevices[j].subtractionImage[i] != null)
                {
                    // Subtract the background from the curMat
                    Cv2.Subtract(curMat, calibrationDevices[j].subtractionImage[i], workingImage);

                    // Threshold the image to separate black and white
                    Cv2.Threshold(workingImage, workingImage, blobThreshold, 255, ThresholdTypes.BinaryInv); // TODO MAKE THRESHOLD TUNABLE

                    // Detect Blobs using the Mask
                    var settings = new SimpleBlobDetector.Params();
                    settings.FilterByArea        = false;
                    settings.FilterByColor       = false;
                    settings.FilterByInertia     = true;
                    settings.FilterByConvexity   = true;
                    settings.FilterByCircularity = false;
                    // BUG FIX: the settings above were constructed but never used —
                    // the original called SimpleBlobDetector.Create() with no
                    // arguments, running the detector with default parameters.
                    SimpleBlobDetector detector = SimpleBlobDetector.Create(settings);
                    KeyPoint[]         blobs    = detector.Detect(workingImage, calibrationDevices[j].maskImage[i]);
                    Cv2.DrawKeypoints(workingImage, blobs, workingImage, 255);

                    // Pick the largest blob (assumed to be the white circle).
                    int biggest = -1; float size = 0;
                    for (int k = 0; k < blobs.Length; k++)
                    {
                        if (blobs[k].Size > size)
                        {
                            biggest = k;
                            size    = blobs[k].Size;
                        }
                    }

                    // BUG FIX: guard on "biggest >= 0" instead of "blobs.Length > 0" —
                    // if every blob had Size 0 the original indexed blobs[-1].
                    if (biggest >= 0)
                    {
                        // Undistort the blob centre into rectilinear (normalized)
                        // coordinates, then cast a world-space ray through it.
                        float[] pointArr         = { blobs[biggest].Pt.X, blobs[biggest].Pt.Y };
                        Mat     point            = new Mat(1, 1, MatType.CV_32FC2, pointArr);
                        Mat     undistortedPoint = new Mat(1, 1, MatType.CV_32FC2, 0);
                        Cv2.UndistortPoints(point, undistortedPoint, calibrationDevices[j].calibration.cameras[i].cameraMatrixMat,
                                            calibrationDevices[j].calibration.cameras[i].distCoeffsMat,
                                            calibrationDevices[j].calibration.cameras[i].rectificationMatrixMat);
                        Point2f[] rectilinear = new Point2f[1];
                        undistortedPoint.GetArray(0, 0, rectilinear);
                        Transform camera = i == 0 ? calibrationDevices[j].LeftCamera : calibrationDevices[j].RightCamera;
                        rays[i] = new Ray(camera.position, camera.TransformDirection(
                                              new Vector3(-rectilinear[0].X, rectilinear[0].Y, 1f)));
                        if (drawer != null)
                        {
                            drawer.color = ((j == 0) != (i == 0)) ? Color.cyan : Color.red;
                            drawer.DrawRay(rays[i].origin, rays[i].direction);
                        }
                    }
                }
            }
            workingImage.Release();

            // Only accept the triangulated point if the rays match up closely enough
            if (rays[0].origin != Vector3.zero &&
                rays[1].origin != Vector3.zero)
            {
                Vector3 point1 = RayRayIntersection(rays[0], rays[1]);
                Vector3 point2 = RayRayIntersection(rays[1], rays[0]);

                if (Vector3.Distance(point1, point2) < 0.005f)
                {
                    return((point1 + point2) * 0.5f);
                }
                else
                {
                    return(Vector3.zero);
                }
            }
            else
            {
                return(Vector3.zero);
            }
        }
Пример #14
0
        /// <summary>
        /// Finds the fingertip in <paramref name="bitmap"/> as the largest circular
        /// blob, draws a box around it, tests that box against the registered
        /// <c>controls</c> at a matching depth and updates <c>OutputText</c>.
        /// The annotated frame is written back into <paramref name="bitmap"/>.
        /// </summary>
        /// <param name="bitmap">Input frame; replaced by the annotated frame on return.</param>
        private unsafe void OpenCV(ref Bitmap bitmap)
        {
            Mat testMat = BitmapConverter.ToMat(bitmap);

            // Look for round blobs only - the fingertip marker is assumed circular.
            SimpleBlobDetector.Params circleParameters = new SimpleBlobDetector.Params();
            circleParameters.FilterByCircularity = true;
            circleParameters.MinCircularity      = 0.85f;
            circleParameters.MaxCircularity      = 1f;
            circleParameters.MinArea             = 30; // Modify the value on the fly (TODO use bigger circle)

            SimpleBlobDetector detectCircleBlobs = new SimpleBlobDetector(circleParameters);

            fingerPoints = detectCircleBlobs.Detect(testMat);
            detectCircleBlobs.Dispose();

            // If Finger found basically
            if (fingerPoints != null)
            {
                // Keep the largest keypoint - assumed to be the fingertip.
                this.fingerSize = 0;
                int fingerIndex = -1;
                for (int i = 0; i < fingerPoints.Length; i++)
                {
                    if (fingerPoints[i].Size >= this.fingerSize)
                    {
                        this.fingerSize = (int)fingerPoints[i].Size;
                        fingerIndex     = i;
                    }
                }

                if (fingerIndex != -1)
                {
                    OpenCvSharp.CPlusPlus.Point coordinate = fingerPoints[fingerIndex].Pt;
                    this.fingerSize = (int)((fingerPoints[fingerIndex].Size) * Math.Sqrt(2));
                    // Mark the detected fingertip pixel green.
                    // NOTE(review): no bounds check on coordinate - assumes the keypoint
                    // centre lies inside the image; confirm against the detector's output.
                    testMat.Set <Vec3b>(coordinate.Y, coordinate.X, new Vec3b(0, 255, 0));
                    RotatedRect rRect           = new RotatedRect(new Point2f(coordinate.X, coordinate.Y), new Size2f(this.fingerSize, this.fingerSize), 0);
                    Point2f[]   circleVerticies = rRect.Points();
                    int startX = (int)(circleVerticies[0].X);
                    int startY = (int)(circleVerticies[1].Y);
                    this.fingerDepth = MapColortoDepth(startX, startY, this.fingerSize, this.fingerSize);
                    OpenCvSharp.CPlusPlus.Rect featureRect = new OpenCvSharp.CPlusPlus.Rect(startX, startY, this.fingerSize, this.fingerSize);

                    // Draw box around finger
                    for (int j = 0; j < 4; j++)
                    {
                        Cv2.Line(testMat, circleVerticies[j], circleVerticies[(j + 1) % 4], new Scalar(0, 255, 0));
                    }

                    // Collect every control whose bounds the finger box overlaps at
                    // (approximately) the same depth.
                    bool       intersectOccurance = false;
                    List <int> intersectIndicies  = new List <int>();
                    for (int i = 0; i < this.controls.Count; i++)
                    {
                        if (this.controls[i].boundingRect.IntersectsWith(featureRect))
                        {
                            double diff = fingerDepth - this.controls[i].depth;
                            if (Math.Abs(diff) < 0.5)
                            {
                                intersectOccurance = true;
                                intersectIndicies.Add(i);
                            }
                        }
                    }

                    System.Text.StringBuilder append = new System.Text.StringBuilder();
                    if (intersectOccurance)
                    {
                        for (int i = 0; i < intersectIndicies.Count; i++)
                        {
                            append.Append(" " + this.controls[intersectIndicies[i]].title + " " + intersectIndicies[i].ToString());
                        }
                        this.OutputText = "Pressed Button" + append; //TODO Make this more obvious
                    }
                    else
                    {
                        this.OutputText = "No State";
                    }
                }
            }

            bitmap = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(testMat);
            testMat.Dispose();
        }
Пример #15
0
        /// <summary>
        /// Blob-detection demo: loads an image, thresholds it to binary, runs
        /// SimpleBlobDetector with every shape/size filter disabled (so all stable
        /// blobs are reported), prints the keypoints and displays them on screen.
        /// </summary>
        static void Main(string[] args)
        {
            var srcImage = new Mat(@"..\..\Images\cvlbl.png");
            Cv2.ImShow("Source", srcImage);
            Cv2.WaitKey(1); // do events

            var binaryImage = new Mat(srcImage.Size(), MatType.CV_8UC1);

            Cv2.CvtColor(srcImage, binaryImage, ColorConversion.BgrToGray);
            Cv2.Threshold(binaryImage, binaryImage, thresh: 100, maxval: 255, type: ThresholdType.Binary);

            // Every filter is intentionally off; the commented values document the
            // tuning knobs available for each filter group.
            var detectorParams = new SimpleBlobDetector.Params
            {
                //MinDistBetweenBlobs = 10, // 10 pixels between blobs
                //MinRepeatability = 1,

                //MinThreshold = 100,
                //MaxThreshold = 255,
                //ThresholdStep = 5,

                FilterByArea = false,
                //MinArea = 0.001f, // 10 pixels squared
                //MaxArea = 500,

                FilterByCircularity = false,
                //MinCircularity = 0.001f,

                FilterByConvexity = false,
                //MinConvexity = 0.001f,
                //MaxConvexity = 10,

                FilterByInertia = false,
                //MinInertiaRatio = 0.001f,

                FilterByColor = false
                //BlobColor = 255 // to extract light blobs
            };
            var simpleBlobDetector = new SimpleBlobDetector(detectorParams);
            var keyPoints = simpleBlobDetector.Detect(binaryImage);
            simpleBlobDetector.Dispose(); // was leaked in the original

            Console.WriteLine("keyPoints: {0}", keyPoints.Length);
            foreach (var keyPoint in keyPoints)
            {
                Console.WriteLine("X: {0}, Y: {1}", keyPoint.Pt.X, keyPoint.Pt.Y);
            }

            var imageWithKeyPoints = new Mat();
            Cv2.DrawKeypoints(
                    image: binaryImage,
                    keypoints: keyPoints,
                    outImage: imageWithKeyPoints,
                    color: Scalar.FromRgb(255, 0, 0),
                    flags: DrawMatchesFlags.DrawRichKeypoints);

            Cv2.ImShow("Key Points", imageWithKeyPoints);
            Cv2.WaitKey(1); // do events

            Cv2.WaitKey(0);

            Cv2.DestroyAllWindows();
            srcImage.Dispose();
            binaryImage.Dispose(); // was leaked in the original
            imageWithKeyPoints.Dispose();
        }
Пример #16
0
        /// <summary>
        /// Grape (uvas) segmentation pipeline. Each iteration: loads a frame,
        /// filters noise, equalises contrast, extracts edges, fills the binary
        /// mask, separates touching objects via distance transform + connected
        /// components + watershed, detects the resulting blobs and builds a
        /// cumulative size-distribution (granulometry) table on the console.
        /// Debug images go to D:\Dictuc and the annotated frame is appended to
        /// an output video. The loop never terminates on its own.
        /// </summary>
        static void Main(string[] args)
        {
            //Thread capturaVideoThread = new Thread(new ThreadStart(Program.CapturarVideo));
            //capturaVideoThread.Start();

            VideoCapture captura = new VideoCapture("D:\\Dictuc\\out1.avi");
            VideoWriter  salida  = new VideoWriter("D:\\Dictuc\\outSegmentado.avi", FourCC.XVID, 10.0, new Size(captura.FrameWidth, captura.FrameHeight), true);

            Mat imagenProcesada = new Mat();
            int numImg          = 0;

            while (true)
            {
                // NOTE(review): the video read is commented out, so every iteration
                // re-processes the same still image from disk - confirm intent.
                //captura.Read(imagen);
                imagen = Cv2.ImRead("D:\\uvas2.jpg");
                mutex.WaitOne();
                imagen.CopyTo(imagenProcesada);
                mutex.ReleaseMutex();
                // Preprocessing: noise filter, equalised grayscale, boosted high frequencies.
                Mat          imagenRuidoFiltrado     = FiltradoRuido(imagenProcesada);
                Mat          imagenGrisContraste     = EscalaGrisesEqualizada(imagenRuidoFiltrado);
                Mat          imagenGrisFrecAltasProc = FrecuenciasAltasPotenciadasContraste(imagenGrisContraste);
                // Two edge detectors: the coarser second one feeds the binary fill,
                // the finer first one is only overlaid on the image for debugging.
                EdgeDetector edgeDetector            = new EdgeDetector()
                {
                    Threshold           = (byte)18,
                    SparseDistance      = 3,
                    WeightPreviousPoint = (float)2.0,
                    WeightCurrentPoint  = (float)1.0,
                    WeightAfterPoint    = (float)2.0,
                };

                EdgeDetector edgeDetector2 = new EdgeDetector()
                {
                    Threshold           = (byte)20,
                    SparseDistance      = 5,
                    WeightPreviousPoint = (float)0.5,
                    WeightCurrentPoint  = (float)1.0,
                    WeightAfterPoint    = (float)0.5,
                };

                Mat imagenBordes = edgeDetector.EdgeImage(imagenGrisContraste);
                Mat imagenBordes2 = edgeDetector2.EdgeImage(imagenGrisContraste);
                Mat imagenBinaria, imagenAberturaRelleno;
                CalculoMatrizBinariaYRelleno(imagenBordes2, out imagenBinaria, out imagenAberturaRelleno);

                Mat mascaraInv = 255 - imagenAberturaRelleno;

                // Distance transform + connected-component labelling of the filled mask.
                Mat DistSureFg  = new Mat();
                Mat AreasSureFg = new Mat();
                Mat Unknown     = new Mat();
                // NOTE(review): AreasSureFg is still empty here and is overwritten by
                // ConnectedComponents below, so this increment looks like a no-op.
                AreasSureFg += 1;
                Cv2.DistanceTransform(imagenAberturaRelleno, DistSureFg, DistanceTypes.L1, DistanceMaskSize.Mask5);
                int numAreas = Cv2.ConnectedComponents(imagenAberturaRelleno, AreasSureFg, PixelConnectivity.Connectivity8);

                // Cache the distance map in a managed array for fast repeated access.
                float[,] distValues = new float[DistSureFg.Rows, DistSureFg.Cols];

                for (int i = 0; i < DistSureFg.Rows; i++)
                {
                    for (int j = 0; j < DistSureFg.Cols; j++)
                    {
                        distValues[i, j] = DistSureFg.At <float>(i, j);
                    }
                }

                // Group every pixel by its connected-component label.
                Segment[] segments = new Segment[numAreas];

                for (int i = 0; i < AreasSureFg.Rows; i++)
                {
                    for (int j = 0; j < AreasSureFg.Cols; j++)
                    {
                        int   m = AreasSureFg.At <Int32>(i, j);
                        byte  pixelSurrounding = 0;
                        float distance         = (float)0;

                        //if (i >= 1)
                        //{
                        //    distance = distValues[i - 1, j];
                        //    if (distance == 2)
                        //    {
                        //        pixelSurrounding |= Segment.PIXEL_SURROUNDED_LEFT;
                        //    }
                        //}
                        //if (i < AreasSureFg.Rows - 1)
                        //{
                        //    distance = distValues[i + 1, j];
                        //    if (distance == 2)
                        //    {
                        //        pixelSurrounding |= Segment.PIXEL_SURROUNDED_RIGHT;
                        //    }
                        //}
                        //if (j >= 1)
                        //{
                        //    distance = distValues[i, j - 1];
                        //    if (distance == 2)
                        //    {
                        //        pixelSurrounding |= Segment.PIXEL_SURROUNDED_DOWN;
                        //    }
                        //}
                        //if (j < AreasSureFg.Cols - 1)
                        //{
                        //    distance = distValues[i, j + 1];
                        //    if (distance == 2)
                        //    {
                        //        pixelSurrounding |= Segment.PIXEL_SURROUNDED_UP;
                        //    }
                        //}

                        SegmentPixelData newPixel = new SegmentPixelData()
                        {
                            Distance          = distValues[i, j],
                            CoordsXY          = new int[] { i, j },
                            Concave           = 0,
                            Indexes           = new int[] { -1, -1 },
                            PixelsSurrounding = pixelSurrounding,
                            SubsegmentLabel   = 0,
                        };

                        // NOTE(review): the very first pixel seen for each label only
                        // creates the Segment and is NOT added to PixelData (the add
                        // happens in the else branch) - confirm this is intentional.
                        if (segments[m] == null)
                        {
                            segments[m] = new Segment()
                            {
                                SegmentId = m,
                                PixelData = new List <SegmentPixelData>(),
                            };
                        }
                        else
                        {
                            segments[m].MaxDistance = (segments[m].MaxDistance > newPixel.Distance) ? (int)segments[m].MaxDistance : (int)newPixel.Distance;
                            segments[m].PixelData.Add(newPixel);
                        }
                    }
                }

                // Erode each non-circular segment by a circularity-dependent distance
                // to obtain seed "centroids" for the watershed step.
                Mat Centroides = new Mat();
                imagenAberturaRelleno.CopyTo(Centroides);
                var indexadorCentroides = Centroides.GetGenericIndexer <byte>();
                var indexadorFiguras    = AreasSureFg.GetGenericIndexer <Int32>();

                foreach (var s in segments.Where(s => s.Circularity <= 0.9))
                {
                    int distancia = 0;
                    if (s.Circularity > 0.7)
                    {
                        distancia = 5;
                    }
                    else if (s.Circularity > 0.5)
                    {
                        distancia = 5;
                    }
                    else if (s.Circularity > 0.25)
                    {
                        distancia = 6;
                    }
                    else
                    {
                        distancia = 6;
                    }

                    distancia = (distancia < s.MaxDistance) ? distancia : s.MaxDistance - 1;

                    foreach (var p in s.PixelData.Where(p => p.Distance <= distancia))
                    {
                        if (imagenAberturaRelleno.At <byte>(p.CoordsXY[0], p.CoordsXY[1]) != (byte)0)
                        {
                            indexadorCentroides[p.CoordsXY[0], p.CoordsXY[1]] = 0;
                        }
                    }
                }

                // Unknown = foreground that is not a seed (neither sure-bg nor seed).
                Cv2.Subtract(imagenAberturaRelleno + 255, Centroides, Unknown);

                #region segmentStuff
                //List<int> indexConcavos = segments.Where(s => s.Circularity > 1).Select(s => s.SegmentId).ToList();


                //foreach (var s in segments.Where(s => s.Circularity < 1.1 && s.Circularity > 0.9))
                //{
                //    foreach (var p in s.PixelData/*.Where(p => p.Distance == 1)*/)
                //    {
                //        if (imagenAberturaRelleno.At<byte>(p.CoordsXY[0], p.CoordsXY[1]) != (byte)0)
                //        {
                //            indexadorFiguras[p.CoordsXY[0], p.CoordsXY[1]] = 255;
                //        }
                //    }
                //}

                //foreach (var s in segments.Where(s => s.Circularity >= 1.1))
                //{
                //    foreach (var p in s.PixelData/*.Where(p => p.Distance == 1)*/)
                //    {
                //        if (imagenAberturaRelleno.At<byte>(p.CoordsXY[0], p.CoordsXY[1]) != (byte)0)
                //        {
                //            indexadorFiguras[p.CoordsXY[0], p.CoordsXY[1]] = 255;
                //        }
                //    }
                //}

                //foreach (var s in segments)
                //{
                //    s.SetPixelConcavity();
                //    s.Segmentation();
                //    foreach (var p in s.PixelData.Where(p => p.Distance == 1))
                //    {
                //        if (p.Concave == 1)
                //        {
                //            indexadorFiguras[p.CoordsXY[0], p.CoordsXY[1]] = 255;
                //        }
                //        if (p.Concave == -1)
                //        {
                //            indexadorFiguras[p.CoordsXY[0], p.CoordsXY[1]] = 255;
                //        }
                //    }
                //}

                //foreach (var s in segments)
                //{
                //    //s.SetPixelConcavity();
                //    //s.Segmentation();
                //    foreach (var p in s.PixelData.Where(p => p.Distance == 2))
                //    {
                //        indexadorFiguras[p.CoordsXY[0], p.CoordsXY[1]] = 230;
                //    }
                //}

                //imagenAberturaRelleno.CopyTo(SureFg);
                #endregion

                // Label the seeds (+1 so background is 1) and zero out the unknown
                // region, as required by Cv2.Watershed's marker convention.
                Mat colormap   = new Mat();
                Mat Marcadores = new Mat();
                Cv2.ConnectedComponents(Centroides, Marcadores);
                Marcadores = Marcadores + 1;
                var indexador2 = Marcadores.GetGenericIndexer <Int32>();
                for (int i = 0; i < Unknown.Rows; i++)
                {
                    for (int j = 0; j < Unknown.Cols; j++)
                    {
                        if (Unknown.At <byte>(i, j) == 255)
                        {
                            indexador2[i, j] = 0;
                        }
                    }
                }

                Marcadores.CopyTo(colormap);
                colormap.ConvertTo(colormap, MatType.CV_8UC3);
                Cv2.ApplyColorMap(colormap, colormap, ColormapTypes.Rainbow);
                Cv2.ImWrite("D:\\Dictuc\\marcadores.png", Marcadores);

                //Mat img1 = new Mat();
                //imagen.CopyTo(img1);
                Mat DistColor = new Mat();
                //imagenGrisContraste = 255 - imagenGrisContraste;
                Cv2.CvtColor(imagenAberturaRelleno, DistColor, ColorConversionCodes.GRAY2BGR);
                DistColor.ConvertTo(DistColor, MatType.CV_8U);

                // Watershed over the 3-channel mask; touching grapes split at -1 ridges.
                Cv2.Watershed(DistColor, Marcadores);


                Cv2.ImWrite("D:\\Dictuc\\watersheedIn.png", DistColor);

                var indexador4 = imagen.GetGenericIndexer <Vec3i>();
                //for (int i = 0; i < imagen.Rows; i++)
                //{
                //    for (int j = 0; j < imagen.Cols; j++)
                //    {
                //        //if (Centroides.At<byte>(i, j) > 0)
                //        //    indexador4[i, j] = new Vec3i(0, 0, 255);
                //        if (Marcadores.At<Int32>(i, j) == -1)
                //            indexador4[i, j] = new Vec3i(255, 20, 20);
                //    }
                //}


                // Overlay the fine edge map in blue on the original image.
                // NOTE(review): reading the edge mat as <char> - verify element type.
                for (int i = 0; i < imagen.Rows; i++)
                {
                    for (int j = 0; j < imagen.Cols; j++)
                    {
                        //if (Centroides.At<byte>(i, j) > 0)
                        //    indexador4[i, j] = new Vec3i(0, 0, 255);
                        if (imagenBordes.At <char>(i, j) > 0)
                        {
                            indexador4[i, j] = new Vec3i(255, 20, 20);
                        }
                    }
                }

                // Binarise the watershed labels, erode, then Canny to outline segments.
                Mat seg = new Mat();
                Marcadores.CopyTo(seg);
                var indexador5 = seg.GetGenericIndexer <int>();
                for (int i = 0; i < Marcadores.Rows; i++)
                {
                    for (int j = 0; j < Marcadores.Cols; j++)
                    {
                        indexador5[i, j] = (Math.Abs(indexador5[i, j]) > 1) ? 255 : 0;
                    }
                }
                Mat kE1 = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(1, 1));
                Cv2.Erode(seg, seg, kE1, iterations: 3);
                int thrs1 = 1500;
                int thrs2 = 1800;
                Mat edge1 = new Mat();
                seg.ConvertTo(seg, MatType.CV_8U);
                Cv2.Canny(seg, edge1, thrs1, thrs2, apertureSize: 5);

                // Blob detection on the segment outlines; area filter only.
                SimpleBlobDetector.Params params1 = new SimpleBlobDetector.Params()
                {
                    MinThreshold        = 0,
                    MaxThreshold        = 255,
                    FilterByArea        = true,
                    MinArea             = 15,
                    FilterByCircularity = false,
                    MinCircularity      = (float)0.01,
                    FilterByConvexity   = false,
                    MinConvexity        = (float)0.1,
                    FilterByInertia     = false,
                    MinInertiaRatio     = (float)0.01,
                };
                SimpleBlobDetector detectorBlobs = SimpleBlobDetector.Create(params1);
                KeyPoint[]         segmentosBlob = detectorBlobs.Detect(edge1);

                // Pack the blob sizes into a 1xN float Mat for CalcHist.
                Mat segmentosBlobMat = new Mat(1, segmentosBlob.Count(), MatType.CV_32FC1);
                var indexador6       = segmentosBlobMat.GetGenericIndexer <float>();
                for (int i = 0; i < segmentosBlob.Count(); i++)
                {
                    indexador6[0, i] = segmentosBlob[i].Size;
                }

                // Cumulative histogram of blob sizes, then interpolate the size at
                // each 10th percentile (granulometry curve).
                Mat      hist   = new Mat();
                Rangef[] ranges = { new Rangef(0, (float)segmentosBlob.Max(x => x.Size)) };
                Cv2.CalcHist(new Mat[] { segmentosBlobMat }, new int[] { 0 }, null, hist, 1, new int[] { 100 }, ranges, uniform: true, accumulate: true);
                float[] histAcumulado           = new float[hist.Rows];
                float[] histAcumuladoPorcentaje = new float[11];

                histAcumulado[0] = hist.At <float>(0, 0);

                for (int i = 1; i < hist.Rows; i++)
                {
                    histAcumulado[i] = hist.At <float>(i, 0) + histAcumulado[i - 1];
                }

                int k = 1;
                for (int i = 1; i < histAcumuladoPorcentaje.Count(); i++)
                {
                    for (; k < hist.Rows; k++)
                    {
                        float porcentajeActual    = histAcumulado[k] / segmentosBlob.Count() * 100;
                        float porcentajeAnterior  = histAcumulado[k - 1] / segmentosBlob.Count() * 100;
                        float porcentajeRequerido = (float)((i < 10) ? i * 10 : 99.3);
                        if (porcentajeRequerido <= porcentajeActual)
                        {
                            float tamañoPorcentajeActual        = (float)(k * (float)segmentosBlob.Max(x => x.Size) / 100.0);
                            float tamañoPorcentajeAnterior      = (float)((k - 1) * (float)segmentosBlob.Max(x => x.Size) / 100.0);
                            float tasaVariacionTamañoPorcentaje = (tamañoPorcentajeActual - tamañoPorcentajeAnterior) / (porcentajeActual - porcentajeAnterior);
                            histAcumuladoPorcentaje[i] = tamañoPorcentajeAnterior + tasaVariacionTamañoPorcentaje * (i * 10 - porcentajeAnterior);
                            break;
                        }
                    }
                }

                for (int i = 0; i < histAcumuladoPorcentaje.Count(); i++)
                {
                    Console.Write(histAcumuladoPorcentaje[i] + ",");
                }
                Console.WriteLine("");

                // The commented block below is the original Python prototype of the
                // granulometry export, kept for reference.
                //            data1 = [];

                //              for i in range(0, len(keypoints1)):

                //                data1.append(keypoints1[i].size * coefTamano)
                //                #tamano.write(str(i)+'\t'+str(keypoints1[i].size*2*0.3)+'\n')
                //  cv2.line(im_with_keypoints1, (int(float(keypoints1[i].pt[0] - keypoints1[i].size)), int(float(keypoints1[i].pt[1]))), (int(float(keypoints1[i].pt[0] + keypoints1[i].size)), int(float(keypoints1[i].pt[1]))), (255, 0, 0), 1)

                //                cv2.line(im_with_keypoints1, (int(float(keypoints1[i].pt[0])), int(float(keypoints1[i].pt[1] - keypoints1[i].size))), (int(float(keypoints1[i].pt[0])), int(float(keypoints1[i].pt[1] + keypoints1[i].size))), (255, 0, 0), 1)


                //# print(data1)
                //n1, bins1, patches1 = hist(data1, 200,[0, max(data1)], normed = 100, cumulative = True, bottom = True, histtype = 'stepfilled', align = 'mid', orientation = 'vertical', rwidth = 1, log = False, color = "r")

                //              tamano = open(temp + "instancia_" + instancia + ".txt", "w")


                //              x = np.array(bins1)

                //              y = np.append([0], n1)

                //                  xnew = [x[1], x[21], x[36], x[45], x[53], x[60], x[69], x[78], x[88], x[97], x[200]]
                //ynew = [y[1], y[21], y[36], y[45], y[53], y[60], y[69], y[78], y[88], y[97], y[200]]

                //tamano.write('INSERT INTO [dbo].[Granulometria](Cod_Instancia,Fecha,P_10,P_20,P_30,P_40,P_50,P_60,P_70,P_80,P_90,P_100, Filename) values (')
                //tamano.write(instancia + ",CONVERT(datetime, '" + sys.argv[1][0:4] + "-" + sys.argv[1][4:6] + "-" + sys.argv[1][6:8] + ' ' + sys.argv[1][9:11] + ':' + sys.argv[1][11:13] + ':' + sys.argv[1][13:15] + "', 120)")

                //for j in range(1, len(xnew)):
                //  #tamano.write (str(j)+'\t'+str(round(xnew[j],1))+'\t'+str(round(ynew[j]*100,2))+'\n')
                //  tamano.write(',' + str(round(xnew[j], 1)))

                //tamano.write(",'" + sys.argv[1] + " - Resultado.jpg'")
                //tamano.write(')')

                //CvXImgProc.Thinning(mascaraInv, mascaraInv, ThinningTypes.ZHANGSUEN);

                Mat imWithKeypoints1 = new Mat();
                Cv2.DrawKeypoints(imagen, segmentosBlob, imWithKeypoints1, new Scalar(0, 0, 255), DrawMatchesFlags.DrawRichKeypoints);


                var dataTamaños = segmentosBlob.Select(s => s.Size).ToArray();


                Cv2.ImWrite("D:\\Dictuc\\output0" + numImg + ".png", imagen);
                Cv2.ImWrite("D:\\Dictuc\\output1" + numImg++ + ".png", imWithKeypoints1);

                Cv2.ImShow("Segmentado", imagen);
                Cv2.ImShow("GrisContraste", imagenGrisContraste);
                Cv2.ImShow("bordes90", imagenBordes);
                Cv2.ImShow("bordes50", imagenBordes2);

                salida.Write(imagen);

                //System.Threading.Thread.Sleep(10);
                Cv2.WaitKey(10);

                // NOTE(review): imagenBordes2, mascaraInv, DistSureFg, AreasSureFg,
                // Centroides, Unknown, Marcadores, seg, edge1, hist, imWithKeypoints1
                // are not released here and accumulate native memory each iteration.
                imagenRuidoFiltrado.Release();
                imagenGrisContraste.Release();
                imagenGrisFrecAltasProc.Release();
                imagenBordes.Release();
                imagenBinaria.Release();
                imagenAberturaRelleno.Release();
            }
        }
Пример #17
0
        /// <summary>
        /// Blob-detection demo: loads an image, thresholds it to binary, runs
        /// SimpleBlobDetector with every shape/size filter disabled (so all stable
        /// blobs are reported), prints the keypoints and displays them on screen.
        /// </summary>
        static void Main(string[] args)
        {
            var srcImage = new Mat(@"..\..\Images\cvlbl.png");

            Cv2.ImShow("Source", srcImage);
            Cv2.WaitKey(1); // do events


            var binaryImage = new Mat(srcImage.Size(), MatType.CV_8UC1);

            Cv2.CvtColor(srcImage, binaryImage, ColorConversion.BgrToGray);
            Cv2.Threshold(binaryImage, binaryImage, thresh: 100, maxval: 255, type: ThresholdType.Binary);

            // Every filter is intentionally off; the commented values document the
            // tuning knobs available for each filter group.
            var detectorParams = new SimpleBlobDetector.Params
            {
                //MinDistBetweenBlobs = 10, // 10 pixels between blobs
                //MinRepeatability = 1,

                //MinThreshold = 100,
                //MaxThreshold = 255,
                //ThresholdStep = 5,

                FilterByArea = false,
                //MinArea = 0.001f, // 10 pixels squared
                //MaxArea = 500,

                FilterByCircularity = false,
                //MinCircularity = 0.001f,

                FilterByConvexity = false,
                //MinConvexity = 0.001f,
                //MaxConvexity = 10,

                FilterByInertia = false,
                //MinInertiaRatio = 0.001f,

                FilterByColor = false
                //BlobColor = 255 // to extract light blobs
            };
            var simpleBlobDetector = new SimpleBlobDetector(detectorParams);
            var keyPoints          = simpleBlobDetector.Detect(binaryImage);
            simpleBlobDetector.Dispose(); // was leaked in the original

            Console.WriteLine("keyPoints: {0}", keyPoints.Length);
            foreach (var keyPoint in keyPoints)
            {
                Console.WriteLine("X: {0}, Y: {1}", keyPoint.Pt.X, keyPoint.Pt.Y);
            }

            var imageWithKeyPoints = new Mat();

            Cv2.DrawKeypoints(
                image: binaryImage,
                keypoints: keyPoints,
                outImage: imageWithKeyPoints,
                color: Scalar.FromRgb(255, 0, 0),
                flags: DrawMatchesFlags.DrawRichKeypoints);


            Cv2.ImShow("Key Points", imageWithKeyPoints);
            Cv2.WaitKey(1); // do events


            Cv2.WaitKey(0);

            Cv2.DestroyAllWindows();
            srcImage.Dispose();
            binaryImage.Dispose(); // was leaked in the original
            imageWithKeyPoints.Dispose();
        }
Пример #18
0
        /// <summary>
        /// Camera loop: grabs frames, binarises and denoises them, then watches the
        /// play-button region for a blob. Thirty consecutive hits (~1 s at 30 fps)
        /// trigger <c>CheckNotes()</c>. Press ESC in the capture window to exit.
        /// </summary>
        /// <returns>0 on normal exit, 1 if the camera could not be created, 2 if it is not open.</returns>
        public int Looper()
        {
            VideoCapture cap;

            //Try to initialise the VideoCapture with the default camera. Catches NullReferenceException
            try
            {
                cap = new VideoCapture(2);
            }
            catch (NullReferenceException e)
            {
                Debug.WriteLine(e.Message);
                return 1;
            }
            cap.Read(capture);
            CvInvoke.NamedWindow("source", NamedWindowType.AutoSize);
            CvInvoke.Imshow("source", capture);
            CvInvoke.WaitKey(0);
            System.Threading.Thread.Sleep(1000);
            cap.Read(capture);
            //Check if camera is opened. If not, release it and exit the looper with code 2.
            if (!cap.IsOpened)
            {
                cap.Dispose(); // was leaked in the original
                return 2;
            }
            int counter = 0;

            // Centre of the play button and half-size of its hit box, in pixels.
            const double playX     = 466;
            const double playY     = 65;
            const double tolerance = 37.5;

            //Create infinite loop for checking whether there is a blob on the play button
            while (true)
            {
                //Stores image from camera into capture
                cap.Read(capture);
                //Make the image grayscale, use median blur with a kernel size of 5 then threshold it
                CvInvoke.CvtColor(capture, capture, ColorConversion.Bgr2Gray, 0);
                CvInvoke.MedianBlur(capture, capture, 5);
                CvInvoke.Threshold(capture, capture, 215, 255, ThresholdType.Binary);
                //Eroding and dilating to get rid of most of the noise
                CvInvoke.Erode(capture, capture,
                               CvInvoke.GetStructuringElement(ElementShape.Ellipse, size, point),
                               point, 3, BorderType.Default, new MCvScalar(1.0));
                CvInvoke.Dilate(capture, capture,
                                CvInvoke.GetStructuringElement(ElementShape.Ellipse, size, point),
                                point, 5, BorderType.Default, new MCvScalar(1.0));
                //Another median blur with a kernel size of 5 in order to smooth out the image and get rid of any remaining noise.
                CvInvoke.MedianBlur(capture, capture, 5);
                //Show capture in a named window
                CvInvoke.NamedWindow("source", NamedWindowType.AutoSize);
                CvInvoke.Imshow("source", capture);
                //Invert capture to check for blobs. Use SimpleBlobDetector to find them, then show them in a separate window.
                CvInvoke.BitwiseNot(capture, invertedCapture);
                if (!coordsLocked)
                {
                    LockCoords();
                }
                using (VectorOfKeyPoint keyPoints = new VectorOfKeyPoint(detector.Detect(invertedCapture)))
                {
                    Features2DToolbox.DrawKeypoints(invertedCapture, keyPoints, captureWithKeypoints, new Bgr(0, 0, 255));
                    CvInvoke.NamedWindow("KeyPoints", NamedWindowType.AutoSize);
                    CvInvoke.Imshow("KeyPoints", captureWithKeypoints);
                    //Loop through all the keypoints found by the SimpleBlobDetector.
                    //If there is one on the play button, it will add 1 to the counter and the thread will sleep for 1/30 of a second.
                    for (int i = 0; i < keyPoints.Size; i++)
                    {
                        int x = (int)keyPoints[i].Point.X;
                        int y = (int)keyPoints[i].Point.Y;
                        Debug.WriteLine(x);
                        Debug.WriteLine(y);
                        if (playX + tolerance > x && playX - tolerance < x && y < playY + tolerance && y > playY - tolerance)
                        {
                            counter++;
                            System.Threading.Thread.Sleep(1000 / 30);
                            if (counter == 30)
                            {
                                //If the counter has reached 30, it'll run the CheckNotes() function and then reset the counter.
                                CheckNotes();
                                counter = 0;
                            }
                        }
                    }
                }
                //Exit capture by pressing escape
                if (CvInvoke.WaitKey(10) == 27)
                {
                    break;
                }
            }
            cap.Dispose(); // release the camera - was leaked in the original
            return 0;
        }
Example #19
0
        /// <summary>
        /// Continuously grabs frames from the webcam, locates the sheet of paper
        /// (largest external contour), warps it to a top-down view, detects square
        /// and triangle blobs on it, and publishes the sorted result to the global
        /// <c>shapes</c> array. Runs until the capture closes or a frame grab fails.
        /// </summary>
        private void GenerateWorkspace()
        {
            //========== Objects and Variables ==========
            Mat captureFrame;
            Image <Gray, byte> processFrame;
            Image <Gray, byte> workspaceFrame;

            MKeyPoint[] squareBlobs;
            MKeyPoint[] triBlobs;

            VectorOfVectorOfPoint processFrameContours = new VectorOfVectorOfPoint();
            byte processFrameThreshold = 150;

            //========== Square and Triangle Blob Detector Config ==========
            // Shapes are separated purely by blob area: squares >= 10000 px,
            // triangles 2000-9999 px. Circularity bounds are set but inert
            // because FilterByCircularity is false.

            SimpleBlobDetectorParams squareBlobDetectorParams = new SimpleBlobDetectorParams();
            SimpleBlobDetectorParams triBlobDetectorParams    = new SimpleBlobDetectorParams();

            squareBlobDetectorParams.FilterByArea        = true;
            squareBlobDetectorParams.FilterByCircularity = false;
            squareBlobDetectorParams.FilterByColor       = false;
            squareBlobDetectorParams.FilterByInertia     = false;
            squareBlobDetectorParams.FilterByConvexity   = false;
            squareBlobDetectorParams.MinArea             = 10000;
            squareBlobDetectorParams.MaxArea             = 100000;
            squareBlobDetectorParams.MaxCircularity      = 1;
            squareBlobDetectorParams.MinCircularity      = 0.67f;
            squareBlobDetectorParams.blobColor           = 255;
            SimpleBlobDetector squareBlobDetector = new SimpleBlobDetector(squareBlobDetectorParams);

            triBlobDetectorParams.FilterByArea        = true;
            triBlobDetectorParams.FilterByCircularity = false;
            triBlobDetectorParams.FilterByColor       = false;
            triBlobDetectorParams.FilterByInertia     = false;
            triBlobDetectorParams.FilterByConvexity   = false;
            triBlobDetectorParams.MinArea             = 2000;
            triBlobDetectorParams.MaxArea             = 9999;
            triBlobDetectorParams.MaxCircularity      = 0.66f;
            triBlobDetectorParams.MinCircularity      = 0.01f;
            triBlobDetectorParams.blobColor           = 255;
            SimpleBlobDetector triBlobDetector = new SimpleBlobDetector(triBlobDetectorParams);

            //========== Begin Shape Detection Algorithm ==========

            while (_capture.IsOpened)
            {
                //==== Pull Image from the Webcam ====
                captureFrame = _capture.QueryFrame();
                if (captureFrame == null)
                {
                    // Camera stream ended or the frame grab failed; stop the loop
                    // instead of dereferencing a null frame.
                    break;
                }

                //==== Scrub Captured Frame ====
                // Optimizes the image for paper edge detection: median-smooth in
                // HSV, equalize the value channel, then suppress saturated
                // (colored) regions so only the bright white paper survives the
                // binary threshold.
                Image <Bgr, byte> processFrameBGR = captureFrame.ToImage <Bgr, byte>();
                Image <Hsv, byte> processFrameHSV = processFrameBGR.Convert <Hsv, byte>();
                processFrameHSV = processFrameHSV.SmoothMedian(9);
                Image <Gray, byte>[] processFrameHsvChannels = processFrameHSV.Split();
                processFrameHsvChannels[2]._EqualizeHist();
                //darken any colors in the image (high saturation lowers value)
                processFrameHsvChannels[2] -= processFrameHsvChannels[1] * 1.5 * (processFrameHsvChannels[2].GetAverage().Intensity / 255.0);
                processFrame = processFrameHsvChannels[2];
                //convert to a binary image
                processFrame._ThresholdBinary(new Gray(processFrameThreshold), new Gray(255));

                //==== Detect Paper Border ====
                CvInvoke.FindContours(processFrame, processFrameContours, null, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
                if (processFrameContours.Size == 0)
                {
                    // No contours at all (e.g. lens covered): indexing
                    // processFrameContours[0] below would throw, so skip this frame.
                    DisplayFrames(captureFrame, processFrame, processFrame);
                    continue;
                }

                // Pick the largest external contour; assumed to be the paper.
                int    largestContourIndex = 0;
                double largestContour      = 0;
                double currentContour      = 0;
                for (int i = 0; i < processFrameContours.Size; i++)
                {
                    currentContour = CvInvoke.ContourArea(processFrameContours[i], false);
                    if (currentContour > largestContour)
                    {
                        largestContour      = currentContour;
                        largestContourIndex = i;
                    }
                }
                CvInvoke.DrawContours(captureFrame, processFrameContours, largestContourIndex, new MCvScalar(255, 0, 0), 3);

                // Approximate the paper outline to a polygon; a 4-vertex result is
                // treated as the paper's corner set.
                VectorOfPoint contourPoints = new VectorOfPoint();
                CvInvoke.ApproxPolyDP(processFrameContours[largestContourIndex], contourPoints, CvInvoke.ArcLength(processFrameContours[largestContourIndex], true) * 0.06, true);
                Point[] workspaceCorners = contourPoints.ToArray();

                if (workspaceCorners.Length == 4)
                {
                    // Rotate the corner order by two so the indices match the
                    // ordering expected downstream.
                    Point[] temp = new Point[workspaceCorners.Length];
                    temp[0]          = workspaceCorners[2];
                    temp[1]          = workspaceCorners[3];
                    temp[2]          = workspaceCorners[0];
                    temp[3]          = workspaceCorners[1];
                    workspaceCorners = temp;

                    //averages the paper corners over the last five frames to reduce jitter
                    workspaceCorners = workspaceCornersAverage(workspaceCorners);
                    for (int i = 0; i < workspaceCorners.Length; i++)
                    {
                        CvInvoke.Circle(captureFrame, workspaceCorners[i], 6, new MCvScalar(0, 255, 0), -1);
                        CvInvoke.PutText(captureFrame, i.ToString(), workspaceCorners[i], Emgu.CV.CvEnum.FontFace.HersheyPlain, 3, new MCvScalar(0, 0, 255), 2);
                    }

                    //==== Warp Paper Perspective ====
                    // Sort corners top-to-bottom then left-to-right and map them to
                    // the full frame rectangle, producing a top-down view of the paper.
                    workspaceFrame = processFrame;
                    IEnumerable <Point> query  = workspaceCorners.OrderBy(point => point.Y).ThenBy(point => point.X);
                    PointF[]            ptsSrc = new PointF[4];
                    PointF[]            ptsDst = new PointF[] { new PointF(0, 0), new PointF(workspaceFrame.Width - 1, 0), new PointF(0, workspaceFrame.Height - 1), new PointF(workspaceFrame.Width - 1, workspaceFrame.Height - 1) };
                    for (int i = 0; i < 4; i++)
                    {
                        ptsSrc[i] = new PointF(query.ElementAt(i).X, query.ElementAt(i).Y);
                    }

                    using (var matrix = CvInvoke.GetPerspectiveTransform(ptsSrc, ptsDst))
                    {
                        using (var cutImagePortion = new Mat())
                        {
                            CvInvoke.WarpPerspective(workspaceFrame, cutImagePortion, matrix, new Size(workspaceFrame.Width, workspaceFrame.Height), Inter.Cubic);
                            // Double flip == 180-degree rotation of the warped view.
                            workspaceFrame = cutImagePortion.ToImage <Gray, Byte>().Flip(FlipType.Vertical).Flip(FlipType.Horizontal);
                        }
                    }

                    //==== Detect Blobs on Warped Image ====
                    squareBlobs = squareBlobDetector.Detect(workspaceFrame);
                    triBlobs    = triBlobDetector.Detect(workspaceFrame);

                    //==== Transfer Blobs To Shape Object Array ====
                    // Positions are scaled into an 85 x 110 workspace coordinate
                    // system (presumably letter-paper units — TODO confirm).
                    Shape[] foundShapes = new Shape[squareBlobs.Length + triBlobs.Length];

                    for (int i = 0; i < squareBlobs.Length; i++)
                    {
                        foundShapes[i].position.X = (85 * squareBlobs[i].Point.X / workspaceFrame.Width);
                        foundShapes[i].position.Y = (110 * squareBlobs[i].Point.Y / workspaceFrame.Height);
                        foundShapes[i].type       = (int)Shape.Type.Square;
                        Point Keypoint = new Point((int)squareBlobs[i].Point.X, (int)squareBlobs[i].Point.Y);
                        CvInvoke.Circle(workspaceFrame, Keypoint, 6, new MCvScalar(150, 150, 0), -1);
                        CvInvoke.PutText(workspaceFrame, "Sq", Keypoint, Emgu.CV.CvEnum.FontFace.HersheyPlain, 2, new MCvScalar(150, 150, 0), 2);
                    }

                    for (int i = 0; i < triBlobs.Length; i++)
                    {
                        foundShapes[i + squareBlobs.Length].position.X = (85 * triBlobs[i].Point.X / workspaceFrame.Width);
                        // BUGFIX: was a second assignment to position.X using
                        // Point.X / Height, leaving triangle Y unset and breaking
                        // the Y-sort below.
                        foundShapes[i + squareBlobs.Length].position.Y = (110 * triBlobs[i].Point.Y / workspaceFrame.Height);
                        foundShapes[i + squareBlobs.Length].type       = (int)Shape.Type.Triangle;
                        Point Keypoint = new Point((int)triBlobs[i].Point.X, (int)triBlobs[i].Point.Y);
                        CvInvoke.Circle(workspaceFrame, Keypoint, 6, new MCvScalar(150, 150, 0), -1);
                        CvInvoke.PutText(workspaceFrame, "Tri", Keypoint, Emgu.CV.CvEnum.FontFace.HersheyPlain, 2, new MCvScalar(150, 150, 0), 2);
                    }

                    //==== Sort Shapes by Order (Front to Back on Paper) ====
                    foundShapes = foundShapes.OrderBy(shape => shape.position.Y).ToArray();

                    // Publish on the UI thread; 'shapes' is the global shape array.
                    Invoke(new Action(() =>
                    {
                        shapes = foundShapes;
                    }));

                    //==== Display the Important Images ====
                    DisplayFrames(captureFrame, processFrame, workspaceFrame);
                }
                else
                {
                    // Paper not reliably found this frame; show the raw threshold
                    // image in place of the warped workspace.
                    DisplayFrames(captureFrame, processFrame, processFrame);
                }
            }
        }