public static void Run()
        {
            string imagePath = "./images/geo.jpg";
            Mat    img       = Cv2.ImRead(imagePath);

            SimpleBlobDetector.Params blobDetectorParams = new SimpleBlobDetector.Params();
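            // SimpleBlobDetector internally thresholds the image from MinThreshold to MaxThreshold
            // (in ThresholdStep increments) and keeps blobs whose centres repeat across the thresholded images.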
            blobDetectorParams.MinThreshold = 10;
            blobDetectorParams.MaxThreshold = 100;
            //FlannBasedMatcher matcher = new FlannBasedMatcher(null, null); // unused in this blob-detection example

            blobDetectorParams.FilterByArea = true;
            blobDetectorParams.MinArea      = 150;

            blobDetectorParams.FilterByCircularity = true;
            blobDetectorParams.MinCircularity      = 0.3f;

            blobDetectorParams.FilterByConvexity = true;
            blobDetectorParams.MinConvexity      = 0.2f;

            blobDetectorParams.FilterByInertia = true;
            blobDetectorParams.MinInertiaRatio = 0.1f;

            var detector  = SimpleBlobDetector.Create(blobDetectorParams);
            var keypoints = detector.Detect(img);
            Mat output    = new Mat();

            Cv2.DrawKeypoints(img, keypoints, output, new Scalar(0, 0, 255), DrawMatchesFlags.DrawRichKeypoints);

            Cv2.ImShow("Output", output);
            Cv2.WaitKey(0);
        }
        protected override void SandwormSolveInstance(IGH_DataAccess DA)
        {
            SetupLogging();
            markerColors = new List <Color>();
            DA.GetDataList(0, markerColors);
            DA.GetData(1, ref colorFuzz);
            GetSandwormOptions(DA, 2, 0, 0);
            SetupKinect();
            Core.LogTiming(ref output, timer, "Initial setup"); // Debug Info

            var binaryImage = GenerateColorImage();

            Core.LogTiming(ref output, timer, "Image generation"); // Debug Info
            if (binaryImage != null)
            {
                // Search image for the color and identify/classify
                var keyPoints          = new List <KeyPoint>();
                var detectorParameters = new SimpleBlobDetector.Params
                {
                    FilterByArea        = true,
                    FilterByColor       = true, // If this doesn't work, pre-filter the image instead
                    MinDistBetweenBlobs = 1,
                    MinArea             = 10,
                    MaxArea             = 20
                };
                Core.LogTiming(ref output, timer, "Detector setup"); // Debug Info

                foreach (Color markerColor in markerColors)
                {
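                    // NOTE: the same parameters are reused for every marker colour, and detection
                    // runs on the full binary image rather than a per-colour mask.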
                    var blobDetector = SimpleBlobDetector.Create(detectorParameters);
                    keyPoints.AddRange(blobDetector.Detect(binaryImage));
                    blobDetector.Dispose();
                }
                Core.LogTiming(ref output, timer, "Image blob detection"); // Debug Info

                // Translate identified points back into Grasshopper geometry
                markerPoints = new List <Point3d>();
                foreach (KeyPoint keyPoint in keyPoints)
                {
                    var x = keyPoint.Pt.X;
                    var y = keyPoint.Pt.Y;
                    markerPoints.Add(new Point3d(x, y, 0));
                }
                DA.SetDataList(0, markerPoints);
                Core.LogTiming(ref output, timer, "Blob output"); // Debug Info
            }
            else
            {
                // TODO: add warning?
            }
            binaryImage?.Dispose();

            DA.SetDataList(1, output); // For logging/debugging
            ScheduleSolve();
        }
    void CamUpdate()
    {
        CvUtil.GetWebCamMat(webCamTexture, ref mat);
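        // Convert the webcam frame to greyscale and binarise it so the blobs stand out clearly.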
        Cv2.CvtColor(mat, greyMat, ColorConversionCodes.RGBA2GRAY);
        Cv2.Threshold(greyMat, greyMat, 100, 255, ThresholdTypes.Binary);

        var detectorParams = new SimpleBlobDetector.Params
        {
            //MinDistBetweenBlobs = 10, // 10 pixels between blobs
            //MinRepeatability = 1,

            //MinThreshold = 100,
            //MaxThreshold = 255,
            //ThresholdStep = 5,

            FilterByArea = false,
            //FilterByArea = true,
            //MinArea = 0.001f, // 10 pixels squared
            //MaxArea = 500,

            FilterByCircularity = false,
            //FilterByCircularity = true,
            //MinCircularity = 0.001f,

            FilterByConvexity = false,
            //FilterByConvexity = true,
            //MinConvexity = 0.001f,
            //MaxConvexity = 10,

            FilterByInertia = false,
            //FilterByInertia = true,
            //MinInertiaRatio = 0.001f,

            FilterByColor = false
                            //FilterByColor = true,
                            //BlobColor = 255 // to extract light blobs
        };
        var simpleBlobDetector = SimpleBlobDetector.Create(detectorParams);
        var keyPoints          = simpleBlobDetector.Detect(greyMat);

        Cv2.DrawKeypoints(
            image: greyMat,
            keypoints: keyPoints,
            outImage: mat,
            color: Scalar.FromRgb(255, 0, 0),
            flags: DrawMatchesFlags.DrawRichKeypoints);

        CvConvert.MatToTexture2D(mat, ref tex);
        rawImage.texture = tex;
    }
        // find circles/dots using blob detection
        private static void FindBlob(CvCapture cap, CvWindow winScr)
        {
            SimpleBlobDetector.Params blobParameters = new SimpleBlobDetector.Params();

            // threshold (gray value)
            blobParameters.MinThreshold = blobMinThreshold;
            blobParameters.MaxThreshold = blobMaxThreshold;
            // area (pixel count)
            blobParameters.FilterByArea = true;
            blobParameters.MinArea      = blobMinArea;
            blobParameters.MaxArea      = blobMaxArea;
            // circularity
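            // (circularity = 4*pi*area / perimeter^2; 1.0 for a perfect circle)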
            blobParameters.FilterByCircularity = true;
            blobParameters.MinCircularity      = blobMinCircularity;
            // convexity - probably not needed - maybe eliminates false positives
            blobParameters.FilterByConvexity = true;
            blobParameters.MinConvexity      = blobMinConvexity;
            //// inertia - what do these values mean exactly?
            //blobParameters.FilterByInertia = true;
            //blobParameters.MinInertiaRatio =

            SimpleBlobDetector blobDetector = new SimpleBlobDetector(blobParameters);

            gray = new IplImage(cap.QueryFrame().Size, BitDepth.U8, 1);

            while (CvWindow.WaitKey(10) != 27)
            {
                IplImage iplImage = PerspectiveCorretoin.GetCorrectedImage(cap.QueryFrame());
                Cv.CvtColor(iplImage, gray, ColorConversion.RgbToGray);

                Mat mat = new Mat(gray);
                // NOTE: PyrDown returns a new Mat; the result is discarded here, so detection still runs at full resolution
                mat.PyrDown(new Size(mat.Width / 2, mat.Height / 2));

                KeyPoint[] keypoints = blobDetector.Detect(mat);

                foreach (KeyPoint item in keypoints)
                {
                    Cv.DrawCircle(gray, new CvPoint2D32f(item.Pt.X, item.Pt.Y), (int)(item.Size * 3), CvColor.Green);
                    Console.WriteLine("Found blob | size = " + item.Size);
                }
                winScr.Image = gray;
            }
        }
Example #5
        public void Run()
        {
            using var src             = Cv2.ImRead(FilePath.Image.Shapes);
            using var detectedCircles = new Mat();
            using var detectedOvals   = new Mat();

            // Invert the image. Shapes has a black background and SimpleBlobDetector doesn't seem to work well with that.
            Cv2.BitwiseNot(src, src);

            // Parameters tuned to detect only circles
            var circleParams = new SimpleBlobDetector.Params
            {
                MinThreshold = 10,
                MaxThreshold = 230,

                // The area is the number of pixels in the blob.
                FilterByArea = true,
                MinArea      = 500,
                MaxArea      = 50000,

                // Circularity is 4*pi*area / perimeter^2: a perfect circle scores 1.0, and polygons with more sides are more circular.
                FilterByCircularity = true,
                MinCircularity      = 0.9f,

                // Convexity is the ratio of the area of the blob to the area of its convex hull.
                FilterByConvexity = true,
                MinConvexity      = 0.95f,

                // A circle's inertia ratio is 1. A line's is 0. An oval is between 0 and 1.
                FilterByInertia = true,
                MinInertiaRatio = 0.95f
            };

            // Parameters tuned to find the ovals in the Shapes image.
            var ovalParams = new SimpleBlobDetector.Params
            {
                MinThreshold = 10,
                MaxThreshold = 230,
                FilterByArea = true,
                MinArea      = 500,
                // The ovals are the smallest blobs in Shapes, so we limit the max area to eliminate the larger blobs.
                MaxArea             = 10000,
                FilterByCircularity = true,
                MinCircularity      = 0.58f,
                FilterByConvexity   = true,
                MinConvexity        = 0.96f,
                FilterByInertia     = true,
                MinInertiaRatio     = 0.1f
            };

            using var circleDetector = SimpleBlobDetector.Create(circleParams);
            var circleKeyPoints = circleDetector.Detect(src);

            Cv2.DrawKeypoints(src, circleKeyPoints, detectedCircles, Scalar.HotPink, DrawMatchesFlags.DrawRichKeypoints);

            using var ovalDetector = SimpleBlobDetector.Create(ovalParams);
            var ovalKeyPoints = ovalDetector.Detect(src);

            Cv2.DrawKeypoints(src, ovalKeyPoints, detectedOvals, Scalar.HotPink, DrawMatchesFlags.DrawRichKeypoints);

            using var w1 = new Window("Detected Circles", detectedCircles);
            using var w2 = new Window("Detected Ovals", detectedOvals);

            Cv2.WaitKey();
        }
        public void Labeling_example()
        {
            var srcImage = new Mat("./TextSample.png");

            Cv2.ImShow("Source", srcImage);
            Cv2.WaitKey(1);

            var binaryImage = new Mat(srcImage.Size(), MatType.CV_8UC1);

            Cv2.CvtColor(srcImage, binaryImage, ColorConversionCodes.BGR2GRAY); // the source is loaded as 3-channel BGR
            Cv2.Threshold(binaryImage, binaryImage, thresh: 100, maxval: 255, type: ThresholdTypes.Binary);
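            // Every filter is disabled below, so the detector keeps any blob that is stable across its internal threshold sweep.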

            var detectorParams = new SimpleBlobDetector.Params
            {
                //MinDistBetweenBlobs = 10,
                //MinRepeatability = 1,

                //MinThreshold = 100,
                //MaxThreshold = 255,
                //ThresholdStep = 5,

                FilterByArea = false,
                //FilterByArea = true,
                //MinArea = 0.001f,
                //MaxArea = 500,

                FilterByCircularity = false,
                //FilterByCircularity = true,
                //MinCircularity = 0.001f,

                FilterByConvexity = false,
                //FilterByConvexity = true,
                //MinConvexity = 0.001f,
                //MaxConvexity = 10,

                FilterByInertia = false,
                //FilterByInertia = true,
                //MinInertiaRatio = 0.001f,

                FilterByColor = false
                                //FilterByColor = true,
                                //BlobColor = 255
            };
            var simpleBlobDetector = SimpleBlobDetector.Create(detectorParams);
            var keyPoints          = simpleBlobDetector.Detect(binaryImage);

            foreach (var keyPoint in keyPoints)
            {
                Debug.WriteLine("X: {0}, Y: {1}", keyPoint.Pt.X, keyPoint.Pt.Y);
            }

            var imageWithKeyPoints = new Mat();

            Cv2.DrawKeypoints(
                image: binaryImage,
                keypoints: keyPoints,
                outImage: imageWithKeyPoints,
                color: Scalar.FromRgb(255, 0, 0),
                flags: DrawMatchesFlags.DrawRichKeypoints);


            Cv2.ImShow("Key Points", imageWithKeyPoints);

            Cv2.DestroyAllWindows();
            srcImage.Dispose();
            imageWithKeyPoints.Dispose();
        }
    private void DemoIRBlobTrack()
    {
        int IRWidth  = kinectManager.IRWidth;
        int IRHeight = kinectManager.IRHeight;

        //get image and convert to threshold image
        Mat irImage = new Mat(IRHeight, IRWidth, MatType.CV_8UC4, kinectManager.IRRawData);              //rows=height, cols=width
        Mat ir8Bit  = new Mat();

        Cv2.CvtColor(irImage, ir8Bit, ColorConversionCodes.RGBA2GRAY);
        Cv2.Threshold(ir8Bit, ir8Bit, thresh: 200, maxval: 255, type: ThresholdTypes.Binary);
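        //pixels brighter than 200 become white blobs on a black background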

        //Find blobs
        SimpleBlobDetector.Params detectorParams = new SimpleBlobDetector.Params
        {
            //MinDistBetweenBlobs = 10, // 10 pixels between blobs
            //MinRepeatability = 1,

            //MinThreshold = 100,
            //MaxThreshold = 255,
            //ThresholdStep = 5,

            FilterByArea = false,
            //FilterByArea = true,
            //MinArea = 0.001f, // 10 pixels squared
            //MaxArea = 500,

            FilterByCircularity = false,
            //FilterByCircularity = true,
            //MinCircularity = 0.001f,

            FilterByConvexity = false,
            //FilterByConvexity = true,
            //MinConvexity = 0.001f,
            //MaxConvexity = 10,

            FilterByInertia = false,
            //FilterByInertia = true,
            //MinInertiaRatio = 0.001f,

            FilterByColor = false
                            //FilterByColor = true,
                            //BlobColor = 255 // to extract light blobs
        };

        SimpleBlobDetector simpleBlobDetector = SimpleBlobDetector.Create(detectorParams);

        KeyPoint[] blobs = simpleBlobDetector.Detect(ir8Bit);


        foreach (KeyPoint kp in blobs)
        {
            Vector2 blobPt = new Vector2(kp.Pt.X, kp.Pt.Y);

            //transform ir point to unity world space
            Vector2 irDimensions = new Vector2(kinectManager.IRWidth, kinectManager.IRHeight);
            irTrack.transform.localPosition = KinectCVUtilities.TransformTextureToUnity(irPlane, irDimensions, blobPt) + irOffset;


            //transform ir point to color space, then world space
            DepthSpacePoint depthPt = new DepthSpacePoint();
            depthPt.X = blobPt.x;
            depthPt.Y = blobPt.y;
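            //Kinect v2's IR and depth frames share the same 512x424 resolution, so the IR blob coordinates are used directly as depth coordinates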
            double          depth         = GetAvg(kinectManager.DepthData, (int)depthPt.X, (int)depthPt.Y, kinectManager.DepthWidth, kinectManager.DepthHeight);
            ColorSpacePoint colorMappedPt = kinectManager.Sensor.CoordinateMapper.MapDepthPointToColorSpace(depthPt, (ushort)depth);

            Vector2 colorDimensions = new Vector2(kinectManager.ColorWidth, kinectManager.ColorHeight);
            Vector2 colorPt         = new Vector2(colorMappedPt.X, colorMappedPt.Y);
            colorTrack.transform.localPosition = KinectCVUtilities.TransformTextureToUnity(colorPlane, colorDimensions, colorPt) + colorOffset;
        }


        //convert back to unity texture, add nice debug drawings
        Mat irImageKeyPoints = new Mat();

        Cv2.DrawKeypoints(ir8Bit, blobs, irImageKeyPoints, color: Scalar.FromRgb(255, 0, 0),
                          flags: DrawMatchesFlags.DrawRichKeypoints);

        //Convert back to RGBA32 (rows = height, cols = width)
        Mat irImageOut = new Mat(IRHeight, IRWidth, MatType.CV_8UC4);

        Cv2.CvtColor(irImageKeyPoints, irImageOut, ColorConversionCodes.BGR2RGBA);      //OpenCV is weird and has it in BGR format

        //load onto texture
        byte[] rawTextureData = KinectCVUtilities.ConvertMatToBytes(irImageOut);

        if (overrideIRTexture)
        {
            kinectManager.IRTexture.LoadRawTextureData(rawTextureData);
            kinectManager.IRTexture.Apply();
        }
    }
Example #8
        static void Main(string[] args)
        {
            var srcImage = new Mat(@"..\..\Images\cvlbl.png");

            Cv2.ImShow("Source", srcImage);
            Cv2.WaitKey(1); // do events


            var binaryImage = new Mat(srcImage.Size(), MatType.CV_8UC1);

            Cv2.CvtColor(srcImage, binaryImage, ColorConversionCodes.BGR2GRAY); // ImRead loads a 3-channel BGR image
            Cv2.Threshold(binaryImage, binaryImage, thresh: 100, maxval: 255, type: ThresholdTypes.Binary);

            var detectorParams = new SimpleBlobDetector.Params
            {
                //MinDistBetweenBlobs = 10, // 10 pixels between blobs
                //MinRepeatability = 1,

                //MinThreshold = 100,
                //MaxThreshold = 255,
                //ThresholdStep = 5,

                FilterByArea = false,
                //FilterByArea = true,
                //MinArea = 0.001f, // 10 pixels squared
                //MaxArea = 500,

                FilterByCircularity = false,
                //FilterByCircularity = true,
                //MinCircularity = 0.001f,

                FilterByConvexity = false,
                //FilterByConvexity = true,
                //MinConvexity = 0.001f,
                //MaxConvexity = 10,

                FilterByInertia = false,
                //FilterByInertia = true,
                //MinInertiaRatio = 0.001f,

                FilterByColor = false
                                //FilterByColor = true,
                                //BlobColor = 255 // to extract light blobs
            };
            var simpleBlobDetector = SimpleBlobDetector.Create(detectorParams);
            var keyPoints          = simpleBlobDetector.Detect(binaryImage);

            Console.WriteLine("keyPoints: {0}", keyPoints.Length);
            foreach (var keyPoint in keyPoints)
            {
                Console.WriteLine("X: {0}, Y: {1}", keyPoint.Pt.X, keyPoint.Pt.Y);
            }

            var imageWithKeyPoints = new Mat();

            Cv2.DrawKeypoints(
                image: binaryImage,
                keypoints: keyPoints,
                outImage: imageWithKeyPoints,
                color: Scalar.FromRgb(255, 0, 0),
                flags: DrawMatchesFlags.DrawRichKeypoints);


            Cv2.ImShow("Key Points", imageWithKeyPoints);
            Cv2.WaitKey(1); // do events


            Cv2.WaitKey(0);

            Cv2.DestroyAllWindows();
            srcImage.Dispose();
            imageWithKeyPoints.Dispose();
        }
Example #9
        Vector3 triangulate(int j, HyperMegaStuff.HyperMegaLines drawer = null)
        {
            Ray[] rays         = new Ray[2];
            Mat   workingImage = new Mat(calibrationDevices[j].webcam.leftImage.Height,
                                         calibrationDevices[j].webcam.leftImage.Width,
                                         calibrationDevices[j].webcam.leftImage.Type(), 0);

            for (int i = 0; i < 2; i++)
            {
                Mat curMat = i == 0 ? calibrationDevices[j].webcam.leftImage :
                             calibrationDevices[j].webcam.rightImage;

                if (calibrationDevices[j].subtractionImage[i] != null)
                {
                    // Subtract the background from the curMat
                    Cv2.Subtract(curMat, calibrationDevices[j].subtractionImage[i], workingImage);

                    // Threshold the image to separate black and white
                    Cv2.Threshold(workingImage, workingImage, blobThreshold, 255, ThresholdTypes.BinaryInv); // TODO MAKE THRESHOLD TUNABLE

                    // Detect Blobs using the Mask
                    var settings = new SimpleBlobDetector.Params();
                    settings.FilterByArea        = false;
                    settings.FilterByColor       = false;
                    settings.FilterByInertia     = true;
                    settings.FilterByConvexity   = true;
                    settings.FilterByCircularity = false;
                    SimpleBlobDetector detector = SimpleBlobDetector.Create(settings);
                    KeyPoint[]         blobs    = detector.Detect(workingImage, calibrationDevices[j].maskImage[i]);
                    Cv2.DrawKeypoints(workingImage, blobs, workingImage, 255);
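                    // Keep only the largest detected blob (by key point size)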
                    int biggest = -1; float size = 0;
                    for (int k = 0; k < blobs.Length; k++)
                    {
                        if (blobs[k].Size > size)
                        {
                            biggest = k;
                            size    = blobs[k].Size;
                        }
                    }

                    // If at least one blob was found, assume the largest is the white circle
                    if (blobs.Length > 0)
                    {
                        float[] pointArr         = { blobs[biggest].Pt.X, blobs[biggest].Pt.Y };
                        Mat     point            = new Mat(1, 1, MatType.CV_32FC2, pointArr);
                        Mat     undistortedPoint = new Mat(1, 1, MatType.CV_32FC2, 0);
                        Cv2.UndistortPoints(point, undistortedPoint, calibrationDevices[j].calibration.cameras[i].cameraMatrixMat,
                                            calibrationDevices[j].calibration.cameras[i].distCoeffsMat,
                                            calibrationDevices[j].calibration.cameras[i].rectificationMatrixMat);
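                        // UndistortPoints yields normalised, rectified image coordinates, which are turned into a viewing ray below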
                        Point2f[] rectilinear = new Point2f[1];
                        undistortedPoint.GetArray(0, 0, rectilinear);
                        Transform camera = i == 0 ? calibrationDevices[j].LeftCamera : calibrationDevices[j].RightCamera;
                        rays[i] = new Ray(camera.position, camera.TransformDirection(
                                              new Vector3(-rectilinear[0].X, rectilinear[0].Y, 1f)));
                        if (drawer != null)
                        {
                            drawer.color = ((j == 0) != (i == 0)) ? Color.cyan : Color.red;
                            drawer.DrawRay(rays[i].origin, rays[i].direction);
                        }
                    }
                }
            }
            workingImage.Release();

            // Only accept the triangulated point if the rays match up closely enough
            if (rays[0].origin != Vector3.zero &&
                rays[1].origin != Vector3.zero)
            {
                Vector3 point1 = RayRayIntersection(rays[0], rays[1]);
                Vector3 point2 = RayRayIntersection(rays[1], rays[0]);

                if (Vector3.Distance(point1, point2) < 0.005f)
                {
                    return((point1 + point2) * 0.5f);
                }
                else
                {
                    return(Vector3.zero);
                }
            }
            else
            {
                return(Vector3.zero);
            }
        }
Example #10
        /// <summary>
        /// Search for the maximum value
        /// </summary>
        public void detect()
        {
            if (appSettings.CamPlatform == Platform.MT2)
            {
                theta_c = -udpkv.cal_mt2_theta(appSettings.Flipmode, appSettings.FlipOn) - appSettings.Theta;
            }

            if (!appSettings.UseDetect)
            {
                return;
            }
            int th_id = System.Threading.Thread.CurrentThread.ManagedThreadId; Console.WriteLine("detect ThreadID : " + th_id);

            #region Position detection 1 (MaxMin)
            // Mask update
            if (imgdata.id % 30 == 0)
            {
                //using (Mat img_avg = img_mask.Clone())
                {
                    try
                    {
                        double gain       = 1.0;
                        double offset     = 0.0;//-8;
                        double star_thres = 32;
                        //       Cv2.Min(imgdata.img, img_mask, img2); //fixed Mask
                        //Cv.Sub(imgdata.img, img_dark8, img2);
                        //Cv.ConvertScale(imgAvg, img_avg, gain, offset);
                        img_mask2 = imgAvg.ConvertScaleAbs(gain, offset);
                        //using (var img_avg = img_mask2 - img_dark8)
                        using (var img_avg = img_mask2.Clone())
                            using (Mat binary = img_mask2.Clone())
                                using (Mat binaryAdaptive = img_mask2.Clone())
                                {
                                    Cv2.Threshold(img_avg, binary, star_thres, 255, ThresholdTypes.BinaryInv);                                   // 4ms
                                    Cv2.AdaptiveThreshold(img_avg, binaryAdaptive, 255,
                                                          AdaptiveThresholdTypes.GaussianC, ThresholdTypes.Binary, 19, star_adaptive_threshold); // 9x9 53ms

                                    //cvwin.Image = binaryAdaptive;
                                    Cv2.Min(binaryAdaptive, binary, binaryAdaptive);
                                    //cvwin.Image = binary;
                                    Cv2.Min(img_mask, binaryAdaptive, img_mask2);
                                    //cvwin.Image = img_mask2;
                                    img2 = imgdata.img - img_avg; // Cv2.Sub(imgdata.img, img_avg, img2);
                                    //cvwin.Image = img2;

                                    //Cv2.ImShow("binaryAdaptive", binaryAdaptive);
                                    //Cv2.ImShow("binary", binary);
                                    //Cv2.ImShow("img-avg", img2.PyrDown().PyrDown());
                                }
                    } //  ms
                    catch (KeyNotFoundException)
                    {
                        MessageBox.Show("KeyNotFoundException:211a");
                    }
                }

                try
                {
                    double minv;
                    Point  minloc, maxloc;
                    //Cv.Smooth(imgdata.img, img2, SmoothType.Median, 5, 0, 0, 0);
                    //Cv.Threshold(img2, img2, appSettings.ThresholdBlob, 255, ThresholdType.Binary); //2ms
                    Cv2.MinMaxLoc(img2, out minv, out max_val, out minloc, out maxloc, img_mask2);
                    gx = maxloc.X; gy = maxloc.Y;
                    //Cv.Threshold(imgdata.img, img2, appSettings.ThresholdBlob, 255, ThresholdType.Binary); //2ms  fish requires a mask
                    //blobs.Label(img2); //3ms
                }//8ms
                catch (KeyNotFoundException)
                {
                    MessageBox.Show("KeyNotFoundException:211");
                }
            }
            #endregion

            #region Position detection 2 (Blob)
            try
            {
                //Cv.Smooth(imgdata.img, img2, SmoothType.Median, 5, 0, 0, 0);
                //Cv.Threshold(img2, img2, appSettings.ThresholdBlob, 255, ThresholdType.Binary); //2ms
                //Cv.Min(imgdata.img, img_mask, img2);
                Cv2.Threshold(imgdata.img, img2, appSettings.ThresholdBlob, 255, ThresholdTypes.Binary); //2ms  fish requires a mask
                SimpleBlobDetector.Params param = new SimpleBlobDetector.Params();
                param.MaxArea = 100000;
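                // Only the maximum blob area is raised; all other filters keep the detector defaults.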

                var      detector   = SimpleBlobDetector.Create(param);
                var      keypoints1 = detector.Detect(img2);
                KeyPoint maxkey     = new KeyPoint(new Point2f(0, 0), -1);
                foreach (var keyPoint in keypoints1)
                {
                    if (maxkey.Size < keyPoint.Size)
                    {
                        maxkey = keyPoint;
                    }
                    Console.WriteLine("X: {0}, Y: {1}", keyPoint.Pt.X, keyPoint.Pt.Y);
                }
                Console.WriteLine("MAX X: {0}, Y: {1} Size: {2}", maxkey.Pt.X, maxkey.Pt.Y, maxkey.Size);
                maxKeyPoint = maxkey;
                //blobs.Label(img2); //3ms
            }//8ms
            catch (KeyNotFoundException)
            {
                MessageBox.Show("KeyNotFoundException:211");
            }
            if (appSettings.UseDetect)
            {
                return;                     // always return here
            }
            try
            {
                if (blobs.Count > 0)
                {
                    int min_area = Math.Max(2, (int)(appSettings.ThresholdMinArea * maxBlob.Area));
                    blobs.FilterByArea(min_area, int.MaxValue); //2ms: remove blobs with area below min_area
                }
                max_label = 0;
                if (blobs.Count > 0)
                {
                    max_label = pos_mes.mesure(blobs); //4ms
                    //max_label = pos_mes.mesure(keyPoints1); //4ms
                }
            }//1ms
            catch (KeyNotFoundException)
            {
                MessageBox.Show("KeyNotFoundException:213");
            }

            if (max_label > 0 && blobs.ContainsKey(max_label))
            {
                try
                {
                    maxBlob = blobs[max_label];
                }
                catch (KeyNotFoundException)
                {
                    MessageBox.Show("KeyNotFoundException:2171");
                }
                try
                {
                    max_centroid = maxBlob.Centroid;
                }
                catch (KeyNotFoundException)
                {
                    MessageBox.Show("KeyNotFoundException:2172");
                }
                try
                {
                    gx        = max_centroid.X;
                    gy        = max_centroid.Y;
                    max_val   = maxBlob.Area;
                    blob_rect = maxBlob.Rect;
                }
                catch (KeyNotFoundException)
                {
                    MessageBox.Show("KeyNotFoundException:2173");
                }

                // Measurement update (Kalman)
                measurement.Set <double>(0, 0, (float)(gx - xoa)); //2ms
                measurement.Set <double>(1, 0, (float)(gy - yoa)); //7ms
                if (kalman_id++ == 0)
                {
                    // Set the initial state
                    double errcov = 1.0;
                    kalman.StatePost.Set(0, measurement.At <double>(0));
                    kalman.StatePost.Set(1, measurement.At <double>(1));
                    Cv2.SetIdentity(kalman.ErrorCovPost, new Scalar(errcov));
                }//2ms
                // Correction phase (Kalman)
                try
                {
                    correction = kalman.Correct(measurement);
                }
                catch (KeyNotFoundException)
                {
                    MessageBox.Show("KeyNotFoundException:216");
                }

                // Prediction phase (Kalman)
                try
                {
                    prediction = kalman.Predict();
                    kgx        = prediction.At <double>(0, 0) + xoa;
                    kgy        = prediction.At <double>(0, 1) + yoa;
                    kvx        = prediction.At <double>(0, 2);
                    kvy        = prediction.At <double>(0, 3);
                } //1ms
                catch (KeyNotFoundException)
                {
                    MessageBox.Show("KeyNotFoundException:215");
                }

                // Choose between the Kalman estimate and the measured centroid
                sgx = gx; sgy = gy;
                if ((Math.Abs(kgx - gx) + Math.Abs(kgy - gy) < 15))  //
                {
                    sgx = kgx;
                    sgy = kgy;
                    //imgSrc.Circle(new CvPoint((int)(prediction.DataArraySingle[0] + xoa), (int)(prediction.DataArraySingle[1] + yoa)), 30, new CvColor(100, 100, 255));
                    //w2.WriteLine("{0:D3} {1:F2} {2:F2} {3:F2} {4:F2} {5} {6} {7}", i, max_centroid.X, max_centroid.Y, prediction.DataArraySingle[0] + xc, prediction.DataArraySingle[1] + yc, vm, dx, dy);
                }
                dx = sgx - appSettings.Xoa;
                dy = sgy - appSettings.Yoa;

                // Compute the target position from the error (in pixels) relative to the aim point
                if (appSettings.CamPlatform == Platform.MT3)
                {
                    try
                    {
                        theta_c = -udpkv.cal_mt3_theta() - appSettings.Theta;
                        udpkv.cxcy2azalt(-dx, -dy, udpkv.az2_c, udpkv.alt2_c, udpkv.mt3mode, theta_c, appSettings.FocalLength, appSettings.Ccdpx, appSettings.Ccdpy, ref az, ref alt);
                        udpkv.cxcy2azalt(-(dx + kvx), -(dy + kvy), udpkv.az2_c, udpkv.alt2_c, udpkv.mt3mode, theta_c, appSettings.FocalLength, appSettings.Ccdpx, appSettings.Ccdpy, ref az1, ref alt1);
                        vaz  = udpkv.vaz2_kv + (az1 - az) * appSettings.Framerate;
                        valt = udpkv.valt2_kv + (alt1 - alt) * appSettings.Framerate;

                        daz = az - udpkv.az2_c; dalt = alt - udpkv.alt2_c;             //position error
                        //dvaz = (daz - daz1) / dt; dvalt = (dalt - dalt1) / dt;        //velocity error
                        //diff_vaz = (az - az_pre1) / dt; diff_valt = (alt - alt_pre1) / dt; //velocity difference

                        az0 = az; alt0 = alt;
                    }
                    catch (KeyNotFoundException)
                    {
                        MessageBox.Show("KeyNotFoundException:218");
                    }
                }
                else if (appSettings.CamPlatform == Platform.MT2)
                {
                    try
                    {
                        theta_c = -udpkv.cal_mt2_theta(appSettings.Flipmode, appSettings.FlipOn) - appSettings.Theta;
                        udpkv.cxcy2azalt_mt2(+dx, +dy, udpkv.az1_c, udpkv.alt1_c, udpkv.mt2mode, theta_c, appSettings.FocalLength, appSettings.Ccdpx, appSettings.Ccdpy, ref az, ref alt);
                        udpkv.cxcy2azalt_mt2(+(dx + kvx), +(dy + kvy), udpkv.az1_c, udpkv.alt1_c, udpkv.mt2mode, theta_c, appSettings.FocalLength, appSettings.Ccdpx, appSettings.Ccdpy, ref az1, ref alt1);
                        vaz  = udpkv.vaz1_kv + (az1 - az) * appSettings.Framerate;
                        valt = udpkv.valt1_kv + (alt1 - alt) * appSettings.Framerate;

                        daz = az - udpkv.az1_c; dalt = alt - udpkv.alt1_c;             //position error
                        az0 = az; alt0 = alt;
                    }
                    catch (KeyNotFoundException)
                    {
                        MessageBox.Show("KeyNotFoundException:218b");
                    }
                }
                // Data send
                if (ImgSaveFlag == TRUE)
                {
                    // Compute the observed target speed
                    double vk = 1000;  // [pixel/frame]
                    if (kalman_id > 3)
                    {
                        vk = Math.Sqrt(kvx * kvx + kvy * kvy);
                    }
                    // Send the observation data
                    //Pid_Data_Send(true);
                    short id_short = (short)frame_id;
                    if (id_short < 0)
                    {
                        id_short = (short)(-id_short);
                    }
                    Pid_Data_Send_KV1000_SpCam2(id_short, daz, dalt, vk); // 32767->7FFF

                    if (Math.Abs(udpkv.vaz2_kv) > 0.1 || Math.Abs(udpkv.valt2_kv) > 0.1)
                    {
                        // Extend the save period
                        //timerSavePostTime.Stop();
                        timerSavePost.Stop();
                        timerSavePost.Start();
                    }
                }
            }
            else
            {
                if (ImgSaveFlag == TRUE)
                {
                    // Send the observation data
                    //Pid_Data_Send(false);
                    ////Pid_Data_Send_KV1000_SpCam2((short)(-(id & 32767)), (az - udpkv.az2_c), (alt - udpkv.alt2_c), -1000);
                }
                gx      = gy = 0;
                sgx     = sgy = 0;
                max_val = 0;
            }

            // For checking the kv data
            if (ImgSaveFlag == TRUE)
            {
                //xpos = ((kd.x1 << 8) + kd.x0) << 4; // <<16 ->256*256  <<8 ->256
                //ypos = ((kd.y1 << 8) + kd.y0) << 4; // <<16 ->256*256  <<8 ->256
                string st = DateTime.Now.ToString("yyyy/MM/dd HH:mm:ss.fff ") + "(" + udpkv.xpos + " " + udpkv.ypos + ")( " + udpkv.x2pos + " " + udpkv.y2pos + ") " + udpkv.kd.x1 + " " + udpkv.kd.x0 + " " + udpkv.kd.y1 + " " + udpkv.kd.y0 + "\n";
                logger.Info(st);
            }

            #endregion

            elapsed2 = sw.ElapsedTicks; sw.Stop(); sw.Reset();
            // Processing speed
            double sf = (double)Stopwatch.Frequency / 1000; //msec
            lap0   = (1 - alpha) * lap0 + alpha * elapsed0 / sf;
            lap1   = (1 - alpha) * lap1 + alpha * elapsed1 / sf;
            lap2   = (1 - alpha) * lap2 + alpha * elapsed2 / sf;
            fr_str = String.Format("ID:{0,5:D1} L0:{1,4:F2} L1:{2,4:F2} L2:{3,4:F2}", frame_id, lap0, lap1, lap2);

            // Wide dynamic range setting: Exp 100-1-100-1-
            if (checkBox_DispMode.Checked)
            {
                // IDS
                if (cam_maker == Camera_Maker.IDS)
                {
                    //  statusRet = cam.Timing.Exposure.Get(out gx);
                    //  if (gx > set_exposure - 1)
                    //      statusRet = cam.Timing.Exposure.Set(set_exposure1);
                    //  else
                    //      statusRet = cam.Timing.Exposure.Set(set_exposure);
                }
            }
        }
Example #11
        static void Main(string[] args)
        {
            //Thread capturaVideoThread = new Thread(new ThreadStart(Program.CapturarVideo));
            //capturaVideoThread.Start();

            VideoCapture captura = new VideoCapture("D:\\Dictuc\\out1.avi");
            VideoWriter  salida  = new VideoWriter("D:\\Dictuc\\outSegmentado.avi", FourCC.XVID, 10.0, new Size(captura.FrameWidth, captura.FrameHeight), true);

            Mat imagenProcesada = new Mat();
            int numImg          = 0;

            while (true)
            {
                //captura.Read(imagen);
                imagen = Cv2.ImRead("D:\\uvas2.jpg");
                mutex.WaitOne();
                imagen.CopyTo(imagenProcesada);
                mutex.ReleaseMutex();
                Mat          imagenRuidoFiltrado     = FiltradoRuido(imagenProcesada);
                Mat          imagenGrisContraste     = EscalaGrisesEqualizada(imagenRuidoFiltrado);
                Mat          imagenGrisFrecAltasProc = FrecuenciasAltasPotenciadasContraste(imagenGrisContraste);
                EdgeDetector edgeDetector            = new EdgeDetector()
                {
                    Threshold           = (byte)18,
                    SparseDistance      = 3,
                    WeightPreviousPoint = (float)2.0,
                    WeightCurrentPoint  = (float)1.0,
                    WeightAfterPoint    = (float)2.0,
                };

                EdgeDetector edgeDetector2 = new EdgeDetector()
                {
                    Threshold           = (byte)20,
                    SparseDistance      = 5,
                    WeightPreviousPoint = (float)0.5,
                    WeightCurrentPoint  = (float)1.0,
                    WeightAfterPoint    = (float)0.5,
                };

                Mat imagenBordes = edgeDetector.EdgeImage(imagenGrisContraste);
                Mat imagenBordes2 = edgeDetector2.EdgeImage(imagenGrisContraste);
                Mat imagenBinaria, imagenAberturaRelleno;
                CalculoMatrizBinariaYRelleno(imagenBordes2, out imagenBinaria, out imagenAberturaRelleno);

                Mat mascaraInv = 255 - imagenAberturaRelleno;

                Mat DistSureFg  = new Mat();
                Mat AreasSureFg = new Mat();
                Mat Unknown     = new Mat();
                AreasSureFg += 1;
                Cv2.DistanceTransform(imagenAberturaRelleno, DistSureFg, DistanceTypes.L1, DistanceMaskSize.Mask5);
                int numAreas = Cv2.ConnectedComponents(imagenAberturaRelleno, AreasSureFg, PixelConnectivity.Connectivity8);
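                // The distance transform and connected-component labels feed the per-segment analysis below,
                // which erodes each blob down to the sure-foreground markers used by the watershed.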

                float[,] distValues = new float[DistSureFg.Rows, DistSureFg.Cols];

                for (int i = 0; i < DistSureFg.Rows; i++)
                {
                    for (int j = 0; j < DistSureFg.Cols; j++)
                    {
                        distValues[i, j] = DistSureFg.At <float>(i, j);
                    }
                }

                Segment[] segments = new Segment[numAreas];

                for (int i = 0; i < AreasSureFg.Rows; i++)
                {
                    for (int j = 0; j < AreasSureFg.Cols; j++)
                    {
                        int   m = AreasSureFg.At <Int32>(i, j);
                        byte  pixelSurrounding = 0;
                        float distance         = (float)0;

                        //if (i >= 1)
                        //{
                        //    distance = distValues[i - 1, j];
                        //    if (distance == 2)
                        //    {
                        //        pixelSurrounding |= Segment.PIXEL_SURROUNDED_LEFT;
                        //    }
                        //}
                        //if (i < AreasSureFg.Rows - 1)
                        //{
                        //    distance = distValues[i + 1, j];
                        //    if (distance == 2)
                        //    {
                        //        pixelSurrounding |= Segment.PIXEL_SURROUNDED_RIGHT;
                        //    }
                        //}
                        //if (j >= 1)
                        //{
                        //    distance = distValues[i, j - 1];
                        //    if (distance == 2)
                        //    {
                        //        pixelSurrounding |= Segment.PIXEL_SURROUNDED_DOWN;
                        //    }
                        //}
                        //if (j < AreasSureFg.Cols - 1)
                        //{
                        //    distance = distValues[i, j + 1];
                        //    if (distance == 2)
                        //    {
                        //        pixelSurrounding |= Segment.PIXEL_SURROUNDED_UP;
                        //    }
                        //}

                        SegmentPixelData newPixel = new SegmentPixelData()
                        {
                            Distance          = distValues[i, j],
                            CoordsXY          = new int[] { i, j },
                            Concave           = 0,
                            Indexes           = new int[] { -1, -1 },
                            PixelsSurrounding = pixelSurrounding,
                            SubsegmentLabel   = 0,
                        };

                        if (segments[m] == null)
                        {
                            segments[m] = new Segment()
                            {
                                SegmentId = m,
                                PixelData = new List <SegmentPixelData>(),
                            };
                        }
                        else
                        {
                            segments[m].MaxDistance = (segments[m].MaxDistance > newPixel.Distance) ? (int)segments[m].MaxDistance : (int)newPixel.Distance;
                            segments[m].PixelData.Add(newPixel);
                        }
                    }
                }

                Mat Centroides = new Mat();
                imagenAberturaRelleno.CopyTo(Centroides);
                var indexadorCentroides = Centroides.GetGenericIndexer <byte>();
                var indexadorFiguras    = AreasSureFg.GetGenericIndexer <Int32>();

                foreach (var s in segments.Where(s => s.Circularity <= 0.9))
                {
                    int distancia = 0;
                    if (s.Circularity > 0.7)
                    {
                        distancia = 5;
                    }
                    else if (s.Circularity > 0.5)
                    {
                        distancia = 5;
                    }
                    else if (s.Circularity > 0.25)
                    {
                        distancia = 6;
                    }
                    else
                    {
                        distancia = 6;
                    }

                    distancia = (distancia < s.MaxDistance) ? distancia : s.MaxDistance - 1;

                    foreach (var p in s.PixelData.Where(p => p.Distance <= distancia))
                    {
                        if (imagenAberturaRelleno.At <byte>(p.CoordsXY[0], p.CoordsXY[1]) != (byte)0)
                        {
                            indexadorCentroides[p.CoordsXY[0], p.CoordsXY[1]] = 0;
                        }
                    }
                }

                Cv2.Subtract(imagenAberturaRelleno + 255, Centroides, Unknown);

                #region segmentStuff
                //List<int> indexConcavos = segments.Where(s => s.Circularity > 1).Select(s => s.SegmentId).ToList();


                //foreach (var s in segments.Where(s => s.Circularity < 1.1 && s.Circularity > 0.9))
                //{
                //    foreach (var p in s.PixelData/*.Where(p => p.Distance == 1)*/)
                //    {
                //        if (imagenAberturaRelleno.At<byte>(p.CoordsXY[0], p.CoordsXY[1]) != (byte)0)
                //        {
                //            indexadorFiguras[p.CoordsXY[0], p.CoordsXY[1]] = 255;
                //        }
                //    }
                //}

                //foreach (var s in segments.Where(s => s.Circularity >= 1.1))
                //{
                //    foreach (var p in s.PixelData/*.Where(p => p.Distance == 1)*/)
                //    {
                //        if (imagenAberturaRelleno.At<byte>(p.CoordsXY[0], p.CoordsXY[1]) != (byte)0)
                //        {
                //            indexadorFiguras[p.CoordsXY[0], p.CoordsXY[1]] = 255;
                //        }
                //    }
                //}

                //foreach (var s in segments)
                //{
                //    s.SetPixelConcavity();
                //    s.Segmentation();
                //    foreach (var p in s.PixelData.Where(p => p.Distance == 1))
                //    {
                //        if (p.Concave == 1)
                //        {
                //            indexadorFiguras[p.CoordsXY[0], p.CoordsXY[1]] = 255;
                //        }
                //        if (p.Concave == -1)
                //        {
                //            indexadorFiguras[p.CoordsXY[0], p.CoordsXY[1]] = 255;
                //        }
                //    }
                //}

                //foreach (var s in segments)
                //{
                //    //s.SetPixelConcavity();
                //    //s.Segmentation();
                //    foreach (var p in s.PixelData.Where(p => p.Distance == 2))
                //    {
                //        indexadorFiguras[p.CoordsXY[0], p.CoordsXY[1]] = 230;
                //    }
                //}

                //imagenAberturaRelleno.CopyTo(SureFg);
                #endregion

                Mat colormap   = new Mat();
                Mat Marcadores = new Mat();
                Cv2.ConnectedComponents(Centroides, Marcadores);
                Marcadores = Marcadores + 1;
                var indexador2 = Marcadores.GetGenericIndexer <Int32>();
                for (int i = 0; i < Unknown.Rows; i++)
                {
                    for (int j = 0; j < Unknown.Cols; j++)
                    {
                        if (Unknown.At <byte>(i, j) == 255)
                        {
                            indexador2[i, j] = 0;
                        }
                    }
                }

                Marcadores.CopyTo(colormap);
                colormap.ConvertTo(colormap, MatType.CV_8UC3);
                Cv2.ApplyColorMap(colormap, colormap, ColormapTypes.Rainbow);
                Cv2.ImWrite("D:\\Dictuc\\marcadores.png", Marcadores);

                //Mat img1 = new Mat();
                //imagen.CopyTo(img1);
                Mat DistColor = new Mat();
                //imagenGrisContraste = 255 - imagenGrisContraste;
                Cv2.CvtColor(imagenAberturaRelleno, DistColor, ColorConversionCodes.GRAY2BGR);
                DistColor.ConvertTo(DistColor, MatType.CV_8U);

                Cv2.Watershed(DistColor, Marcadores);


                Cv2.ImWrite("D:\\Dictuc\\watersheedIn.png", DistColor);

                var indexador4 = imagen.GetGenericIndexer <Vec3i>();
                //for (int i = 0; i < imagen.Rows; i++)
                //{
                //    for (int j = 0; j < imagen.Cols; j++)
                //    {
                //        //if (Centroides.At<byte>(i, j) > 0)
                //        //    indexador4[i, j] = new Vec3i(0, 0, 255);
                //        if (Marcadores.At<Int32>(i, j) == -1)
                //            indexador4[i, j] = new Vec3i(255, 20, 20);
                //    }
                //}


                for (int i = 0; i < imagen.Rows; i++)
                {
                    for (int j = 0; j < imagen.Cols; j++)
                    {
                        //if (Centroides.At<byte>(i, j) > 0)
                        //    indexador4[i, j] = new Vec3i(0, 0, 255);
                        if (imagenBordes.At <byte>(i, j) > 0)
                        {
                            indexador4[i, j] = new Vec3i(255, 20, 20);
                        }
                    }
                }

                Mat seg = new Mat();
                Marcadores.CopyTo(seg);
                var indexador5 = seg.GetGenericIndexer <int>();
                for (int i = 0; i < Marcadores.Rows; i++)
                {
                    for (int j = 0; j < Marcadores.Cols; j++)
                    {
                        indexador5[i, j] = (Math.Abs(indexador5[i, j]) > 1) ? 255 : 0;
                    }
                }
                Mat kE1 = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(1, 1));
                Cv2.Erode(seg, seg, kE1, iterations: 3);
                int thrs1 = 1500;
                int thrs2 = 1800;
                Mat edge1 = new Mat();
                seg.ConvertTo(seg, MatType.CV_8U);
                Cv2.Canny(seg, edge1, thrs1, thrs2, apertureSize: 5);

                SimpleBlobDetector.Params params1 = new SimpleBlobDetector.Params()
                {
                    MinThreshold        = 0,
                    MaxThreshold        = 255,
                    FilterByArea        = true,
                    MinArea             = 15,
                    FilterByCircularity = false,
                    MinCircularity      = (float)0.01,
                    FilterByConvexity   = false,
                    MinConvexity        = (float)0.1,
                    FilterByInertia     = false,
                    MinInertiaRatio     = (float)0.01,
                };
                SimpleBlobDetector detectorBlobs = SimpleBlobDetector.Create(params1);
                KeyPoint[]         segmentosBlob = detectorBlobs.Detect(edge1);

                Mat segmentosBlobMat = new Mat(1, segmentosBlob.Count(), MatType.CV_32FC1);
                var indexador6       = segmentosBlobMat.GetGenericIndexer <float>();
                for (int i = 0; i < segmentosBlob.Count(); i++)
                {
                    indexador6[0, i] = segmentosBlob[i].Size;
                }

                Mat      hist   = new Mat();
                Rangef[] ranges = { new Rangef(0, (float)segmentosBlob.Max(x => x.Size)) };
                Cv2.CalcHist(new Mat[] { segmentosBlobMat }, new int[] { 0 }, null, hist, 1, new int[] { 100 }, ranges, uniform: true, accumulate: true);
                float[] histAcumulado           = new float[hist.Rows];
                float[] histAcumuladoPorcentaje = new float[11];
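                // Build the cumulative blob-size histogram and interpolate the sizes at the 10%..100% percentiles (granulometry curve).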

                histAcumulado[0] = hist.At <float>(0, 0);

                for (int i = 1; i < hist.Rows; i++)
                {
                    histAcumulado[i] = hist.At <float>(i, 0) + histAcumulado[i - 1];
                }

                int k = 1;
                for (int i = 1; i < histAcumuladoPorcentaje.Count(); i++)
                {
                    for (; k < hist.Rows; k++)
                    {
                        float porcentajeActual    = histAcumulado[k] / segmentosBlob.Count() * 100;
                        float porcentajeAnterior  = histAcumulado[k - 1] / segmentosBlob.Count() * 100;
                        float porcentajeRequerido = (float)((i < 10) ? i * 10 : 99.3);
                        if (porcentajeRequerido <= porcentajeActual)
                        {
                            float tamañoPorcentajeActual        = (float)(k * (float)segmentosBlob.Max(x => x.Size) / 100.0);
                            float tamañoPorcentajeAnterior      = (float)((k - 1) * (float)segmentosBlob.Max(x => x.Size) / 100.0);
                            float tasaVariacionTamañoPorcentaje = (tamañoPorcentajeActual - tamañoPorcentajeAnterior) / (porcentajeActual - porcentajeAnterior);
                            histAcumuladoPorcentaje[i] = tamañoPorcentajeAnterior + tasaVariacionTamañoPorcentaje * (i * 10 - porcentajeAnterior);
                            break;
                        }
                    }
                }

                for (int i = 0; i < histAcumuladoPorcentaje.Count(); i++)
                {
                    Console.Write(histAcumuladoPorcentaje[i] + ",");
                }
                Console.WriteLine("");

                //            data1 = [];

                //              for i in range(0, len(keypoints1)):

                //                data1.append(keypoints1[i].size * coefTamano)
                //                #tamano.write(str(i)+'\t'+str(keypoints1[i].size*2*0.3)+'\n')
                //  cv2.line(im_with_keypoints1, (int(float(keypoints1[i].pt[0] - keypoints1[i].size)), int(float(keypoints1[i].pt[1]))), (int(float(keypoints1[i].pt[0] + keypoints1[i].size)), int(float(keypoints1[i].pt[1]))), (255, 0, 0), 1)

                //                cv2.line(im_with_keypoints1, (int(float(keypoints1[i].pt[0])), int(float(keypoints1[i].pt[1] - keypoints1[i].size))), (int(float(keypoints1[i].pt[0])), int(float(keypoints1[i].pt[1] + keypoints1[i].size))), (255, 0, 0), 1)


                //# print(data1)
                //n1, bins1, patches1 = hist(data1, 200,[0, max(data1)], normed = 100, cumulative = True, bottom = True, histtype = 'stepfilled', align = 'mid', orientation = 'vertical', rwidth = 1, log = False, color = "r")

                //              tamano = open(temp + "instancia_" + instancia + ".txt", "w")


                //              x = np.array(bins1)

                //              y = np.append([0], n1)

                //                  xnew = [x[1], x[21], x[36], x[45], x[53], x[60], x[69], x[78], x[88], x[97], x[200]]
                //ynew = [y[1], y[21], y[36], y[45], y[53], y[60], y[69], y[78], y[88], y[97], y[200]]

                //tamano.write('INSERT INTO [dbo].[Granulometria](Cod_Instancia,Fecha,P_10,P_20,P_30,P_40,P_50,P_60,P_70,P_80,P_90,P_100, Filename) values (')
                //tamano.write(instancia + ",CONVERT(datetime, '" + sys.argv[1][0:4] + "-" + sys.argv[1][4:6] + "-" + sys.argv[1][6:8] + ' ' + sys.argv[1][9:11] + ':' + sys.argv[1][11:13] + ':' + sys.argv[1][13:15] + "', 120)")

                //for j in range(1, len(xnew)):
                //  #tamano.write (str(j)+'\t'+str(round(xnew[j],1))+'\t'+str(round(ynew[j]*100,2))+'\n')
                //  tamano.write(',' + str(round(xnew[j], 1)))

                //tamano.write(",'" + sys.argv[1] + " - Resultado.jpg'")
                //tamano.write(')')

                //CvXImgProc.Thinning(mascaraInv, mascaraInv, ThinningTypes.ZHANGSUEN);

                Mat imWithKeypoints1 = new Mat();
                Cv2.DrawKeypoints(imagen, segmentosBlob, imWithKeypoints1, new Scalar(0, 0, 255), DrawMatchesFlags.DrawRichKeypoints);


                var dataTamaños = segmentosBlob.Select(s => s.Size).ToArray();


                Cv2.ImWrite("D:\\Dictuc\\output0" + numImg + ".png", imagen);
                Cv2.ImWrite("D:\\Dictuc\\output1" + numImg++ + ".png", imWithKeypoints1);

                Cv2.ImShow("Segmentado", imagen);
                Cv2.ImShow("GrisContraste", imagenGrisContraste);
                Cv2.ImShow("bordes90", imagenBordes);
                Cv2.ImShow("bordes50", imagenBordes2);

                salida.Write(imagen);

                //System.Threading.Thread.Sleep(10);
                Cv2.WaitKey(10);

                imagenRuidoFiltrado.Release();
                imagenGrisContraste.Release();
                imagenGrisFrecAltasProc.Release();
                imagenBordes.Release();
                imagenBinaria.Release();
                imagenAberturaRelleno.Release();
            }
        }
Example #12
        private unsafe void OpenCV(ref Bitmap bitmap)
        {
            Mat         testMat = BitmapConverter.ToMat(bitmap);
            MatOfDouble mu      = new MatOfDouble();
            MatOfDouble sigma   = new MatOfDouble();

            Cv2.MeanStdDev(testMat, mu, sigma);
            double mean = mu.GetArray(0, 0)[0];
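            // NOTE: the mean intensity is computed here but not used further below.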

            mu.Dispose();
            sigma.Dispose();

            SimpleBlobDetector.Params circleParameters = new SimpleBlobDetector.Params();
            circleParameters.FilterByCircularity = true;
            circleParameters.MinCircularity      = (float)0.85;
            circleParameters.MaxCircularity      = (float)1;
            circleParameters.MinArea             = 30; // Modify the value on the fly (TODO use bigger circle)

            SimpleBlobDetector detectCircleBlobs = new SimpleBlobDetector(circleParameters);

            fingerPoints = detectCircleBlobs.Detect(testMat);
            detectCircleBlobs.Dispose();

            // If a finger blob was found
            if (fingerPoints != null && fingerPoints.Length > 0)
            {
                this.fingerSize = 0;
                int fingerIndex = -1;
                for (int i = 0; i < fingerPoints.Length; i++)
                {
                    if (fingerPoints[i].Size >= this.fingerSize)
                    {
                        this.fingerSize = (int)fingerPoints[i].Size;
                        fingerIndex     = i;
                    }
                }

                if (fingerIndex != -1)
                {
                    OpenCvSharp.CPlusPlus.Point coordinate = fingerPoints[fingerIndex].Pt;
                    this.fingerSize = (int)((fingerPoints[fingerIndex].Size) * Math.Sqrt(2));
                    testMat.Set <Vec3b>(coordinate.Y, coordinate.X, new Vec3b(0, 255, 0));
                    RotatedRect rRect           = new RotatedRect(new Point2f(coordinate.X, coordinate.Y), new Size2f(this.fingerSize, this.fingerSize), 0);
                    Point2f[]   circleVerticies = rRect.Points();
                    //this.fingerCoordinates[0] = coordinate.X;
                    //this.fingerCoordinates[1] = coordinate.Y;
                    int height = (int)(circleVerticies[0].Y - circleVerticies[1].Y);
                    int width  = (int)(circleVerticies[2].X - circleVerticies[1].X);
                    int startX = (int)(circleVerticies[0].X);
                    int startY = (int)(circleVerticies[1].Y);
                    this.fingerDepth = MapColortoDepth(startX, startY, this.fingerSize, this.fingerSize);
                    OpenCvSharp.CPlusPlus.Rect featureRect = new OpenCvSharp.CPlusPlus.Rect(startX, startY, this.fingerSize, this.fingerSize);

                    // Draw box around finger
                    for (int j = 0; j < 4; j++)
                    {
                        Cv2.Line(testMat, circleVerticies[j], circleVerticies[(j + 1) % 4], new Scalar(0, 255, 0));
                    }

                    Boolean    intersectOccurance = false;
                    List <int> intersectIndicies  = new List <int>();
                    for (int i = 0; i < this.controls.Count; i++)
                    {
                        if (this.controls[i].boundingRect.IntersectsWith(featureRect))
                        {
                            double diff = fingerDepth - this.controls[i].depth;
                            if (Math.Abs(diff) < 0.5)
                            {
                                intersectOccurance = true;
                                intersectIndicies.Add(i);
                            }
                        }
                    }

                    System.Text.StringBuilder append = new System.Text.StringBuilder();
                    if (intersectOccurance)
                    {
                        for (int i = 0; i < intersectIndicies.Count; i++)
                        {
                            append.Append(" " + this.controls[intersectIndicies[i]].title + " " + intersectIndicies[i].ToString());
                        }
                        this.OutputText = "Pressed Button" + append; //TODO Make this more obvious
                    }
                    else
                    {
                        this.OutputText = "No State";
                    }
                }
            }

            bitmap = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(testMat);
            testMat.Dispose();
        }
        static void Main(string[] args)
        {
            var srcImage = new Mat(@"..\..\Images\cvlbl.png");
            Cv2.ImShow("Source", srcImage);
            Cv2.WaitKey(1); // do events

            var binaryImage = new Mat(srcImage.Size(), MatType.CV_8UC1);

            Cv2.CvtColor(srcImage, binaryImage, ColorConversion.BgrToGray);
            Cv2.Threshold(binaryImage, binaryImage, thresh: 100, maxval: 255, type: ThresholdType.Binary);

            var detectorParams = new SimpleBlobDetector.Params
            {
                //MinDistBetweenBlobs = 10, // 10 pixels between blobs
                //MinRepeatability = 1,

                //MinThreshold = 100,
                //MaxThreshold = 255,
                //ThresholdStep = 5,

                FilterByArea = false,
                //FilterByArea = true,
                //MinArea = 0.001f, // 10 pixels squared
                //MaxArea = 500,

                FilterByCircularity = false,
                //FilterByCircularity = true,
                //MinCircularity = 0.001f,

                FilterByConvexity = false,
                //FilterByConvexity = true,
                //MinConvexity = 0.001f,
                //MaxConvexity = 10,

                FilterByInertia = false,
                //FilterByInertia = true,
                //MinInertiaRatio = 0.001f,

                FilterByColor = false
                //FilterByColor = true,
                //BlobColor = 255 // to extract light blobs
            };
            var simpleBlobDetector = new SimpleBlobDetector(detectorParams);
            var keyPoints = simpleBlobDetector.Detect(binaryImage);

            Console.WriteLine("keyPoints: {0}", keyPoints.Length);
            foreach (var keyPoint in keyPoints)
            {
                Console.WriteLine("X: {0}, Y: {1}", keyPoint.Pt.X, keyPoint.Pt.Y);
            }

            var imageWithKeyPoints = new Mat();
            Cv2.DrawKeypoints(
                    image: binaryImage,
                    keypoints: keyPoints,
                    outImage: imageWithKeyPoints,
                    color: Scalar.FromRgb(255, 0, 0),
                    flags: DrawMatchesFlags.DrawRichKeypoints);

            Cv2.ImShow("Key Points", imageWithKeyPoints);
            Cv2.WaitKey(1); // do events

            Cv2.WaitKey(0);

            Cv2.DestroyAllWindows();
            srcImage.Dispose();
            imageWithKeyPoints.Dispose();
        }