Example #1
 //SGBM disparity computation
 private void ucBtnExt_sgbmDisCal_BtnClick(object sender, EventArgs e)
 {
     if (IS_USE_SGBM_FLAG)
     {
         Mat tleftImg_gray  = new Mat();
         Mat trightImg_gray = new Mat();
         //Convert to grayscale
         Cv2.CvtColor(leftImg_sgbm_rectify, tleftImg_gray, ColorConversionCodes.BGR2GRAY);
         Cv2.CvtColor(rightImg_sgbm_rectify, trightImg_gray, ColorConversionCodes.BGR2GRAY);
         //Compute the disparity map
         double min, max;
         //Time the SGBM computation
         Stopwatch sw = new Stopwatch();
         sw.Start();
         sgbmMatch.Compute(tleftImg_gray, trightImg_gray, sgbmDisImg);
         sw.Stop();
         TimeSpan ts = sw.Elapsed;   //elapsed time of the SGBM computation
         Cv2.MinMaxLoc(sgbmDisImg, out min, out max);
         //Convert to an 8-bit image
         sgbmDisImg.ConvertTo(sgbm_dis8UImg, MatType.CV_8UC1, 255 / (max - min), -255 * min / (max - min));
         //Convert to a pseudo-color image
         sgbm_dis24UImg = new Mat(sgbmDisImg.Size(), MatType.CV_8UC3);
         Cv2.ApplyColorMap(sgbm_dis8UImg, sgbm_dis24UImg, ColormapTypes.Jet);
         //Compute the valid disparity ROI for SGBM
         sgbm_validDisROI = Cv2.GetValidDisparityROI(ShareData.PixROI1, ShareData.PixROI2, sgbm_minDisparity,
                                                     sgbm_numofDisparities, sgbm_blockSize);
         ShareData.Log = "[msg] SGBM算法计算时间:" + ts.TotalMilliseconds.ToString() + "ms" + "\r\n"
                         + "SGBM视差图计算成功,可以测量了";
     }
     else
     {
         ShareData.Log = "[warning] 请先加载图片";
     }
 }
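
A note on the pattern above: ApplyColorMap expects an 8-bit input, which is why the 16-bit SGBM disparity is first rescaled with MinMaxLoc/ConvertTo. Below is a minimal sketch of that normalize-then-colorize step in isolation; the method name and variables are illustrative, not from the example, and the usual OpenCvSharp usings are assumed.

 //Sketch only: normalize a 16-bit disparity map to CV_8UC1 and colorize it
 static Mat ColorizeDisparity(Mat disparity16S)
 {
     // Find the disparity range so it can be stretched to 0..255
     Cv2.MinMaxLoc(disparity16S, out double min, out double max);
     var dis8U = new Mat();
     // ConvertTo computes dst = src * alpha + beta, mapping [min, max] onto [0, 255]
     disparity16S.ConvertTo(dis8U, MatType.CV_8UC1, 255 / (max - min), -255 * min / (max - min));
     var colored = new Mat();
     Cv2.ApplyColorMap(dis8U, colored, ColormapTypes.Jet);
     return colored;
 }
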
Example #2
        public Mat mappingPalette(Mat frame)
        {
            lock (this.Palette)
            {
                var ret = new Mat();
                switch (this.Palette)
                {
                case "Grayscale":
                    ret = frame;
                    break;

                case "Default":
                    ret = frame.LUT(FLIR_DEFAULT_PALETTE);
                    break;

                case "IronBlack":
                    ret = frame.LUT(IRON_BLACK_PALETTE);
                    break;

                default:
                    var ctype = (ColormapTypes)Enum.Parse(typeof(ColormapTypes), this.Palette);
                    Cv2.ApplyColorMap(frame, ret, ctype);
                    break;
                }

                return(ret);
            }
        }
Example #3
        public static Mat HeatMap(this Mat mat)
        {
            Mat heatMap = new Mat();

            Cv2.ApplyColorMap(mat, heatMap, ColormapTypes.Jet);
            return(heatMap);
        }
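
Because HeatMap() is defined as an extension method on Mat, it can be chained onto any 8-bit image. A hypothetical call site (the file names are illustrative):

            using (var gray = Cv2.ImRead("depth.png", ImreadModes.Grayscale))
            using (var colored = gray.HeatMap())
            {
                Cv2.ImWrite("depth_jet.png", colored);
            }
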
Example #4
        //public const long BUFFERSIZE = 640 * 480;
        //public static byte[] gBuffer = new byte[BUFFERSIZE];
        public void RefreshUI()
        {
            // Initialize gBuffer
            //int index = 0;
            //for (int i = 0; i < 480; i++)
            //{
            //    for (int j = 0; j < 640; j++)
            //    {
            //        gBuffer[index++] = 1;
            //    }
            //}
            //byte[] array = binaryReader.ReadBytes(arraySize);
            int arraySize = 480 * 640;

            byte[,] array2 = new byte[480, 640];
            //Convert the 1-D buffer into a 2-D array
            for (int j = 0; j < arraySize; j++)
            {
                array2[j / 640, j % 640] = DataAcquire.gBuffer[j];
            }

            Mat srcG = ConvertFile.ArrayToMat(array2);
            Mat dstG = new Mat();

            Cv2.ApplyColorMap(srcG, dstG, ColormapTypes.Jet);

            Bitmap bitmapG = ConvertFile.MatToBitmap(dstG);

            pictureBox.Image = bitmapG;
        }
Example #5
        public override object Process(object input)
        {
            var list = input as object[];

            var net = list.First(z => z is Nnet) as Nnet;

            var f1 = net.Nodes.FirstOrDefault(z => !z.IsInput);


            var           rets3 = net.OutputDatas[f1.Name] as float[];
            InternalArray arr   = new InternalArray(f1.Dims);

            arr.Data = rets3.Select(z => (double)z).ToArray();


            Mat mat = new Mat(f1.Dims[2],
                              f1.Dims[3], MatType.CV_8UC1,
                              arr.Data.Select(z => (byte)(z * 255)).ToArray());

            Cv2.ApplyColorMap(mat, mat, ColormapTypes.Magma);
            mat = mat.Resize(net.lastReadedMat.Size());
            if (StackWithSourceImage)
            {
                Cv2.HConcat(net.lastReadedMat, mat, mat);
            }

            LastMat = mat;
            Pbox.Invoke((Action)(() =>
            {
                Pbox.Image = BitmapConverter.ToBitmap(mat);
            }));

            return(mat);
        }
Example #6
        /// <summary>
        /// Handles the FrameReceived event
        /// </summary>
        /// <param name="sender">The Sender</param>
        /// <param name="args">The FrameEventArgs</param>
        private void OnFrameReceived(object sender, FrameEventArgs args)
        {
            // Start an async invoke in case this method was not
            // called by the GUI thread.
            if (InvokeRequired == true)
            {
                BeginInvoke(new FrameReceivedHandler(this.OnFrameReceived), sender, args);
                return;
            }

            if (true == m_Acquiring)
            {
                Mat img = BitmapConverter.ToMat(new Bitmap(args.Image));
                img = img.CvtColor(ColorConversionCodes.RGB2GRAY);
                Mat colorMapImg = new Mat();
                Cv2.ApplyColorMap(img, colorMapImg, ColormapTypes.Jet);
                //Cv2.ImShow("Jet", colorMapImg);

                // Display image
                //Image image = args.Image;
                Image image = BitmapConverter.ToBitmap(colorMapImg);
                if (null != image)
                {
                    m_PictureBox.Image = image;
                }
                else
                {
                    LogMessage("An acquisition error occurred. Reason: " + args.Exception.Message);

                    try
                    {
                        try
                        {
                            // Start asynchronous image acquisition (grab) in selected camera
                            m_VimbaHelper.StopContinuousImageAcquisition();
                        }
                        finally
                        {
                            m_Acquiring = false;
                            UpdateControls();
                            m_CameraList.Enabled = true;
                        }

                        LogMessage("Asynchronous image acquisition stopped.");
                    }
                    catch (Exception exception)
                    {
                        LogError("Error while stopping asynchronous image acquisition. Reason: " + exception.Message);
                    }
                }
            }
        }
Example #7
        public void ApplyColorMap()
        {
            using var src = Image("building.jpg", ImreadModes.Color);
            using var dst = new Mat();
            Cv2.ApplyColorMap(src, dst, ColormapTypes.Cool);

            ShowImagesWhenDebugMode(src, dst);

            using var userColor = new Mat(256, 1, MatType.CV_8UC3, Scalar.All(128));
            Cv2.ApplyColorMap(src, dst, userColor);

            ShowImagesWhenDebugMode(src, dst);
        }
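
The second call above uses the ApplyColorMap overload that takes a user-defined colormap: a 256x1 CV_8UC3 lookup table instead of a ColormapTypes value. A minimal sketch of building such a table as a blue-to-red gradient (reusing src from the test above; the other variable names are illustrative):

            using var userLut = new Mat(256, 1, MatType.CV_8UC3);
            for (int i = 0; i < 256; i++)
            {
                // BGR order: blue fades out while red fades in
                userLut.Set<Vec3b>(i, 0, new Vec3b((byte)(255 - i), 0, (byte)i));
            }
            using var gradientDst = new Mat();
            Cv2.ApplyColorMap(src, gradientDst, userLut);
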
Example #8
        private void CreatImg()
        {
            if (Filter.SelectedIndex == 0 || Filter.SelectedIndex == -1)
            {
                for (int i = 0; i < 8640; i = i + 4)
                {
                    _iniImage[i / 4] = BitConverter.ToSingle(_recvBuf, i);
                }

                for (int i = _imGray.Height / 2; i < _imGray.Height; i++)
                {
                    for (int j = 0; j < _imGray.Width; j++)
                    {
                        float temp = _iniImage[(i - 27) + j * 27] * 255 / 50000;
                        //float temp = iniImage[i - 27 + (j + 8) * 27] * iniImage[i - 27 + (j + 8) * 27] * iniImage[i - 27 + (j + 8) * 27];
                        _imGray.Set <float>(i, j, temp);
                    }
                }

                Cv2.Resize(_imGray, imGrayResult2, new OpenCvSharp.Size(600, 300));
            }
            else
            {
                for (int i = 0; i < 8640; i = i + 4)
                {
                    _iniImage[i / 4] = BitConverter.ToSingle(_recvBuf, i);
                }

                for (int i = _imGray.Height / 2; i < _imGray.Height; i++)
                {
                    for (int j = 0; j < _imGray.Width; j++)
                    {
                        float temp = _iniImage[(i - 27) + j * 27] * 255 / 10000;

                        _imGray.Set <float>(i, j, temp);
                    }
                }

                Cv2.Resize(_imGray, imGrayResult2, new OpenCvSharp.Size(600, 300));
            }

            Cv2.ConvertScaleAbs(imGrayResult2, imGrayResult1);

            Point2f point2F = new Point2f(imGrayResult1.Width / 2, imGrayResult1.Height);

            Cv2.LinearPolar(imGrayResult1, recover, point2F, 300, InterpolationFlags.WarpInverseMap);
            Cv2.ApplyColorMap(recover, dst, ColormapTypes.Jet);

            Cv2.ImShow("Demo", dst);
        }
Example #9
 //Uses a WinForms timer
 private void timer_disparityMeasure_Tick(object sender, EventArgs e)
 {
     if (IS_USE_BM_FLAG)
     {
         if (BM_IMG_FLAG)
         {
             //Controls whether images are read from the main form
             tLeftImg  = ShareData.LeftImg;
             tRightImg = ShareData.RightImg;
             if (BM_DIS_CAL_FLAG && !IS_IMG_SHOW_PAUSE)
             {
                 validDisROI = Cv2.GetValidDisparityROI(ShareData.PixROI1, ShareData.PixROI2, bm_minDisparity, bm_numOfDisparities, bm_SADWinSize);
                 //Convert to grayscale
                 Cv2.CvtColor(tLeftImg, tLeftImg_g, ColorConversionCodes.BGR2GRAY);
                 Cv2.CvtColor(tRightImg, tRightImg_g, ColorConversionCodes.BGR2GRAY);
                 //Compute the disparity map with BM
                 bmMatch.Compute(tLeftImg_g, tRightImg_g, bmDisImg);             //compute the disparity map
                 //Convert to an 8-bit grayscale image
                 double min, max;
                 Cv2.MinMaxLoc(bmDisImg, out min, out max);
                 bmDist8U = new Mat(bmDisImg.Size(), MatType.CV_8UC1);
                 bmDisImg.ConvertTo(bmDist8U, MatType.CV_8UC1, 255 / (max - min), -255 * min / (max - min));
                 //Convert to a pseudo-color image
                 bmDist24U = new Mat(bmDisImg.Size(), MatType.CV_8UC3);
                 Cv2.ApplyColorMap(bmDist8U, bmDist24U, ColormapTypes.Jet);
             }
         }
         //Control which image is displayed
         if (IS_SHOW_BM_DISPARITY_FLAG)
         {
             //Tag the displayed image type
             IMG_TYPE_TAG = 2;
             Cv2.Rectangle(bmDist24U, validDisROI, new Scalar(0, 0, 255), 1);
             this.pictureBoxIpl_Img.ImageIpl = bmDist24U;
         }
         else
         {
             //Tag the displayed image type
             IMG_TYPE_TAG = 1;
             Cv2.Rectangle(tLeftImg, validDisROI, new Scalar(0, 0, 255), 1);
             this.pictureBoxIpl_Img.ImageIpl = tLeftImg;    //show the left view
         }
         //validDisROI = new Rect();
     }
     if (IS_USE_SGBM_FLAG)
     {
     }
 }
Example #10
        public byte[] Predador(byte[] file)
        {
            Mat input = ConvertToMat(file);
            Mat grayInput;

            if (input.Channels() == 1)
            {
                grayInput = input;
            }
            else
            {
                grayInput = new Mat(input.Rows, input.Cols, MatType.CV_8UC1);
                Cv2.CvtColor(input, grayInput, ColorConversionCodes.BGR2GRAY);
            }

            var coloredImage = new Mat();

            Cv2.ApplyColorMap(grayInput, coloredImage, ColormapTypes.Hsv);
            return(ConvertToByte(coloredImage));
        }
Example #11
 //Recompute after SGBM parameters change and refresh the displayed image
 private void ucBtnExt_sgbmrefresh_BtnClick(object sender, EventArgs e)
 {
     if (IS_USE_SGBM_FLAG)
     {
         /////////////Recompute the disparity map////////////////////////////
         Mat tleftImg_gray  = new Mat();
         Mat trightImg_gray = new Mat();
         Cv2.CvtColor(leftImg_sgbm, tleftImg_gray, ColorConversionCodes.BGR2GRAY);
         Cv2.CvtColor(rightImg_sgbm, trightImg_gray, ColorConversionCodes.BGR2GRAY);
         //Compute the disparity map
         double min, max;
         //Time the SGBM computation
         Stopwatch sw = new Stopwatch();
         sw.Start();
         sgbmMatch.Compute(tleftImg_gray, trightImg_gray, sgbmDisImg);
         sw.Stop();
         TimeSpan ts = sw.Elapsed;   //elapsed time of the SGBM computation
         Cv2.MinMaxLoc(sgbmDisImg, out min, out max);
         //Convert to an 8-bit image
         sgbmDisImg.ConvertTo(sgbm_dis8UImg, MatType.CV_8UC1, 255 / (max - min), -255 * min / (max - min));
         //Convert to a pseudo-color image
         sgbm_dis24UImg = new Mat(sgbmDisImg.Size(), MatType.CV_8UC3);
         Cv2.ApplyColorMap(sgbm_dis8UImg, sgbm_dis24UImg, ColormapTypes.Jet);
         //Compute the valid disparity ROI for SGBM
         sgbm_validDisROI = Cv2.GetValidDisparityROI(ShareData.PixROI1, ShareData.PixROI2, sgbm_minDisparity,
                                                     sgbm_numofDisparities, sgbm_blockSize);
         /////////////////Refresh the display/////////////////////
         if (SGBM_IMG_TYPE_TAG == 1)
         {
             Cv2.Rectangle(leftImg_sgbm, sgbm_validDisROI, new Scalar(0, 0, 255), 1);
             this.pictureBoxIpl_Img.ImageIpl = this.leftImg_sgbm;
         }
         else if (SGBM_IMG_TYPE_TAG == 2)
         {
             Cv2.Rectangle(sgbm_dis24UImg, sgbm_validDisROI, new Scalar(255, 255, 255), 1);
             this.pictureBoxIpl_Img.ImageIpl = this.sgbm_dis24UImg;
         }
         ShareData.Log = "[msg] Refresh succeeded!" + "\r\n" +
                         "SGBM computation time: " + ts.TotalMilliseconds.ToString() + "ms";
     }
 }
Example #12
        public bool oneShot(string path, int height = 0, int width = 0)
        {
            try
            {
                IGrabResult grabResult = grabStart(height, width);

                using (grabResult)
                {
                    if (grabResult.GrabSucceeded)
                    {
                        // convert image from basler IImage to OpenCV Mat
                        Mat img = convertIImage2Mat(grabResult);
                        // convert image from BayerBG to RGB
                        Cv2.CvtColor(img, img, ColorConversionCodes.BayerBG2GRAY);

                        Mat histo   = new Mat();
                        Mat heatmap = new Mat();
                        Mat dst     = img.Clone();

                        // Apply Histogram
                        histo = cvProcess.histogram(dst);

                        // Apply ColorMap
                        Cv2.ApplyColorMap(dst, heatmap, ColormapTypes.Rainbow);

                        // Save Original Image
                        if (saveOrigin)
                        {
                            Cv2.ImWrite(path + ".origin.jpg", img);
                        }

                        // Background map subtraction
                        Cv2.Subtract(dst, -5, dst);

                        // save images
                        if (saveTracked)
                        {
                            Cv2.ImWrite(path + ".jpg", dst);
                        }
                        if (saveHisto)
                        {
                            Cv2.ImWrite(path + ".histo.jpg", histo);
                        }
                        if (saveHeatmap)
                        {
                            Cv2.ImWrite(path + ".heatmap.jpg", heatmap);
                        }

                        // resize image  to fit the imageBox
                        Cv2.Resize(dst, dst, new OpenCvSharp.Size(960, 687), 0, 0, InterpolationFlags.Linear);
                        Cv2.Resize(heatmap, heatmap, new OpenCvSharp.Size(256, 183), 0, 0, InterpolationFlags.Linear);

                        // display images
                        BitmapToImageSource(dst);
                        BitmapHistoToImageSource(histo);
                        BitmapHeatmapToImageSource(heatmap);
                    }
                    else
                    {
                        System.Windows.MessageBox.Show("Error: {0} {1}" + grabResult.ErrorCode, grabResult.ErrorDescription);
                    }
                }

                return(true);
            }
            catch (Exception exception)
            {
                if (camera.IsOpen)
                {
                    camera.Close();
                }

                System.Windows.MessageBox.Show("Exception: {0}" + exception.Message);

                return(false);
            }
        }
Example #13
        private void th_grab(int height = 0, int width = 0, int snap_wait = 500)
        {
            try
            {
                // Set the acquisition mode to free running continuous acquisition when the camera is opened.
                camera.CameraOpened += Configuration.AcquireContinuous;

                // Open the connection to the camera device.
                camera.Open();

                if (width == 0 || width > camera.Parameters[PLCamera.Width].GetMaximum())
                {
                    camera.Parameters[PLCamera.Width].SetValue(camera.Parameters[PLCamera.Width].GetMaximum());
                }
                else if (width < camera.Parameters[PLCamera.Width].GetMinimum())
                {
                    camera.Parameters[PLCamera.Width].SetValue(camera.Parameters[PLCamera.Width].GetMinimum());
                }
                else
                {
                    camera.Parameters[PLCamera.Width].SetValue(width);
                }

                if (height == 0 || height > camera.Parameters[PLCamera.Height].GetMaximum())
                {
                    camera.Parameters[PLCamera.Height].SetValue(camera.Parameters[PLCamera.Height].GetMaximum());
                }
                else if (height < camera.Parameters[PLCamera.Height].GetMinimum())
                {
                    camera.Parameters[PLCamera.Height].SetValue(camera.Parameters[PLCamera.Height].GetMinimum());
                }
                else
                {
                    camera.Parameters[PLCamera.Height].SetValue(height);
                }

                camera.Parameters[PLCamera.CenterX].SetValue(true);
                camera.Parameters[PLCamera.CenterY].SetValue(true);

                camera.StreamGrabber.Start();

                if (saveTracked)
                {
                    var    expected = new OpenCvSharp.Size(1920, 1374);
                    string filename = "D:\\save\\" + DateTime.Now.ToString("M.dd-HH.mm.ss") + ".avi";
                    videoWriter.Open(filename, OpenCvSharp.FourCCValues.XVID, 14, expected, false);
                }
                if (saveOrigin)
                {
                    var    expected = new OpenCvSharp.Size(1920, 1374);
                    string filename = "D:\\save\\" + DateTime.Now.ToString("M.dd-HH.mm.ss") + ".origin.avi";
                    originWriter.Open(filename, OpenCvSharp.FourCCValues.XVID, 14, expected, false);
                }
                if (saveHisto)
                {
                    var    expected = new OpenCvSharp.Size(256, 300);
                    string filename = "D:\\save\\" + DateTime.Now.ToString("M.dd-HH.mm.ss") + ".histo.avi";
                    histoWriter.Open(filename, OpenCvSharp.FourCCValues.XVID, 14, expected, false);
                }
                if (saveHeatmap)
                {
                    var    expected = new OpenCvSharp.Size(1920, 1374);
                    string filename = "D:\\save\\" + DateTime.Now.ToString("M.dd-HH.mm.ss") + ".heatmap.avi";
                    heatmapWriter.Open(filename, OpenCvSharp.FourCCValues.XVID, 14, expected, true);
                }

                while (grabbing)
                {
                    camera.Parameters[PLCamera.Gain].SetValue(valueGain);
                    camera.Parameters[PLCamera.ExposureTime].SetValue(valueExpTime);
                    IGrabResult grabResult = camera.StreamGrabber.RetrieveResult(5000, TimeoutHandling.ThrowException);

                    using (grabResult)
                    {
                        if (grabResult.GrabSucceeded)
                        {
                            // convert image from basler IImage to OpenCV Mat
                            Mat img = convertIImage2Mat(grabResult);

                            // convert image from BayerBG to RGB
                            Cv2.CvtColor(img, img, ColorConversionCodes.BayerBG2GRAY);
                            Cv2.Resize(img, img, new OpenCvSharp.Size(1920, 1374), 0, 0, InterpolationFlags.Linear);

                            Mat histo   = new Mat();
                            Mat heatmap = new Mat();
                            Mat dst     = img.Clone();

                            // Apply Histogram
                            histo = cvProcess.histogram(dst);

                            // Apply ColorMap
                            Cv2.ApplyColorMap(dst, heatmap, ColormapTypes.Rainbow);

                            // Apply Background map subtraction
                            Cv2.Subtract(dst, -5, dst);

                            if (saveOrigin)
                            {
                                originWriter.Write(img);
                            }

                            // Create Tracked Image
                            dst = Iso11146(img, dst);

                            Cv2.Resize(dst, dst, new OpenCvSharp.Size(1920, 1374), 0, 0, InterpolationFlags.Linear);
                            if (saveTracked)
                            {
                                videoWriter.Write(dst);
                            }
                            if (saveHisto)
                            {
                                histoWriter.Write(histo);
                            }
                            if (saveHeatmap)
                            {
                                heatmapWriter.Write(heatmap);
                            }

                            // resize image  to fit the imageBox
                            Cv2.Resize(dst, dst, new OpenCvSharp.Size(960, 687), 0, 0, InterpolationFlags.Linear);
                            Cv2.Resize(heatmap, heatmap, new OpenCvSharp.Size(256, 183), 0, 0, InterpolationFlags.Linear);

                            Cv2.Rectangle(dst, new OpenCvSharp.Rect(axis_x, axis_y, axis_scale, axis_scale), Scalar.White, 1);

                            // display images
                            BitmapToImageSource(dst);
                            BitmapHistoToImageSource(histo);
                            BitmapHeatmapToImageSource(heatmap);
                        }
                        else
                        {
                            System.Windows.MessageBox.Show("Error: {0} {1}" + grabResult.ErrorCode, grabResult.ErrorDescription);
                        }
                    }
                    count++;
                    if (count > 500)
                    {
                        count    = 0;
                        tracking = false;
                    }

                    Thread.Sleep(snap_wait);
                }
                videoWriter.Release();
                originWriter.Release();
                histoWriter.Release();
                heatmapWriter.Release();
                camera.StreamGrabber.Stop();
                camera.Close();
            }
            catch (Exception exception)
            {
                if (camera.IsOpen)
                {
                    camera.Close();
                }

                System.Windows.MessageBox.Show("Exception: {0}" + exception.Message);
            }
        }
Example #14
        static void Main(string[] args)
        {
            //Thread capturaVideoThread = new Thread(new ThreadStart(Program.CapturarVideo));
            //capturaVideoThread.Start();

            VideoCapture captura = new VideoCapture("D:\\Dictuc\\out1.avi");
            VideoWriter  salida  = new VideoWriter("D:\\Dictuc\\outSegmentado.avi", FourCC.XVID, 10.0, new Size(captura.FrameWidth, captura.FrameHeight), true);

            Mat imagenProcesada = new Mat();
            int numImg          = 0;

            while (true)
            {
                //captura.Read(imagen);
                imagen = Cv2.ImRead("D:\\uvas2.jpg");
                mutex.WaitOne();
                imagen.CopyTo(imagenProcesada);
                mutex.ReleaseMutex();
                Mat          imagenRuidoFiltrado     = FiltradoRuido(imagenProcesada);
                Mat          imagenGrisContraste     = EscalaGrisesEqualizada(imagenRuidoFiltrado);
                Mat          imagenGrisFrecAltasProc = FrecuenciasAltasPotenciadasContraste(imagenGrisContraste);
                EdgeDetector edgeDetector            = new EdgeDetector()
                {
                    Threshold           = (byte)18,
                    SparseDistance      = 3,
                    WeightPreviousPoint = (float)2.0,
                    WeightCurrentPoint  = (float)1.0,
                    WeightAfterPoint    = (float)2.0,
                };

                EdgeDetector edgeDetector2 = new EdgeDetector()
                {
                    Threshold           = (byte)20,
                    SparseDistance      = 5,
                    WeightPreviousPoint = (float)0.5,
                    WeightCurrentPoint  = (float)1.0,
                    WeightAfterPoint    = (float)0.5,
                };

                Mat imagenBordes = edgeDetector.EdgeImage(imagenGrisContraste);
                Mat imagenBordes2 = edgeDetector2.EdgeImage(imagenGrisContraste);
                Mat imagenBinaria, imagenAberturaRelleno;
                CalculoMatrizBinariaYRelleno(imagenBordes2, out imagenBinaria, out imagenAberturaRelleno);

                Mat mascaraInv = 255 - imagenAberturaRelleno;

                Mat DistSureFg  = new Mat();
                Mat AreasSureFg = new Mat();
                Mat Unknown     = new Mat();
                AreasSureFg += 1;
                Cv2.DistanceTransform(imagenAberturaRelleno, DistSureFg, DistanceTypes.L1, DistanceMaskSize.Mask5);
                int numAreas = Cv2.ConnectedComponents(imagenAberturaRelleno, AreasSureFg, PixelConnectivity.Connectivity8);

                float[,] distValues = new float[DistSureFg.Rows, DistSureFg.Cols];

                for (int i = 0; i < DistSureFg.Rows; i++)
                {
                    for (int j = 0; j < DistSureFg.Cols; j++)
                    {
                        distValues[i, j] = DistSureFg.At <float>(i, j);
                    }
                }

                Segment[] segments = new Segment[numAreas];

                for (int i = 0; i < AreasSureFg.Rows; i++)
                {
                    for (int j = 0; j < AreasSureFg.Cols; j++)
                    {
                        int   m = AreasSureFg.At <Int32>(i, j);
                        byte  pixelSurrounding = 0;
                        float distance         = (float)0;

                        //if (i >= 1)
                        //{
                        //    distance = distValues[i - 1, j];
                        //    if (distance == 2)
                        //    {
                        //        pixelSurrounding |= Segment.PIXEL_SURROUNDED_LEFT;
                        //    }
                        //}
                        //if (i < AreasSureFg.Rows - 1)
                        //{
                        //    distance = distValues[i + 1, j];
                        //    if (distance == 2)
                        //    {
                        //        pixelSurrounding |= Segment.PIXEL_SURROUNDED_RIGHT;
                        //    }
                        //}
                        //if (j >= 1)
                        //{
                        //    distance = distValues[i, j - 1];
                        //    if (distance == 2)
                        //    {
                        //        pixelSurrounding |= Segment.PIXEL_SURROUNDED_DOWN;
                        //    }
                        //}
                        //if (j < AreasSureFg.Cols - 1)
                        //{
                        //    distance = distValues[i, j + 1];
                        //    if (distance == 2)
                        //    {
                        //        pixelSurrounding |= Segment.PIXEL_SURROUNDED_UP;
                        //    }
                        //}

                        SegmentPixelData newPixel = new SegmentPixelData()
                        {
                            Distance          = distValues[i, j],
                            CoordsXY          = new int[] { i, j },
                            Concave           = 0,
                            Indexes           = new int[] { -1, -1 },
                            PixelsSurrounding = pixelSurrounding,
                            SubsegmentLabel   = 0,
                        };

                        if (segments[m] == null)
                        {
                            segments[m] = new Segment()
                            {
                                SegmentId = m,
                                PixelData = new List <SegmentPixelData>(),
                            };
                        }
                        else
                        {
                            segments[m].MaxDistance = (segments[m].MaxDistance > newPixel.Distance) ? (int)segments[m].MaxDistance : (int)newPixel.Distance;
                            segments[m].PixelData.Add(newPixel);
                        }
                    }
                }

                Mat Centroides = new Mat();
                imagenAberturaRelleno.CopyTo(Centroides);
                var indexadorCentroides = Centroides.GetGenericIndexer <byte>();
                var indexadorFiguras    = AreasSureFg.GetGenericIndexer <Int32>();

                foreach (var s in segments.Where(s => s.Circularity <= 0.9))
                {
                    int distancia = 0;
                    if (s.Circularity > 0.7)
                    {
                        distancia = 5;
                    }
                    else if (s.Circularity > 0.5)
                    {
                        distancia = 5;
                    }
                    else if (s.Circularity > 0.25)
                    {
                        distancia = 6;
                    }
                    else
                    {
                        distancia = 6;
                    }

                    distancia = (distancia < s.MaxDistance) ? distancia : s.MaxDistance - 1;

                    foreach (var p in s.PixelData.Where(p => p.Distance <= distancia))
                    {
                        if (imagenAberturaRelleno.At <byte>(p.CoordsXY[0], p.CoordsXY[1]) != (byte)0)
                        {
                            indexadorCentroides[p.CoordsXY[0], p.CoordsXY[1]] = 0;
                        }
                    }
                }

                Cv2.Subtract(imagenAberturaRelleno + 255, Centroides, Unknown);

                #region segmentStuff
                //List<int> indexConcavos = segments.Where(s => s.Circularity > 1).Select(s => s.SegmentId).ToList();


                //foreach (var s in segments.Where(s => s.Circularity < 1.1 && s.Circularity > 0.9))
                //{
                //    foreach (var p in s.PixelData/*.Where(p => p.Distance == 1)*/)
                //    {
                //        if (imagenAberturaRelleno.At<byte>(p.CoordsXY[0], p.CoordsXY[1]) != (byte)0)
                //        {
                //            indexadorFiguras[p.CoordsXY[0], p.CoordsXY[1]] = 255;
                //        }
                //    }
                //}

                //foreach (var s in segments.Where(s => s.Circularity >= 1.1))
                //{
                //    foreach (var p in s.PixelData/*.Where(p => p.Distance == 1)*/)
                //    {
                //        if (imagenAberturaRelleno.At<byte>(p.CoordsXY[0], p.CoordsXY[1]) != (byte)0)
                //        {
                //            indexadorFiguras[p.CoordsXY[0], p.CoordsXY[1]] = 255;
                //        }
                //    }
                //}

                //foreach (var s in segments)
                //{
                //    s.SetPixelConcavity();
                //    s.Segmentation();
                //    foreach (var p in s.PixelData.Where(p => p.Distance == 1))
                //    {
                //        if (p.Concave == 1)
                //        {
                //            indexadorFiguras[p.CoordsXY[0], p.CoordsXY[1]] = 255;
                //        }
                //        if (p.Concave == -1)
                //        {
                //            indexadorFiguras[p.CoordsXY[0], p.CoordsXY[1]] = 255;
                //        }
                //    }
                //}

                //foreach (var s in segments)
                //{
                //    //s.SetPixelConcavity();
                //    //s.Segmentation();
                //    foreach (var p in s.PixelData.Where(p => p.Distance == 2))
                //    {
                //        indexadorFiguras[p.CoordsXY[0], p.CoordsXY[1]] = 230;
                //    }
                //}

                //imagenAberturaRelleno.CopyTo(SureFg);
                #endregion

                Mat colormap   = new Mat();
                Mat Marcadores = new Mat();
                Cv2.ConnectedComponents(Centroides, Marcadores);
                Marcadores = Marcadores + 1;
                var indexador2 = Marcadores.GetGenericIndexer <Int32>();
                for (int i = 0; i < Unknown.Rows; i++)
                {
                    for (int j = 0; j < Unknown.Cols; j++)
                    {
                        if (Unknown.At <byte>(i, j) == 255)
                        {
                            indexador2[i, j] = 0;
                        }
                    }
                }

                Marcadores.CopyTo(colormap);
                colormap.ConvertTo(colormap, MatType.CV_8UC3);
                Cv2.ApplyColorMap(colormap, colormap, ColormapTypes.Rainbow);
                Cv2.ImWrite("D:\\Dictuc\\marcadores.png", Marcadores);

                //Mat img1 = new Mat();
                //imagen.CopyTo(img1);
                Mat DistColor = new Mat();
                //imagenGrisContraste = 255 - imagenGrisContraste;
                Cv2.CvtColor(imagenAberturaRelleno, DistColor, ColorConversionCodes.GRAY2BGR);
                DistColor.ConvertTo(DistColor, MatType.CV_8U);

                Cv2.Watershed(DistColor, Marcadores);


                Cv2.ImWrite("D:\\Dictuc\\watersheedIn.png", DistColor);

                var indexador4 = imagen.GetGenericIndexer <Vec3i>();
                //for (int i = 0; i < imagen.Rows; i++)
                //{
                //    for (int j = 0; j < imagen.Cols; j++)
                //    {
                //        //if (Centroides.At<byte>(i, j) > 0)
                //        //    indexador4[i, j] = new Vec3i(0, 0, 255);
                //        if (Marcadores.At<Int32>(i, j) == -1)
                //            indexador4[i, j] = new Vec3i(255, 20, 20);
                //    }
                //}


                for (int i = 0; i < imagen.Rows; i++)
                {
                    for (int j = 0; j < imagen.Cols; j++)
                    {
                        //if (Centroides.At<byte>(i, j) > 0)
                        //    indexador4[i, j] = new Vec3i(0, 0, 255);
                        if (imagenBordes.At <byte>(i, j) > 0)
                        {
                            indexador4[i, j] = new Vec3i(255, 20, 20);
                        }
                    }
                }

                Mat seg = new Mat();
                Marcadores.CopyTo(seg);
                var indexador5 = seg.GetGenericIndexer <int>();
                for (int i = 0; i < Marcadores.Rows; i++)
                {
                    for (int j = 0; j < Marcadores.Cols; j++)
                    {
                        indexador5[i, j] = (Math.Abs(indexador5[i, j]) > 1) ? 255 : 0;
                    }
                }
                Mat kE1 = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(1, 1));
                Cv2.Erode(seg, seg, kE1, iterations: 3);
                int thrs1 = 1500;
                int thrs2 = 1800;
                Mat edge1 = new Mat();
                seg.ConvertTo(seg, MatType.CV_8U);
                Cv2.Canny(seg, edge1, thrs1, thrs2, apertureSize: 5);

                SimpleBlobDetector.Params params1 = new SimpleBlobDetector.Params()
                {
                    MinThreshold        = 0,
                    MaxThreshold        = 255,
                    FilterByArea        = true,
                    MinArea             = 15,
                    FilterByCircularity = false,
                    MinCircularity      = (float)0.01,
                    FilterByConvexity   = false,
                    MinConvexity        = (float)0.1,
                    FilterByInertia     = false,
                    MinInertiaRatio     = (float)0.01,
                };
                SimpleBlobDetector detectorBlobs = SimpleBlobDetector.Create(params1);
                KeyPoint[]         segmentosBlob = detectorBlobs.Detect(edge1);

                Mat segmentosBlobMat = new Mat(1, segmentosBlob.Count(), MatType.CV_32FC1);
                var indexador6       = segmentosBlobMat.GetGenericIndexer <float>();
                for (int i = 0; i < segmentosBlob.Count(); i++)
                {
                    indexador6[0, i] = segmentosBlob[i].Size;
                }

                Mat      hist   = new Mat();
                Rangef[] ranges = { new Rangef(0, (float)segmentosBlob.Max(x => x.Size)) };
                Cv2.CalcHist(new Mat[] { segmentosBlobMat }, new int[] { 0 }, null, hist, 1, new int[] { 100 }, ranges, uniform: true, accumulate: true);
                float[] histAcumulado           = new float[hist.Rows];
                float[] histAcumuladoPorcentaje = new float[11];

                histAcumulado[0] = hist.At <float>(0, 0);

                for (int i = 1; i < hist.Rows; i++)
                {
                    histAcumulado[i] = hist.At <float>(i, 0) + histAcumulado[i - 1];
                }

                int k = 1;
                for (int i = 1; i < histAcumuladoPorcentaje.Count(); i++)
                {
                    for (; k < hist.Rows; k++)
                    {
                        float porcentajeActual    = histAcumulado[k] / segmentosBlob.Count() * 100;
                        float porcentajeAnterior  = histAcumulado[k - 1] / segmentosBlob.Count() * 100;
                        float porcentajeRequerido = (float)((i < 10) ? i * 10 : 99.3);
                        if (porcentajeRequerido <= porcentajeActual)
                        {
                            float tamañoPorcentajeActual        = (float)(k * (float)segmentosBlob.Max(x => x.Size) / 100.0);
                            float tamañoPorcentajeAnterior      = (float)((k - 1) * (float)segmentosBlob.Max(x => x.Size) / 100.0);
                            float tasaVariacionTamañoPorcentaje = (tamañoPorcentajeActual - tamañoPorcentajeAnterior) / (porcentajeActual - porcentajeAnterior);
                            histAcumuladoPorcentaje[i] = tamañoPorcentajeAnterior + tasaVariacionTamañoPorcentaje * (i * 10 - porcentajeAnterior);
                            break;
                        }
                    }
                }

                for (int i = 0; i < histAcumuladoPorcentaje.Count(); i++)
                {
                    Console.Write(histAcumuladoPorcentaje[i] + ",");
                }
                Console.WriteLine("");

                //            data1 = [];

                //              for i in range(0, len(keypoints1)):

                //                data1.append(keypoints1[i].size * coefTamano)
                //                #tamano.write(str(i)+'\t'+str(keypoints1[i].size*2*0.3)+'\n')
                //  cv2.line(im_with_keypoints1, (int(float(keypoints1[i].pt[0] - keypoints1[i].size)), int(float(keypoints1[i].pt[1]))), (int(float(keypoints1[i].pt[0] + keypoints1[i].size)), int(float(keypoints1[i].pt[1]))), (255, 0, 0), 1)

                //                cv2.line(im_with_keypoints1, (int(float(keypoints1[i].pt[0])), int(float(keypoints1[i].pt[1] - keypoints1[i].size))), (int(float(keypoints1[i].pt[0])), int(float(keypoints1[i].pt[1] + keypoints1[i].size))), (255, 0, 0), 1)


                //# print(data1)
                //n1, bins1, patches1 = hist(data1, 200,[0, max(data1)], normed = 100, cumulative = True, bottom = True, histtype = 'stepfilled', align = 'mid', orientation = 'vertical', rwidth = 1, log = False, color = "r")

                //              tamano = open(temp + "instancia_" + instancia + ".txt", "w")


                //              x = np.array(bins1)

                //              y = np.append([0], n1)

                //                  xnew = [x[1], x[21], x[36], x[45], x[53], x[60], x[69], x[78], x[88], x[97], x[200]]
                //ynew = [y[1], y[21], y[36], y[45], y[53], y[60], y[69], y[78], y[88], y[97], y[200]]

                //tamano.write('INSERT INTO [dbo].[Granulometria](Cod_Instancia,Fecha,P_10,P_20,P_30,P_40,P_50,P_60,P_70,P_80,P_90,P_100, Filename) values (')
                //tamano.write(instancia + ",CONVERT(datetime, '" + sys.argv[1][0:4] + "-" + sys.argv[1][4:6] + "-" + sys.argv[1][6:8] + ' ' + sys.argv[1][9:11] + ':' + sys.argv[1][11:13] + ':' + sys.argv[1][13:15] + "', 120)")

                //for j in range(1, len(xnew)):
                //  #tamano.write (str(j)+'\t'+str(round(xnew[j],1))+'\t'+str(round(ynew[j]*100,2))+'\n')
                //  tamano.write(',' + str(round(xnew[j], 1)))

                //tamano.write(",'" + sys.argv[1] + " - Resultado.jpg'")
                //tamano.write(')')

                //CvXImgProc.Thinning(mascaraInv, mascaraInv, ThinningTypes.ZHANGSUEN);

                Mat imWithKeypoints1 = new Mat();
                Cv2.DrawKeypoints(imagen, segmentosBlob, imWithKeypoints1, new Scalar(0, 0, 255), DrawMatchesFlags.DrawRichKeypoints);


                var dataTamaños = segmentosBlob.Select(s => s.Size).ToArray();


                Cv2.ImWrite("D:\\Dictuc\\output0" + numImg + ".png", imagen);
                Cv2.ImWrite("D:\\Dictuc\\output1" + numImg++ + ".png", imWithKeypoints1);

                Cv2.ImShow("Segmentado", imagen);
                Cv2.ImShow("GrisContraste", imagenGrisContraste);
                Cv2.ImShow("bordes90", imagenBordes);
                Cv2.ImShow("bordes50", imagenBordes2);

                salida.Write(imagen);

                //System.Threading.Thread.Sleep(10);
                Cv2.WaitKey(10);

                imagenRuidoFiltrado.Release();
                imagenGrisContraste.Release();
                imagenGrisFrecAltasProc.Release();
                imagenBordes.Release();
                imagenBinaria.Release();
                imagenAberturaRelleno.Release();
            }
        }