Example #1
        private List <Rect> GetSymbolRects(Mat plateNum)
        {
            Mat edgedPlate = new Mat();
            var blPlate    = MakePictureBlurred(plateNum);

            Cv2.Canny(blPlate, edgedPlate, 50, 200);

            Mat[] contours;               // populated by FindContours via the out parameter
            Mat hierarchy = new Mat();

            Cv2.FindContours(edgedPlate, out contours, hierarchy, RetrievalModes.List, ContourApproximationModes.ApproxSimple);


            List <Rect> symbols = new List <Rect>();

            foreach (var cnt in contours)
            {
                var rect = Cv2.BoundingRect(cnt);
                if (rect.Width < 15 || rect.Width > 40 || rect.Height < 30 || rect.Height > 60)
                {
                    continue;
                }

                symbols.Add(rect);
            }

            return(symbols);
        }
Example #2
        static Mat matematika_tiesei(Mat color_img, out Mat edges)
        {
            Mat gray = new Mat();

            Cv2.CvtColor(color_img, gray, ColorConversion.RgbToGray);

            edges = new Mat();
            Cv2.Canny(gray, edges, 50, 50);
            //Cv2.Threshold(gray, edges, 100, 255, ThresholdType.BinaryInv);

            CvLineSegmentPolar[] lines = Cv2.HoughLines(edges, 1, Cv.PI / 360, 200, 0, 0); // HoughLinesP would be faster (see the sketch after this example)

            foreach (CvLineSegmentPolar line in lines)
            {
                double a      = Math.Cos(line.Theta);
                double b      = Math.Sin(line.Theta);
                double x0     = a * line.Rho;
                double y0     = b * line.Rho;
                Point  first  = new Point(x0 + (1000 * (-b)), y0 + 1000 * a);
                Point  second = new Point(x0 - (1000 * (-b)), y0 - 1000 * a);
                //Cv2.Line(color_img, first, second, Scalar.Red, 1);

                // if ((line.Theta >= 0) && (line.Theta < 0.5))
                Cv2.Line(color_img, first, second, Scalar.Green, 5);

                //if ((line.Theta > Cv.PI / 180 * 89) && (line.Theta < Cv.PI / 180 * 91))
                Cv2.Line(color_img, first, second, Scalar.Green, 5);
            }

            return(color_img);
        }
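The comment in the example above notes that HoughLinesP would be faster. Below is a minimal sketch of the same drawing step using the probabilistic transform and the current OpenCvSharp enum names; the method and variable names are illustrative and not taken from the original example, and the Hough parameters are untuned placeholders.

        // Hedged sketch: probabilistic Hough transform, as suggested by the
        // "HoughLinesP would be faster" comment. Names and parameters are illustrative.
        static Mat DrawLinesWithHoughP(Mat colorImg, out Mat edges)
        {
            Mat gray = new Mat();
            Cv2.CvtColor(colorImg, gray, ColorConversionCodes.BGR2GRAY);

            edges = new Mat();
            Cv2.Canny(gray, edges, 50, 150);

            // HoughLinesP returns finite segments directly, so no rho/theta conversion is needed.
            LineSegmentPoint[] segments = Cv2.HoughLinesP(edges, 1, Cv2.PI / 180, 100, 50, 10);
            foreach (LineSegmentPoint segment in segments)
            {
                Cv2.Line(colorImg, segment.P1, segment.P2, Scalar.Green, 5);
            }

            return colorImg;
        }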
Example #3
        private static Point[][] GetContours(Mat srcNumberOrg, out HierarchyIndex[] hierarchyIndexesOrg)
        {
            var dstGray = new Mat();

            Cv2.Threshold(srcNumberOrg, dstGray, 80, 80, ThresholdTypes.Binary);
            var hhh    = srcFilename.Split('.');
            var saveTo = path + hhh[0] + "_gray.jpg";

            Cv2.ImWrite(saveTo, dstGray);
            var dstNumberOrg = new Mat();

            Cv2.Canny(srcNumberOrg, dstNumberOrg, 80, 80); //src

            // TODO: maybe Sobel would work noticeably better?! see the image from aforge.net (a Sobel sketch follows this example)
            //Cv2.Sobel(srcNumberOrg, dstNumberOrg, MatType.CV_16S, 80, 80);
            Point[][] contoursOrg;
            // HierarchyIndex[] hierarchyIndexesOrg;
            Cv2.FindContours(
                dstNumberOrg,
                out contoursOrg,
                out hierarchyIndexesOrg,
                mode: RetrievalModes.External, // only the outer contours
                method: ContourApproximationModes.ApproxNone);
            Console.WriteLine("Contours found: " + contoursOrg.Length);
            if (contoursOrg.Length == 0)
            {
                throw new NotSupportedException("Couldn't find any object in the image.");
            }

            return(contoursOrg);
        }
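The TODO in the example above wonders whether Sobel would work better. Below is a minimal sketch of a Sobel-based gradient image; the derivative orders and kernel size are illustrative assumptions, not values from the original code.

        // Hedged sketch for the Sobel idea in the TODO: first-order derivatives in X
        // and Y, converted to 8-bit and blended into a single gradient magnitude image.
        private static Mat SobelEdges(Mat srcNumberOrg)
        {
            var gradX = new Mat();
            var gradY = new Mat();
            Cv2.Sobel(srcNumberOrg, gradX, MatType.CV_16S, 1, 0, 3);
            Cv2.Sobel(srcNumberOrg, gradY, MatType.CV_16S, 0, 1, 3);

            var absX = new Mat();
            var absY = new Mat();
            Cv2.ConvertScaleAbs(gradX, absX);
            Cv2.ConvertScaleAbs(gradY, absY);

            var sobel = new Mat();
            Cv2.AddWeighted(absX, 0.5, absY, 0.5, 0, sobel);
            return sobel;
        }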
Example #4
        public static Mat DetectEdges(Mat src)
        {
            var dst = new Mat();

            Cv2.Canny(src, dst, 50, 200);
            return(dst);
        }
Example #5
        private OpenCvSharp.Point getOMRImage(String path)
        {
            int offset = 100;

            Mat src, gray, binary, canny;

            src = Cv2.ImRead(path);
            Rect rect = new Rect(offset, offset, src.Width - offset * 2, src.Height - offset * 2);

            src = src.SubMat(rect);

            gray   = new Mat();
            binary = new Mat();
            canny  = new Mat();

            Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY);
            Cv2.Threshold(gray, binary, 150, 255, ThresholdTypes.Binary);
            Cv2.Canny(binary, canny, 0, 0, 3);

            // width, height
            OpenCvSharp.Point pt = projectPerspective(src, canny);
            src.Dispose();
            gray.Dispose();
            binary.Dispose();
            canny.Dispose();

            return(pt);
        }
Example #6
        static void Main(string[] args)
        {
            Mat src    = Cv2.ImRead("card.jpg");
            Mat gray   = new Mat();
            Mat binary = new Mat();
            Mat morp   = new Mat();
            Mat canny  = new Mat();
            Mat dst    = src.Clone();

            Mat kernel = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(3, 3));

            Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY);
            Cv2.Threshold(gray, binary, 150, 255, ThresholdTypes.Binary);
            Cv2.Dilate(binary, morp, kernel, new Point(-1, -1));
            Cv2.Erode(morp, morp, kernel, new Point(-1, -1), 3);
            Cv2.Dilate(morp, morp, kernel, new Point(-1, -1), 2);
            Cv2.Canny(morp, canny, 0, 0, 3);

            LineSegmentPoint[] lines = Cv2.HoughLinesP(canny, 1, Cv2.PI / 180, 140, 50, 10);

            for (int i = 0; i < lines.Length; i++)
            {
                Cv2.Line(dst, lines[i].P1, lines[i].P2, Scalar.Yellow, 2);
            }

            Cv2.ImShow("dst", dst);
            Cv2.WaitKey(0);
            Cv2.DestroyAllWindows();
        }
Example #7
    void f1()
    {
        Mat mainMat = new Mat(ttexture.height, ttexture.width, MatType.CV_8UC3);
        Mat grayMat = new Mat();

        mainMat = OpenCvSharp.Unity.TextureToMat(ttexture);
        Cv2.CvtColor(mainMat, grayMat, ColorConversionCodes.BGR2GRAY);
        Cv2.GaussianBlur(grayMat, grayMat, new Size(5, 5), 0);
        Cv2.Canny(grayMat, grayMat, 10.0, 70.0);
        Cv2.FastNlMeansDenoising(grayMat, grayMat, 3, 7, 21);
        Cv2.Threshold(grayMat, grayMat, 70.0, 255.0, ThresholdTypes.BinaryInv);

        texture1 = OpenCvSharp.Unity.MatToTexture(grayMat);

        Color[] pixels = texture1.GetPixels(0, 0, texture1.width, texture1.height, 0);
        for (int p = 0; p < pixels.Length; p++)
        {
            if (pixels[p].Equals(new Color(1, 1, 1, 1)))
            {
                pixels[p] = new Color(0, 0, 0, 0);
            }
        }
        texture1.SetPixels(0, 0, texture1.width, texture1.height, pixels, 0);
        texture1.Apply();
        m1.mainTexture = texture1;
        //m_RawImage.texture = texture1;
    }
Example #8
        // Our sketch generation function
        protected override bool ProcessTexture(WebCamTexture input, ref Texture2D output)
        {
            Mat img = Unity.TextureToMat(input, TextureParameters);

            //Convert image to grayscale
            Mat imgGray = new Mat();

            Cv2.CvtColor(img, imgGray, ColorConversionCodes.BGR2GRAY);

            // Clean up image using Gaussian Blur
            Mat imgGrayBlur = new Mat();

            Cv2.GaussianBlur(imgGray, imgGrayBlur, new Size(5, 5), 0);

            //Extract edges
            Mat cannyEdges = new Mat();

            Cv2.Canny(imgGrayBlur, cannyEdges, 70.0, 1.0);

            // Binarize the edge image
            Mat mask = new Mat();

            Cv2.Threshold(cannyEdges, mask, 70.0, 255.0, ThresholdTypes.Binary);

            // result; passing the output texture as a parameter allows reuse of its buffer.
            // If the output texture is null, a new texture will be created.
            output = Unity.MatToTexture(mask, output);
            return(true);
        }
Example #9
        public static void cv_12()  // Edge-detection series: the Canny operator
        {
            // In the Canny algorithm, first-order derivatives are computed in the X and Y directions
            // and then combined into derivatives along four directions.
            // Points where the directional derivative is a local maximum are candidates for edge points.
            // Canny's most notable innovation is assembling individual candidate edge pixels into contours.

            Mat srcImg   = Cv2.ImRead(@"G:\\pics\\6.jpg");
            Mat CannyImg = new Mat();

            Mat gussImage = new Mat();

            Cv2.GaussianBlur(srcImg, gussImage, new OpenCvSharp.Size(3, 3), 0, 0, BorderTypes.Default);    // Gaussian blur

            Mat grayImage = new Mat();

            Cv2.CvtColor(gussImage, grayImage, ColorConversionCodes.RGB2GRAY);   // convert to grayscale

            Cv2.Canny(grayImage, CannyImg, 50, 150, 3, true);
            // Canny operator. Parameters: 1) 8-bit input image; 2) output edge image, usually binary with a black background;
            // 3) low threshold (the larger the value, the fewer edges are found); 4) high threshold;
            // 5) aperture size of the Sobel operator, default 3; 6) flag selecting how the gradient magnitude is computed (L2gradient), default false.
            // Pixels below threshold 1 are rejected as non-edges;
            // pixels above threshold 2 are accepted as edges;
            // pixels between the two thresholds are accepted only if they are adjacent to a pixel already identified as an edge.

            Cv2.ImShow("srcImg", srcImg);
            Cv2.ImShow("gussImage", gussImage);
            Cv2.ImShow("CannyImg", CannyImg);

            Cv2.WaitKey();
        }
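As a small illustration of the hysteresis thresholds described in the comments above, the sketch below runs Canny twice on the same image with a loose and a tight threshold pair. The file name, window titles, and threshold values are placeholders, not values from the original example.

        // Hedged sketch: comparing two hysteresis threshold pairs on one image.
        public static void CompareCannyThresholds()
        {
            using var gray  = Cv2.ImRead("input.jpg", ImreadModes.Grayscale);
            using var loose = new Mat();
            using var tight = new Mat();

            // A low threshold pair keeps many weak, noisy edges...
            Cv2.Canny(gray, loose, 30, 90);
            // ...while a high pair (here with L2gradient enabled) keeps only strong, well-connected edges.
            Cv2.Canny(gray, tight, 100, 200, 3, true);

            Cv2.ImShow("loose thresholds", loose);
            Cv2.ImShow("tight thresholds", tight);
            Cv2.WaitKey();
            Cv2.DestroyAllWindows();
        }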
Example #10
        public static void cv_06()  // Detecting / finding edge contours
        {
            Mat srcImage = Cv2.ImRead(@"G:\\pics\\123.jpg", ImreadModes.Color);
            Mat src_gray = new Mat();

            Cv2.CvtColor(srcImage, src_gray, ColorConversionCodes.RGB2GRAY); // convert to grayscale
            Cv2.Blur(src_gray, src_gray, new OpenCvSharp.Size(2, 2));        // smoothing filter

            Mat canny_Image = new Mat();

            Cv2.Canny(src_gray, canny_Image, 100, 200);      // Canny edge detection

            OpenCvSharp.Point[][] contours;
            HierarchyIndex[]      hierarchly;
            Cv2.FindContours(canny_Image, out contours, out hierarchly, RetrievalModes.Tree, ContourApproximationModes.ApproxSimple, new OpenCvSharp.Point(0, 0)); // extract the contours

            Mat    dst_Image = Mat.Zeros(canny_Image.Size(), srcImage.Type());                                                                                     // all-zero output image
            Random rnd       = new Random();

            for (int i = 0; i < contours.Length; i++)
            {
                Scalar color = new Scalar(rnd.Next(0, 255), rnd.Next(0, 255), rnd.Next(0, 255));
                Cv2.DrawContours(dst_Image, contours, i, color, 2, LineTypes.Link8, hierarchly);       // draw each contour
            }
            //return dst_Image;   // return the result
            Cv2.ImShow("dst_Image", dst_Image);
            Cv2.ImShow("canny_Image", canny_Image);
            Cv2.WaitKey();
            //return canny_Image;
        }
Example #11
        private void timer1_Tick(object sender, EventArgs e)
        {
            try
            {
                Console.WriteLine("Tick");

                cap.Read(frame);
                OpenCvSharp.Size newsize = new OpenCvSharp.Size(pictureBox1.Width, pictureBox1.Height);
                frame = frame.Resize(newsize);

                Cv2.Canny(frame, dstframe, 20, 100);

                Bitmap tempimage1 = BitmapConverter.ToBitmap(frame);
                Bitmap tempimage2 = BitmapConverter.ToBitmap(dstframe);

                pictureBox1.Image = tempimage1;
                pictureBox2.Image = tempimage2;

                //Cv2.WaitKey(10);
            }
            catch (Exception extdf)
            {
                Console.WriteLine(extdf.ToString());
            }
        }
Example #12
        private Mat Canny(Mat picture, Mat result)
        {
            Data_th th_data = th.Get_Data();

            Cv2.Canny(picture, result, th_data.Get_Cth1(), th_data.Get_Cth2());    // Edges at or below threshold 1 are discarded; edges at or above threshold 2 are treated as edges.
            return(result);
        }
Example #13
        public static Mat CannyEdge(Mat mat, int lowerThreshold = 50, int higherThreshold = 200)
        {
            Mat cannyMat = new Mat();

            Cv2.Canny(mat, cannyMat, lowerThreshold, higherThreshold);
            return(cannyMat);
        }
Example #14
        private void bn_Canny_Click(object sender, RoutedEventArgs e)
        {
            if (listImage.Count > 0)
            {
                SubWindow.Win_Canny win = new SubWindow.Win_Canny();
                if (win.ShowDialog() == true)
                {
                    string strTitle = listImage[_nSelWin].Title;
                    Mat    matSrc   = listImage[_nSelWin].fn_GetImage();
                    Mat    matDst   = new Mat();
                    double dSigma   = 0.0;
                    double dLowTh   = 0.0;
                    double dHighTh  = 0.0;
                    double.TryParse(win.tb_Sigma.Text, out dSigma);
                    double.TryParse(win.tb_LowTh.Text, out dLowTh);
                    double.TryParse(win.tb_HighTh.Text, out dHighTh);
                    int width  = matSrc.Cols;
                    int height = matSrc.Rows;
                    timeStart = DateTime.Now;

                    Cv2.GaussianBlur(matSrc, matDst, new OpenCvSharp.Size(3, 3), dSigma);
                    Cv2.Canny(matDst, matDst, dLowTh, dHighTh);

                    fn_WriteLog($"[Canny] {strTitle} ({(DateTime.Now - timeStart).TotalMilliseconds} ms)");
                    fn_NewImage(matDst, $"{strTitle}_Canny");
                }
            }
        }
Example #15
        public static void WriteVideo()
        {
            Size dsize = new Size(640, 480);

            // Opens a camera device
            using (VideoCapture capture = new VideoCapture(0))
                // Read movie frames and write them to VideoWriter
                using (VideoWriter writer = new VideoWriter("out.avi", -1, capture.Fps, dsize))
                    using (Mat frame = new Mat())
                        using (Mat gray = new Mat())
                            using (Mat canny = new Mat())
                                using (Mat dst = new Mat())
                                {
                                    Console.WriteLine("Converting each movie frames...");
                                    while (true)
                                    {
                                        // Read image
                                        capture.Read(frame);
                                        if (frame.Empty())
                                        {
                                            break;
                                        }

                                        //Console.CursorLeft = 0;
                                        //Console.Write("{0} / {1}", capture.PosFrames, capture.FrameCount);

                                        // grayscale -> canny -> resize
                                        Cv2.CvtColor(frame, gray, ColorConversionCodes.BGR2GRAY);
                                        Cv2.Canny(gray, canny, 100, 180);
                                        Cv2.Resize(canny, dst, dsize, 0, 0, InterpolationFlags.Linear);
                                        // Write mat to VideoWriter
                                        writer.Write(dst);
                                    }
                                }
        }
Example #16
 private void mnuFilterEdgeCanny_Click(object sender, EventArgs e)
 {
     // Canny Edge
     Cv2.Canny(_matDisp, _matDisp, 100.0, 150.0);
     // Draw the image
     DrawMatImage(_matDisp);
 }
Example #17
        public dynamic PreProcessImage(ref Mat image, Mat sourceImage)
        {
            var copy = new Mat();

            try
            {
                Cv2.BilateralFilter(image, copy, 9, 75, 75);
                Cv2.AdaptiveThreshold(copy, copy, 255, AdaptiveThresholdTypes.GaussianC, ThresholdTypes.Binary, 115, 4);
                Cv2.MedianBlur(copy, copy, 11);
                Cv2.CopyMakeBorder(copy, copy, 5, 5, 5, 5, BorderTypes.Constant, Scalar.Black);

                // TODO: Dispose the scratch Mat (a sketch follows this example)
                var otsu = Cv2.Threshold(copy, new Mat(), 0, 255, ThresholdTypes.Binary | ThresholdTypes.Otsu);
                Cv2.Canny(copy, copy, otsu, otsu * 2, 3, true);
            }
            catch
            {
                copy.Dispose();
                throw;
            }

            image.Dispose();
            image = copy;

            return(null);
        }
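One minimal way to address the TODO in the example above is to dispose the scratch Mat that exists only to obtain the Otsu threshold. The fragment below is a sketch meant to slot into the try block of the example, assuming only the return value of Cv2.Threshold is needed.

                // Hedged sketch: compute the Otsu threshold without leaking the scratch Mat,
                // then reuse it to derive the Canny hysteresis thresholds.
                double otsu;
                using (var scratch = new Mat())
                {
                    otsu = Cv2.Threshold(copy, scratch, 0, 255, ThresholdTypes.Binary | ThresholdTypes.Otsu);
                }
                Cv2.Canny(copy, copy, otsu, otsu * 2, 3, true);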
Example #18
        private void ProcImage5(ref System.Drawing.Bitmap src, ref System.Drawing.Bitmap srcB, out System.Drawing.Bitmap dst)
        {
            dst = null;
            Mat tmp     = new Mat();
            var element = Cv2.GetStructuringElement(
                MorphShapes.Rect,
                new OpenCvSharp.Size(2 * 2 + 1, 2 * 2 + 1),
                new OpenCvSharp.Point(2, 2));

            Mat srcImg = BitmapConverter.ToMat(src);

            Cv2.CvtColor(srcImg, srcImg, ColorConversionCodes.BGRA2BGR);
            Mat edge = new Mat();

            Cv2.Canny(srcImg, edge, 16, 50);

            //Cv2.MorphologyEx(edge, tmp, MorphTypes.Open, element);
            Cv2.MorphologyEx(edge, tmp, MorphTypes.Close, element, null, 25);

            Cv2.MorphologyEx(tmp, edge, MorphTypes.Open, element, null, 3);

            Cv2.MorphologyEx(edge, tmp, MorphTypes.Close, element, null, 10);
            Mat srcImgB = BitmapConverter.ToMat(srcB);

            Cv2.CvtColor(srcImgB, srcImgB, ColorConversionCodes.BGRA2BGR);
            Mat edgeB = new Mat();

            Cv2.Canny(srcImgB, edgeB, 16, 50);

            Mat mask = new Mat();

            //Cv2.Absdiff(edge, edgeB, mask);

            dst = BitmapConverter.ToBitmap(tmp);
        }
Example #19
        private Mat FindContours(Mat srcImage)
        {
            Mat src_gray = new Mat();

            Cv2.CvtColor(srcImage, src_gray, ColorConversionCodes.RGB2GRAY);
            Cv2.Blur(src_gray, src_gray, new OpenCvSharp.Size(2, 2));

            Mat cannyImage = new Mat();

            Cv2.Canny(src_gray, cannyImage, 100, 200);

            OpenCvSharp.Point[][] contours;
            HierarchyIndex[]      hierarchly;
            Cv2.FindContours(cannyImage, out contours, out hierarchly, RetrievalModes.Tree,
                             ContourApproximationModes.ApproxSimple, new OpenCvSharp.Point(0, 0));

            Mat dstImage = Mat.Zeros(cannyImage.Size(), srcImage.Type());

            Random rnd = new Random();

            for (int i = 0; i < contours.Length; i++)
            {
                Scalar color = new Scalar(rnd.Next(0, 255), rnd.Next(0, 255), rnd.Next(0, 255));
                Cv2.DrawContours(dstImage, contours, i, color, 2, LineTypes.Link8, hierarchly);
            }

            return(dstImage);
        }
Example #20
        public void Run()
        {
            const string OutVideoFile = "out.avi";

            // Opens MP4 file (ffmpeg is probably needed)
            VideoCapture capture = new VideoCapture(FilePath.Bach);

            // Read movie frames and write them to VideoWriter
            Size dsize = new Size(640, 480);

            using (VideoWriter writer = new VideoWriter(OutVideoFile, -1, capture.Fps, dsize))
            {
                Console.WriteLine("Converting each movie frames...");
                Mat frame = new Mat();
                while (true)
                {
                    // Read image
                    capture.Read(frame);
                    if (frame.Empty())
                    {
                        break;
                    }

                    Console.CursorLeft = 0;
                    Console.Write("{0} / {1}", capture.PosFrames, capture.FrameCount);

                    // grayscale -> canny -> resize
                    Mat gray  = new Mat();
                    Mat canny = new Mat();
                    Mat dst   = new Mat();
                    Cv2.CvtColor(frame, gray, ColorConversion.BgrToGray);
                    Cv2.Canny(gray, canny, 100, 180);
                    Cv2.Resize(canny, dst, dsize, 0, 0, Interpolation.Linear);
                    // Write mat to VideoWriter
                    writer.Write(dst);
                }
                Console.WriteLine();
            }

            // Watch result movie
            using (VideoCapture capture2 = new VideoCapture(OutVideoFile))
                using (Window window = new Window("result"))
                {
                    int sleepTime = (int)(1000 / capture.Fps);

                    Mat frame = new Mat();
                    while (true)
                    {
                        capture2.Read(frame);
                        if (frame.Empty())
                        {
                            break;
                        }

                        window.ShowImage(frame);
                        Cv2.WaitKey(sleepTime);
                    }
                }
        }
Example #21
        public static Mat EdgeDetector(this Mat image)
        {
            // Mat src = Cv2.ImRead("lenna.png", ImreadModes.GrayScale);
            Mat dst = new Mat();

            Cv2.Canny(image, dst, 50, 200);
            return(dst);
        }
Example #22
        private Bitmap CannyFilter(Bitmap originalBitmap)
        {
            var originalMat = BitmapConverter.ToMat(originalBitmap);
            var cannyMat    = new Mat();

            Cv2.Canny(originalMat, cannyMat, 50, 100);
            return(BitmapConverter.ToBitmap(cannyMat));
        }
Example #23
        public Mat GetConvertImage(Mat input)
        {
            Mat result = new Mat();

            Cv2.Canny(input, result, _threshold1, _threshold2);

            return(result);
        }
Example #24
        public static Mat convertImage(Mat origional)
        {
            Mat newMat = new Mat();

            origional.CopyTo(newMat);
            Cv2.Canny(origional, newMat, 200, 300);
            return(newMat);
        }
Example #25
 static void Canny(Mat src, Mat dst)
 {
     Cv2.Canny(src, dst, 50, 200);
     Cv2.ImShow("元画像", src);
     Cv2.ImShow("dst", dst);
     Cv2.WaitKey();
     Cv2.DestroyAllWindows();
 }
Example #26
        // https://docs.opencv.org/2.4/modules/imgproc/doc/filtering.html?highlight=sobel#sobel

        //Canny filter
        public static Mat C_Image_Canny(Mat image, double thres1)
        {
            Mat image_copy = new Mat();

            Cv2.Canny(image, image_copy, thres1, thres1, 3, true);
            //Cv2.Canny(image, image_copy, thres1, thres1*2,3,true);
            return(image_copy);
        }
Example #27
        public override Photo? Process(Photo photo, CannyThresholdParameter parameters)
        {
            var input  = ImageConverter.Photo2Mat(photo);
            var output = new Mat();

            Cv2.Canny(input, output, parameters.ThresholdMin, parameters.ThresholdMax);

            return(ImageConverter.Mat2Photo(output));
        }
Example #28
        static void Main(string[] args)
        {
            using var src = new Mat("example.jpg", ImreadModes.Grayscale);
            using var dst = new Mat();

            Cv2.Canny(src, dst, 50, 200);
            Cv2.ImWrite("example_canny.jpg", dst);
            Console.WriteLine("Example done!");
        }
Example #29
        public static void EdgeDetection(string fileName)
        {
            Mat src = new Mat(Path.Combine(INPUT_PATH, fileName), ImreadModes.AnyColor);
            Mat dst = new Mat();

            Cv2.Canny(src, dst, 500, 600);

            Cv2.ImWrite(Path.Combine(EDGEDETECTION_PATH, fileName), dst);
        }
Example #30
    public override void Actualizar(IEsCable nodo)
    {
        if (nodo == null)
        {
            return;
        }

        var matEntrante = nodo.MatOut();

        if (matEntrante == null)
        {
            return;
        }
        // Canny requires an odd aperture size in the range [3, 7].
        if (apertureSize % 2 == 0)
        {
            apertureSize--;
        }
        if (apertureSize < 3)
        {
            apertureSize = 3;
        }
        else if (apertureSize > 7)
        {
            apertureSize = 7;
        }

        // Either write the result over the incoming Mat in place, or keep a separate output Mat of matching size.
        if (sobreescribirMat)
        {
            if (mat != matEntrante)
            {
                mat = matEntrante;
            }
        }
        else
        {
            if (mat == null)
            {
                mat = new Mat(matEntrante.Size(), matEntrante.Type());
            }
            else
            {
                if (mat.Width != matEntrante.Width || mat.Height != matEntrante.Height)
                {
                    if (mat.Width * mat.Height == matEntrante.Width * matEntrante.Height)
                    {
                        mat.Reshape(0, matEntrante.Rows);
                    }
                    else
                    {
                        mat = matEntrante.Clone();
                    }
                }
            }
        }
        Cv2.Canny(matEntrante, mat, umbralUno, umbralDos, apertureSize, L2gradient);
        PropagarActualizacion();
    }