Example #1
        public static Mat AddNoise(this Mat image, double noiseLevel = 0.75)
        {
            if (noiseLevel < 0 || noiseLevel > 1.0)
            {
                throw new ArgumentException("noiseLevel must be between 0.0 and 1.0");
            }
            if (noiseLevel == 0)
            {
                return(image);
            }

            var       mean      = new MCvScalar(0);
            var       std       = new MCvScalar(255);
            const int gaussSize = 13;

            var output = new Mat();
            var noise  = new Mat(image.Size, DepthType.Cv8U, image.NumberOfChannels);

            using (ScalarArray scalarArray1 = new ScalarArray(mean))
                using (ScalarArray scalarArray2 = new ScalarArray(std))
                {
                    CvInvoke.Randn(noise, scalarArray1, scalarArray2);
                }
            CvInvoke.GaussianBlur(noise, noise, new Size(gaussSize, gaussSize), 0.0);
            CvInvoke.AddWeighted(image, 1 - noiseLevel, noise, noiseLevel, 0, output, image.Depth);

            return(output);
        }
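A minimal usage sketch for the extension method above; the file name and window title are placeholders, not part of the original example:

        Mat src = CvInvoke.Imread("input.jpg");
        using (Mat noisy = src.AddNoise(0.25))
        {
            CvInvoke.Imshow("noisy", noisy);
            CvInvoke.WaitKey(0);
        }
        src.Dispose();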
Example #2
        private void button_run_Click(object sender, EventArgs e)
        {
            tab.SelectedIndex = 2;
            double a = (double)value_a.Value;
            double b = (double)value_b.Value;
            double p = (double)value_p.Value;
            double t = (double)value_t.Value;

            if (t == 0)
            {
                imageBox3.Image = left.origin;
                return;
            }
            else if (t == 1)
            {
                imageBox3.Image = right.origin;
                return;
            }
            list_left  = left.LineList;
            list_right = right.LineList;
            List <SupLine> list_dest = new List <SupLine>();

            for (int i = 0; i < list_left.Count; i++)
            {
                list_dest.Add(SupLine.Interpolation(list_left[i], list_right[i], t));
            }
            Image <Bgr, byte> warp_left  = left.Warp(list_dest, a, b, p);
            Image <Bgr, byte> warp_right = right.Warp(list_dest, a, b, p);

            imageBox3.Image = new Image <Bgr, byte>(warp_left.Size);
            CvInvoke.AddWeighted(warp_left, 1 - t, warp_right, t, 0, imageBox3.Image);
        }
Example #3
        /// <summary>
        /// Apply sharpening to the given UMat.
        /// </summary>
        /// <param name="img">The src UMat.</param>
        /// <param name="sigma">Sigma value for sharpening.</param>
        /// <param name="amount">Amount of sharpening required.</param>
        /// <returns>
        /// Sharpened UMat image.
        /// </returns>
        /// <remarks>
        /// Used for image sharpening.
        /// </remarks>
        public static UMat getSharpened(UMat img, double sigma, double amount)
        {
            UMat dblImg     = new UMat(img.Rows, img.Cols, Emgu.CV.CvEnum.DepthType.Cv64F, img.NumberOfChannels);
            UMat dblBlurImg = new UMat(img.Rows, img.Cols, Emgu.CV.CvEnum.DepthType.Cv64F, img.NumberOfChannels);
            UMat outImg     = new UMat(img.Rows, img.Cols, Emgu.CV.CvEnum.DepthType.Cv64F, img.NumberOfChannels);

            img.ConvertTo(dblImg, Emgu.CV.CvEnum.DepthType.Cv64F);
            int k = 2 * (int)Math.Round(3.0 * sigma) + 1;

            CvInvoke.GaussianBlur(dblImg, dblBlurImg, new Size(k, k), sigma, sigma);
            CvInvoke.AddWeighted(dblImg, 1.0 + amount, dblBlurImg, -amount, 0, outImg);
            dblImg.Dispose();
            dblBlurImg.Dispose();
            img.Dispose();
            return(outImg);
        }
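A brief usage sketch of the unsharp-mask helper above (output = (1 + amount) * img - amount * blurred); the file name is a placeholder, and the Cv64F result is converted back to 8-bit here only for display:

        UMat src = new UMat();
        using (Mat file = CvInvoke.Imread("input.jpg"))
        {
            file.CopyTo(src);
        }
        UMat sharpened = getSharpened(src, 2.0, 1.5);   // note: getSharpened disposes src itself
        using (UMat display = new UMat())
        {
            sharpened.ConvertTo(display, Emgu.CV.CvEnum.DepthType.Cv8U);
            CvInvoke.Imshow("sharpened", display);
            CvInvoke.WaitKey(0);
        }
        sharpened.Dispose();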
Example #4
        private Image <Gray, byte> ThresholdImage(Image <Hsv, byte> original, Point[] ranges)
        {
            List <Image <Gray, byte> > images = new List <Image <Gray, byte> >();

            for (int i = 0; i < ranges.Length; i++)
            {
                images.Add(original.InRange(new Hsv(ranges[i].X, lowSaturation, lowValue), new Hsv(ranges[i].Y, 255, 255)));
            }

            Image <Gray, byte> threshold = new Image <Gray, byte>(original.Size);

            for (int j = 0; j < images.Count; j++)
            {
                CvInvoke.AddWeighted(images[j], 1, threshold, 1, 0, threshold);
            }

            // Smooth the threshold image in place before the morphology step.
            threshold._SmoothGaussian(9);

            Image <Gray, byte> temp = threshold.Clone();
            var element             = CvInvoke.GetStructuringElement(Emgu.CV.CvEnum.ElementShape.Cross, new Size(3, 3), new Point(-1, -1));

            // opening
            CvInvoke.Erode(threshold, temp, element, new Point(-1, -1), 3, Emgu.CV.CvEnum.BorderType.Reflect, default(MCvScalar));
            CvInvoke.Dilate(temp, threshold, element, new Point(-1, -1), 2, Emgu.CV.CvEnum.BorderType.Reflect, default(MCvScalar));

            //// closing
            //CvInvoke.Dilate(threshold, temp, element, new Point(-1, -1), 2, Emgu.CV.CvEnum.BorderType.Reflect, default(MCvScalar));
            //CvInvoke.Erode(temp, threshold, element, new Point(-1, -1), 2, Emgu.CV.CvEnum.BorderType.Reflect, default(MCvScalar));

            return(threshold);
        }
Example #5
 public override bool Do()
 {
     try
     {
         int id1 = IntPropertys["Src1ID"].Value;
         int id2 = IntPropertys["Src2ID"].Value;
         for (int SourcesId = 0; SourcesId < Sources.Count; SourcesId++)
         {
             var srcDatas = Sources[SourcesId].GetOut();
             if ((id1 < srcDatas.Count) && (id2 < srcDatas.Count))
             {
                 Mat resImg = new Mat();
                 CvInvoke.AddWeighted(srcDatas[id1].Image, FloatPropertys["Src1"].Value,
                                      srcDatas[id2].Image, FloatPropertys["Src2"].Value,
                                      FloatPropertys["Gamma"].Value,
                                      resImg, DepthType.Cv8U);
                 this.m_out.Add(new DataSrc(resImg, srcDatas[id1].Info, false));
             }
         }
     }
     catch (Exception ex)
     {
         Console.WriteLine(ex.Message);
         return(false);
     }
     return(true);
 }
Example #6
        public void SavePreprocess(List <CircleF> list_Pupil, String LorR)
        {
            if (img_Gray != null && img_Threshold != null && img_Edge != null)
            {
                img_Gray.Save(LorR + "\\" + "img_Gray_" + LorR + ".jpg");
                img_Bgr.Save(LorR + "\\" + "img_Bgr_" + LorR + ".jpg");
                img_Threshold.Save(LorR + "\\" + "img_Threshold" + LorR + ".jpg");
                img_Edge.Save(LorR + "\\" + "img_Edge" + LorR + ".jpg");
                img_Ada.Save(LorR + "\\" + "img_Ada7" + LorR + ".jpg");

                img_Sobel.Save(LorR + "\\" + "img_Sobel" + LorR + ".jpg");
                img_SobelX.Save(LorR + "\\" + "img_SobelX" + LorR + ".jpg");
                img_SobelY.Save(LorR + "\\" + "img_SobelY" + LorR + ".jpg");

                img_laplace.Save(LorR + "\\" + "img_laplace" + LorR + ".jpg");
                img_laplaceByte.Save(LorR + "\\" + "img_laplaceByte" + LorR + ".jpg");

                sobelImage.Save(LorR + "\\" + "sobelImage" + LorR + ".jpg");

                img_EdgeText.Save(LorR + "\\" + "img_EdgeText" + LorR + ".jpg");


                img_overlap = img_Gray.CopyBlank();
                CvInvoke.AddWeighted(img_Edge, 0.3, img_Gray, 0.7, 0, img_overlap);
                img_overlap.Save(LorR + "\\" + "img_overlap" + LorR + ".jpg");
            }

            Draw_cyedg_cybla(list_Pupil, LorR + "\\", img_Edge, img_Threshold);
        }
Example #7
            /// <summary>
            /// Adds an array of floats to a row: row = row*alpha + values*beta + gamma
            /// </summary>
            /// <param name="RowIndex">Index of the row</param>
            /// <param name="values">Values to add to the row; its length must equal the number of columns of the matrix</param>
            /// <param name="alpha">multiplier for the row</param>
            /// <param name="beta">multiplier for values</param>
            /// <param name="gamma">value added to every element</param>
            public static void AddWeightedFloatsToRow(this Mat m, int RowIndex, float[] values, double alpha = 1, double beta = 1, double gamma = 0)
            {
                if (m.Depth != DepthType.Cv32F)
                {
                    throw new FormatException("The matrix is not a float matrix");
                }

                if (RowIndex < 0 || RowIndex >= m.Rows)
                {
                    throw new FieldAccessException("Row " + RowIndex + " does not exist in the matrix");
                }

                if (values.Length != m.Cols)
                {
                    throw new FieldAccessException("The matrix columns (" + m.Cols + ") do not match the array length (" + values.Length + ")");
                }

                Mat rowmat = m.GetRowHeader(RowIndex);
                Mat valmat = values.GetMatHeader();

                CvInvoke.AddWeighted(rowmat, alpha, valmat, beta, gamma, rowmat);

                rowmat.Dispose();
                valmat.Dispose();
            }
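A small usage sketch, assuming GetRowHeader and GetMatHeader are companion extensions from the same utility class (they are not shown here); the matrix contents are made up for illustration:

            Mat m = new Mat(3, 4, DepthType.Cv32F, 1);
            m.SetTo(new MCvScalar(1.0));
            float[] values = { 0.5f, 1.0f, 1.5f, 2.0f };

            // row 1 becomes row * 1 + values * 2 + 10
            m.AddWeightedFloatsToRow(1, values, alpha: 1, beta: 2, gamma: 10);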
Example #8
        static void Main(string[] args)
        {
            var img       = CvInvoke.Imread(Path.Join("resources", "ZeroSweater.jpg"));
            var gray      = new Mat(img.Rows, img.Cols, Emgu.CV.CvEnum.DepthType.Cv16S, 1);
            var gradX     = new Mat(gray.Rows, gray.Cols, Emgu.CV.CvEnum.DepthType.Cv16S, 1);
            var gradY     = new Mat(gray.Rows, gray.Cols, Emgu.CV.CvEnum.DepthType.Cv16S, 1);
            var absGradX  = new Mat(gray.Rows, gray.Cols, Emgu.CV.CvEnum.DepthType.Cv8U, 1);
            var absGradY  = new Mat(gray.Rows, gray.Cols, Emgu.CV.CvEnum.DepthType.Cv8U, 1);
            var sobelGrad = new Mat(gray.Rows, gray.Cols, Emgu.CV.CvEnum.DepthType.Cv8U, 1);

            CvInvoke.CvtColor(img, gray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
            CvInvoke.GaussianBlur(gray, gray, new System.Drawing.Size(3, 3), 0);

            // Compute the gradients at 16-bit signed depth so negative responses survive;
            // ConvertScaleAbs below takes the absolute value and converts back to 8-bit.
            CvInvoke.Sobel(gray, gradX, Emgu.CV.CvEnum.DepthType.Cv16S, 1, 0, 3);
            CvInvoke.Sobel(gray, gradY, Emgu.CV.CvEnum.DepthType.Cv16S, 0, 1, 3);

            CvInvoke.ConvertScaleAbs(gradX, absGradX, 1, 0);
            CvInvoke.ConvertScaleAbs(gradY, absGradY, 1, 0);

            CvInvoke.AddWeighted(absGradX, .5, absGradY, .5, 0, sobelGrad);

            CvInvoke.Imshow("sobel x", absGradX);
            CvInvoke.Imshow("sobel Y", absGradY);
            CvInvoke.Imshow("sobel", sobelGrad);

            CvInvoke.Imwrite("sobelX.jpg", absGradX);
            CvInvoke.Imwrite("sobelY.jpg", absGradY);
            CvInvoke.Imwrite("sobel.jpg", sobelGrad);

            CvInvoke.Imshow("gray", gray);
            CvInvoke.WaitKey(0);
        }
Example #9
File: Form1.cs Project: xiaodelea/Emgucv
        private void sobelToolStripMenuItem_Click(object sender, EventArgs e)
        {
            try
            {
                if (pictureBox1.Image == null)
                {
                    return;
                }

                float[,] data =
                {
                    { -1, 0, 1 },
                    { -2, 0, 2 },
                    { -1, 0, 1 }
                };
                Matrix <float> SEx = new Matrix <float>(data);

                Matrix <float> SEy = SEx.Transpose();

                var img = new Bitmap(pictureBox1.Image)
                          .ToImage <Bgr, float>();

                var Gx = new Mat();
                var Gy = new Mat();

                CvInvoke.Sobel(img, Gx, Emgu.CV.CvEnum.DepthType.Cv32F, 1, 0);
                CvInvoke.Sobel(img, Gy, Emgu.CV.CvEnum.DepthType.Cv32F, 0, 1);

                var gx = Gx.ToImage <Gray, float>();
                var gy = Gy.ToImage <Gray, float>();

                var Gxx = new Mat(Gx.Size, Emgu.CV.CvEnum.DepthType.Cv32F, 1);
                var Gyy = new Mat(Gx.Size, Emgu.CV.CvEnum.DepthType.Cv32F, 1);

                // Scale by 1 and take the absolute value to convert the 32F gradients to 8-bit.
                CvInvoke.ConvertScaleAbs(Gx, Gxx, 1, 0);
                CvInvoke.ConvertScaleAbs(Gy, Gyy, 1, 0);

                var mag = new Mat(Gx.Size, Emgu.CV.CvEnum.DepthType.Cv8U, 1);
                CvInvoke.AddWeighted(Gxx, 0.5, Gyy, 0.5, 0, mag);

                AddImage(mag.ToImage <Bgr, byte>(), "Mag Absolute");


                gx._Mul(gx);
                gy._Mul(gy);

                var M = new Mat(gx.Size, Emgu.CV.CvEnum.DepthType.Cv8U, 1);
                CvInvoke.Sqrt(gx + gy, M);
                AddImage(M.ToImage <Bgr, byte>(), "Mag Squared");
                //CvInvoke.Filter2D(img, Gx, SEx, new Point(-1, -1));
                //CvInvoke.Filter2D(img, Gy, SEy, new Point(-1, -1));

                pictureBox1.Image = M.ToBitmap();
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }
        }
Example #10
        private void ShowDetail()
        {
            int id = -1;

            this.Dispatcher.Invoke(() =>
            {
                id = cbFOV.SelectedIndex;
            });
            System.Drawing.Rectangle ROI = mModel.Gerber.FOVs[id].ROI;
            this.Dispatcher.Invoke(() =>
            {
                txtROIX.Text      = ROI.X.ToString();
                txtROIY.Text      = ROI.Y.ToString();
                txtROIWidth.Text  = ROI.Width.ToString();
                txtROIHeight.Text = ROI.Height.ToString();
            });

            if (mImage != null)
            {
                var modelFov = mModel.FOV;
                System.Drawing.Rectangle ROIGerber = new System.Drawing.Rectangle(
                    mAnchorROIGerber[id].X - modelFov.Width / 2, mAnchorROIGerber[id].Y - modelFov.Height / 2,
                    modelFov.Width, modelFov.Height);
                mImage.ROI = ROI;
                using (Image <Bgr, byte> imgGerberBgr = new Image <Bgr, byte>(ROIGerber.Size))
                    using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
                    {
                        for (int i = 0; i < mModel.Gerber.PadItems.Count; i++)
                        {
                            PadItem item = mModel.Gerber.PadItems[i];
                            if (item.FOVs.Count > 0)
                            {
                                if (item.FOVs[0] == id)
                                {
                                    System.Drawing.Point[] cntPointSub = new System.Drawing.Point[item.ContourAdjust.Length];
                                    for (int j = 0; j < cntPointSub.Length; j++)
                                    {
                                        cntPointSub[j] = new System.Drawing.Point(item.ContourAdjust[j].X - ROIGerber.X, item.ContourAdjust[j].Y - ROIGerber.Y);
                                    }
                                    contours.Push(new VectorOfPoint(cntPointSub));
                                }
                            }
                        }
                        CvInvoke.DrawContours(imgGerberBgr, contours, -1, new MCvScalar(255), -1);
                        CvInvoke.AddWeighted(imgGerberBgr, 0.5, mImage, 0.5, 1, imgGerberBgr);
                        this.Dispatcher.Invoke(() =>
                        {
                            BitmapSource bms = Utils.Convertor.Bitmap2BitmapSource(imgGerberBgr.Bitmap);
                            imb.Source       = bms;
                        });
                    }
                GC.Collect();
                GC.WaitForPendingFinalizers();
            }
            else
            {
                mLog.Info(string.Format("Can't capture image in FOV: {0}", id + 1));
            }
        }
Example #11
        /// <remarks>
        /// Converted implementation of MSSIM from
        /// https://docs.opencv.org/2.4/doc/tutorials/gpu/gpu-basics-similarity/gpu-basics-similarity.html
        /// </remarks>
        public static double MSSIMCpu(Mat i1, Mat i2, MSSIMCpuParam b, double C1 = defaultC1, double C2 = defaultC2)
        {
            try
            {
                var gaussianSize = new Size(3, 3);
                i1.ConvertTo(b.I1, b.DepthType);
                i2.ConvertTo(b.I2, b.DepthType);

                CvInvoke.Multiply(b.I1, b.I1, b.I1_2);
                CvInvoke.Multiply(b.I2, b.I2, b.I2_2);
                CvInvoke.Multiply(b.I1, b.I2, b.I1_I2);

                CvInvoke.GaussianBlur(b.I1, b.Mu1, gaussianSize, 1.5);
                CvInvoke.GaussianBlur(b.I2, b.Mu2, gaussianSize, 1.5);

                CvInvoke.Multiply(b.Mu1, b.Mu1, b.Mu1_2);
                CvInvoke.Multiply(b.Mu2, b.Mu2, b.Mu2_2);
                CvInvoke.Multiply(b.Mu1, b.Mu2, b.Mu1_Mu2);

                CvInvoke.GaussianBlur(b.I1_2, b.Sigma1_2, gaussianSize, 1.5);
                CvInvoke.GaussianBlur(b.I2_2, b.Sigma2_2, gaussianSize, 1.5);
                CvInvoke.GaussianBlur(b.I1_I2, b.Sigma12, gaussianSize, 1.5);

                //sigma1_2 = sigma1_2 - mu1_2
                CvInvoke.AddWeighted(b.Sigma1_2, 1, b.Mu1_2, -1, 0, b.Sigma1_2);

                //sigma2_2 = sigma2_2 - mu2_2
                CvInvoke.AddWeighted(b.Sigma2_2, 1, b.Mu2_2, -1, 0, b.Sigma2_2);

                //sigma12 = sigma12 - mu1_mu2
                CvInvoke.AddWeighted(b.Sigma12, 1, b.Mu1_Mu2, -1, 0, b.Sigma12);

                // t1 = 2 * mu1_mu2 + C1
                CvInvoke.AddWeighted(b.Mu1_Mu2, 2, b.Ones, C1, 0, b.T1);
                // t2 = 2 * sigma12 + C2
                CvInvoke.AddWeighted(b.Sigma12, 2, b.Ones, C2, 0, b.T2);
                CvInvoke.Multiply(b.T1, b.T2, b.T3);

                //t1 = mu1_2 + mu2_2 + C1;
                CvInvoke.AddWeighted(b.Mu1_2, 1, b.Mu2_2, 1, C1, b.T1);
                //t2 = sigma1_2m + sigma2_2m + C2;
                CvInvoke.AddWeighted(b.Sigma1_2, 1, b.Sigma2_2, 1, C2, b.T2);
                CvInvoke.Multiply(b.T1, b.T2, b.T1);

                CvInvoke.Divide(b.T3, b.T1, b.SSIM_map);

                var mssim = CvInvoke.Mean(b.SSIM_map);
                return(Math.Sqrt(
                           mssim.V0 * mssim.V0 +
                           mssim.V1 * mssim.V1 +
                           mssim.V2 * mssim.V2
                           ));
            }
            catch (Exception)
            {
                throw;
            }
        }
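For reference, the map built above is the per-pixel SSIM from the linked tutorial, SSIM = ((2*mu1*mu2 + C1) * (2*sigma12 + C2)) / ((mu1^2 + mu2^2 + C1) * (sigma1^2 + sigma2^2 + C2)), computed with Gaussian-windowed means and (co)variances; the returned scalar then combines the per-channel means of that map.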
Example #12
        /// <summary>
        /// Sharp image with default values.
        /// </summary>
        /// <param name="input">input image</param>
        /// <returns>sharpened image</returns>
        public static UMat Sharp(UMat input)
        {
            UMat temp = new UMat();

            CvInvoke.GaussianBlur(input, temp, new Size(0, 0), 3);
            CvInvoke.AddWeighted(input, 1.5, temp, -0.5, 0, temp);
            input.Dispose();
            return(temp);
        }
Example #13
        // Overlay the two images
        private Image <Bgr, byte> PicSubtraction(Image <Bgr, byte> pic1, Image <Bgr, byte> pic2)
        {
            Image <Bgr, byte> outpic = new Image <Bgr, byte>(pic1.Size);

            pic1 = ContourFilling(ToBin(picture));
            pic2 = ContourFilling2(ToBin(picture));
            CvInvoke.AddWeighted(pic1, 0.5, pic2, 0.5, 1, outpic);

            return(outpic);
        }
Example #14
        public override bool Do()
        {
            try
            {
                int        comparableID     = 0;
                BaseFilter betaSourceFilter = Graph.GetFilter(EnumPropertys["BetaSource"].Value);


                for (int SourcesId = 0; SourcesId < Sources.Count; SourcesId++)
                {
                    var AlphaDatas = Sources[SourcesId].GetOut();
                    var betaDatas  = betaSourceFilter.GetOut();

                    for (int ImgId = 0; ImgId < AlphaDatas.Count; ImgId++)
                    {
                        Mat alphaImg = AlphaDatas[ImgId].Image;
                        Mat betaImg  = null;


                        if (FilePropertys["BetaFileImg"].FileName.Length > 0)
                        {
                            betaImg = CvInvoke.Imread(FilePropertys["BetaFileImg"].FileName);
                        }
                        else
                        {
                            if (comparableID < AlphaDatas.Count)
                            {
                                betaImg = betaDatas[comparableID].Image.Clone();
                            }
                            else
                            {
                                betaImg = new Mat(alphaImg.Size, alphaImg.Depth, alphaImg.NumberOfChannels);
                            }
                        }
                        comparableID++;


                        Mat resImg = new Mat();
                        CvInvoke.AddWeighted(alphaImg, FloatPropertys["Alpha"].Value,
                                             betaImg, FloatPropertys["Beta"].Value,
                                             FloatPropertys["Gamma"].Value,
                                             resImg, DepthType.Cv8U);

                        this.m_out.Add(new DataSrc(resImg, AlphaDatas[ImgId].Info, false));
                    }
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                return(false);
            }
            return(true);
        }
Example #15
            /// <summary>
            /// Multiplies the values of the diagonal of a matrix by a factor
            /// </summary>
            /// <param name="factor">factor by which the diagonal will be multiplied</param>
            public static void ScaleDiagonal(this Mat m, double factor)
            {
                Mat temp = new Mat(m.Rows, m.Cols, m.Depth, m.NumberOfChannels);

                double f = factor - 1;

                CvInvoke.SetIdentity(temp, new MCvScalar(f, f, f, f));

                CvInvoke.AddWeighted(m, 1, temp, 1, 0, m);

                temp.Dispose();
            }
Example #16
        public static UMat getExposureCorrected(ref UMat img, double ev)
        {
            UMat dblImg = new UMat(img.Rows, img.Cols, Emgu.CV.CvEnum.DepthType.Cv64F, img.NumberOfChannels);
            UMat outImg = new UMat(img.Rows, img.Cols, Emgu.CV.CvEnum.DepthType.Cv64F, img.NumberOfChannels);

            img.ConvertTo(dblImg, Emgu.CV.CvEnum.DepthType.Cv64F);
            //outImg = (UMat)ev*dblImg;
            CvInvoke.AddWeighted(dblImg, ev, dblImg, 0, 0, outImg);
            //CvInvoke.cvConvertScale(dblImg, outImg, ev,0);
            dblImg.Dispose();
            img.Dispose();
            return(outImg);
        }
Example #17
        public static UMat getContrastAdjusted(ref UMat img, double cont1, double cont2)
        {
            UMat dblImg = new UMat(img.Rows, img.Cols, Emgu.CV.CvEnum.DepthType.Cv64F, img.NumberOfChannels);
            UMat outImg = new UMat(img.Rows, img.Cols, Emgu.CV.CvEnum.DepthType.Cv64F, img.NumberOfChannels);

            img.ConvertTo(dblImg, Emgu.CV.CvEnum.DepthType.Cv64F);
            //outImg = (UMat)ev*dblImg;
            CvInvoke.AddWeighted(dblImg, cont1, dblImg, 0, cont1 * (-128) + cont2 + 128, outImg);
            //CvInvoke.cvConvertScale(dblImg, outImg, ev,0);
            dblImg.Dispose();
            img.Dispose();
            return(outImg);
        }
Example #18
        public Image <Bgr, byte> getProcessed(Mat img)
        {
            Mat dblImg     = new Mat(img.Rows, img.Cols, Emgu.CV.CvEnum.DepthType.Cv64F, img.NumberOfChannels);
            Mat dblBlurImg = new Mat(img.Rows, img.Cols, Emgu.CV.CvEnum.DepthType.Cv64F, img.NumberOfChannels);
            Mat outImg     = new Mat(img.Rows, img.Cols, Emgu.CV.CvEnum.DepthType.Cv64F, img.NumberOfChannels);

            img.ConvertTo(dblImg, Emgu.CV.CvEnum.DepthType.Cv64F);
            // sigma and amount are not parameters here; they appear to be fields of the containing class.
            int k = 2 * (int)Math.Round(3.0 * sigma) + 1;

            CvInvoke.GaussianBlur(dblImg, dblBlurImg, new Size(k, k), sigma, sigma);
            CvInvoke.AddWeighted(dblImg, 1.0 + amount, dblBlurImg, -amount, 0, outImg);
            return(outImg.ToImage <Bgr, byte>());
        }
Example #19
        // Fill the defect contours
        private Image <Bgr, byte> ContourFilling3(Image <Bgr, byte> pic)
        {
            Image <Bgr, byte> outpic = new Image <Bgr, byte>(pic.Size);
            Image <Ycc, byte> ycc    = pic.Convert <Ycc, byte>();

            for (int i = 0; i < ycc.Height; i++)
            {
                for (int j = 0; j < ycc.Width; j++)
                {
                    if (ycc[i, j].Cr > 35 && ycc[i, j].Cr < 148 &&
                        ycc[i, j].Cb > 48 && ycc[i, j].Cb < 141)
                    {
                        ycc[i, j] = new Ycc(0, 0, 0);
                    }
                    else
                    {
                        ycc[i, j] = new Ycc(255, 255, 255);
                    }
                }
            }
            Image <Gray, byte> gray = ycc.Convert <Gray, byte>();

            gray = gray.ThresholdBinary(new Gray(100), new Gray(255));
            gray = gray.Canny(100, 60);
            Image <Gray, byte>    outcon = new Image <Gray, byte>(pic.Size);
            VectorOfVectorOfPoint con    = new VectorOfVectorOfPoint();

            CvInvoke.FindContours(gray, con, outcon, RetrType.External, ChainApproxMethod.ChainApproxNone);
            int n = 0;

            for (int i = 0; i < con.Size; i++)
            {
                if (CvInvoke.ContourArea(con[i]) > 0)
                {
                    n++;
                }
            }
            textBox1.Text = "Total of " + n.ToString() + " defects" + "      " + "\n";
            n             = 0;
            for (int i = 0; i < con.Size; i++)
            {
                if (CvInvoke.ContourArea(con[i]) > 0)
                {
                    CvInvoke.DrawContours(outpic, con, i, new MCvScalar(0, 255, 0), 5);
                    textBox1.Text = textBox1.Text + "Defect " + (++n).ToString() + " has an area of " + CvInvoke.ContourArea(con[i]) + "    \n";
                }
            }
            CvInvoke.AddWeighted(outpic, 0.5, picture, 0.5, 0, outpic);
            return(outpic);
        }
Example #20
        public static UMat getSaturationAdjusted(ref UMat img, double amount)
        {
            Image <Hsv, double> outImg = img.ToImage <Hsv, double>();
            UMat dblImg = new UMat(img.Rows, img.Cols, Emgu.CV.CvEnum.DepthType.Cv64F, img.NumberOfChannels);

            var colors = new VectorOfUMat(3);

            CvInvoke.Split(outImg, colors);
            double shift = (1 + amount) >= 0.0 ? 1 + amount : 0;

            CvInvoke.AddWeighted(colors[1], shift, colors[1], 0, 0, colors[1]);
            CvInvoke.Merge(colors, dblImg);
            return(dblImg);
        }
Example #21
        // *************************** 模块8 END  ***********************************************************//

        /*
         * 模块9:LAD_resShow
         * 功能: 显示最后效果
         */
        // *************************** 模块9 BEGIN  ***********************************************************//
        public Mat LAD_resShow(Mat srcImage, Mat bianaryImage, Matrix <double> left_Matrix, Matrix <double> right_Matrix)
        {
            Mat image = new Mat();

            bianaryImage.CopyTo(image);

            Mat uphalf     = LAD_getUpHalf(srcImage);
            Mat bottomHalf = LAD_getROI(srcImage);

            Mat warpImage = new Mat(bianaryImage.Size, DepthType.Cv8U, 3);

            warpImage.SetTo(new Bgr(0, 0, 0).MCvScalar);

            Mat warpCopy = warpImage.Clone();

            int H = bianaryImage.Rows;
            int W = bianaryImage.Cols;

            Point[] left_fitted_points = new Point[H];
            for (int x = 0; x < H; x++)
            {
                int y = (int)(left_Matrix.Data[0, 0] + left_Matrix.Data[1, 0] * x + left_Matrix.Data[2, 0] * Math.Pow(x, 2));
                left_fitted_points[x] = new Point(y, x);
            }
            CvInvoke.Polylines(warpImage, left_fitted_points, false, new Bgr(255, 0, 0).MCvScalar, 5, LineType.EightConnected);

            Point[] right_fitted_points = new Point[H];
            for (int x = 0; x < H; x++)
            {
                int y = (int)(right_Matrix.Data[0, 0] + right_Matrix.Data[1, 0] * x + right_Matrix.Data[2, 0] * Math.Pow(x, 2));
                right_fitted_points[x] = new Point(y, x);
            }
            CvInvoke.Polylines(warpImage, right_fitted_points, false, new Bgr(255, 0, 0).MCvScalar, 5, LineType.EightConnected);

            Mat rev = LAD_reverseAffineTrans(warpImage);

            Mat laneImage = new Mat();

            CvInvoke.AddWeighted(bottomHalf, 0.5, rev, 0.5, 0, laneImage);

            Mat tmp = new Mat();

            CvInvoke.AddWeighted(uphalf, 0.5, warpCopy, 0.5, 0, tmp);

            Mat dst = LAD_ImageStitch_2(tmp, laneImage);

            return(dst);
        }
Example #22
        private Image <Gray, Byte> processImage(Image <Bgr, Byte> image)
        {
            Mat hsvImg = new Mat();

            CvInvoke.CvtColor(image, hsvImg, ColorConversion.Bgr2Hsv);
            //Threshold image, keep only the red pixel
            Mat lower_hue_range = new Mat();
            Mat upper_hue_range = new Mat();

            CvInvoke.InRange(hsvImg, new ScalarArray(new MCvScalar(0, 100, 30)), new ScalarArray(new MCvScalar(80, 255, 255)), lower_hue_range); //80(multi color) -> 10(only red)
            CvInvoke.InRange(hsvImg, new ScalarArray(new MCvScalar(160, 100, 30)), new ScalarArray(new MCvScalar(179, 255, 255)), upper_hue_range);
            //// Combine the above two images
            CvInvoke.AddWeighted(lower_hue_range, 1.0, upper_hue_range, 1.0, 0.0, hsvImg);
            //CvInvoke.MedianBlur(hue_image, hue_image, 9);
            return(new Image <Gray, Byte>(hsvImg.Bitmap));
        }
Example #23
        public override void Process(ref Image <Bgr, byte> image)
        {
            // the gradient image in x
            var gradX = image.Sobel(_order, 0, _apertureSize).ConvertScale <byte>(1, 0);
            // the gradient image in y
            var gradY = image.Sobel(0, _order, _apertureSize).ConvertScale <byte>(1, 0);
            var grad  = new Image <Bgr, byte>(image.Width, image.Height);

            // blend the gradient images
            CvInvoke.AddWeighted(gradX, 0.5, gradY, 0.5, 1.0, grad);
            // invert the image
            if (_invert)
            {
                CvInvoke.BitwiseNot(grad, grad);
            }
            image = grad.Convert <Bgr, byte>();
        }
Example #24
        static public Mat drawRect(Mat inputMat, Point pointStart, Point pointEnd)
        {
            Mat overlay   = inputMat.Clone();
            Mat outputMat = inputMat.Clone();

            Point     rectPoint = new Point(pointStart.X < pointEnd.X ? pointStart.X : pointEnd.X, pointStart.Y < pointEnd.Y ? pointStart.Y : pointEnd.Y);
            Size      rectSize  = new Size(Math.Abs(pointStart.X - pointEnd.X), Math.Abs(pointStart.Y - pointEnd.Y));
            Rectangle rect      = new Rectangle(rectPoint, rectSize);

            CvInvoke.Rectangle(overlay, rect, new Bgr(Color.Cyan).MCvScalar, 2);
            CvInvoke.AddWeighted(inputMat, 0.7, overlay, 0.3, 0, outputMat);
            CvInvoke.Rectangle(outputMat, rect, new Bgr(Color.Cyan).MCvScalar, 1);

            return(outputMat);
        }
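A short usage sketch for the translucent-rectangle helper above; the file name and corner points are placeholders:

        Mat frame = CvInvoke.Imread("frame.jpg");
        Mat annotated = drawRect(frame, new Point(40, 60), new Point(220, 180));
        CvInvoke.Imshow("selection", annotated);
        CvInvoke.WaitKey(0);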
Example #25
        public static UMat getColorAdjusted(ref UMat img, double redshift, double greenshift, double blueshift)
        {
            double shift;
            UMat   dblImg = new UMat(img.Rows, img.Cols, Emgu.CV.CvEnum.DepthType.Cv64F, img.NumberOfChannels);

            img.ConvertTo(dblImg, Emgu.CV.CvEnum.DepthType.Cv64F);
            var colors = new VectorOfUMat(3);

            CvInvoke.Split(img, colors);
            shift = (1 + redshift) > 0 ? (1 + redshift) : 0;
            CvInvoke.AddWeighted(colors[2], shift, colors[2], 0, 0, colors[2]);
            shift = (1 + greenshift) > 0 ? (1 + greenshift) : 0;
            CvInvoke.AddWeighted(colors[1], shift, colors[1], 0, 0, colors[1]);
            shift = (1 + blueshift) > 0 ? (1 + blueshift) : 0;
            CvInvoke.AddWeighted(colors[0], shift, colors[0], 0, 0, colors[0]);
            CvInvoke.Merge(colors, dblImg);
            img.Dispose();
            return(dblImg);
        }
Example #26
        public static Image <Bgr, byte> MaskOverlay(Image <Bgr, byte> Image, Image <Gray, Byte> Mask)
        {
            Image <Bgr, byte> greenImage = Image.CopyBlank();

            greenImage.SetValue(new Bgr(0, 255, 0), Mask);
            Image <Bgr, byte> redImage = Image.CopyBlank();

            Mask._Not();
            redImage.SetValue(new Bgr(0, 0, 255), Mask);
            var colorMask    = redImage.Add(greenImage);
            var blendedImage = Image.CopyBlank();

            CvInvoke.AddWeighted(Image, 0.8, colorMask, 0.2, 0, blendedImage);

            redImage.Dispose();
            greenImage.Dispose();
            colorMask.Dispose();

            return(blendedImage);
        }
Example #27
        private void AddNoise(Mat image)
        {
            const double noiseLevel = 0.75;
            var          mean       = new MCvScalar(0);
            var          std        = new MCvScalar(255);
            const int    gaussSize  = 13;
            const double scale      = 0.5;
            const double shift      = 100;

            var noise = new Mat(image.Size, DepthType.Cv8U, 1);

            using (ScalarArray scalarArray1 = new ScalarArray(mean))
                using (ScalarArray scalarArray2 = new ScalarArray(std))
                {
                    CvInvoke.Randn(noise, scalarArray1, scalarArray2);
                }
            CvInvoke.GaussianBlur(noise, noise, new Size(gaussSize, gaussSize), 0.0);
            CvInvoke.AddWeighted(image, 1 - noiseLevel, noise, noiseLevel, 0, image, image.Depth);
            CvInvoke.ConvertScaleAbs(image, image, scale, shift);
        }
Example #28
        public static Mat sobelEdgeDetection(ref Mat src_roi)
        {
            Mat roi_gray  = new Mat();
            Mat grad_x    = new Mat();
            Mat grad_y    = new Mat();
            Mat grad_absx = new Mat();
            Mat grad_absy = new Mat();
            Mat roi_soble = new Mat();

            CvInvoke.CvtColor(src_roi, roi_gray, ColorConversion.Rgb2Gray);

            CvInvoke.Sobel(roi_gray, grad_x, DepthType.Cv16S, 1, 0, 3, 1, 1, BorderType.Default); // Sobel detection in the x direction
            CvInvoke.ConvertScaleAbs(grad_x, grad_absx, 1, 0);

            CvInvoke.Sobel(roi_gray, grad_y, DepthType.Cv16S, 0, 1, 3, 1, 1, BorderType.Default); // Sobel detection in the y direction
            CvInvoke.ConvertScaleAbs(grad_y, grad_absy, 1, 0);

            CvInvoke.AddWeighted(grad_absx, 0.5, grad_absy, 0.5, 0, roi_soble);

            return(roi_soble);
        }
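A minimal usage sketch for the Sobel helper above; the input file name is a placeholder:

        Mat roi = CvInvoke.Imread("roi.jpg");
        Mat edges = sobelEdgeDetection(ref roi);
        CvInvoke.Imshow("sobel edges", edges);
        CvInvoke.WaitKey(0);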
Example #29
        /**
         * \brief	Draws a bounding box on the viewport.
         * \param	void
         * \return	void
         */
        private void drawBoundingBox()
        {
            if (mainImage != null)
            {
                Image <Bgr, byte> overlay = mainImage.Copy();

                if (showingBox)
                {
                    CvInvoke.Rectangle(overlay, new Rectangle(
                                           boxStart.X,
                                           boxStart.Y,
                                           (boxEnd.X - boxStart.X),
                                           (boxEnd.Y - boxStart.Y)),
                                       new MCvScalar(255, 200, 10), -1);

                    CvInvoke.AddWeighted(overlay, 0.3, mainImage, 0.7, 0, boxImage);
                }

                viewport.Image = boxImage.Bitmap;
            }
        }
Example #30
        // Save the circle & eye overlapping image for debugging purposes (optional)
        private void Draw_cyedg_cybla(List <CircleF> list_Pupil, String Directory, Image <Gray, byte> Edge, Image <Gray, byte> Black)
        {
            int count = 0;

            foreach (CircleF cy in list_Pupil)
            {
                Image <Gray, byte> cyedg = new Image <Gray, byte>(img_Edge.Width, img_Edge.Height, new Gray(255));
                //cyedg.Draw(cy, new Gray(0), cyedg_thickness);

                for (double theta = 0.0; theta < 2.0; theta += 0.01)
                {
                    //if ((theta > 0.25 && theta < 0.75) || theta > 1.0 && theta < 1.99) continue;
                    //if ( theta > 1.0 && theta < 1.99) continue;
                    double rx = cy.Center.X + cy.Radius * Math.Cos(theta * Math.PI) + 0.5;
                    double ry = cy.Center.Y + cy.Radius * Math.Sin(theta * Math.PI) + 0.5;
                    cyedg.Draw(new CircleF(new PointF((int)rx, (int)ry), 0), new Gray(0), 0);

                    rx = cy.Center.X + (cy.Radius + 1) * Math.Cos(theta * Math.PI) + 0.5;
                    ry = cy.Center.Y + (cy.Radius + 1) * Math.Sin(theta * Math.PI) + 0.5;
                    cyedg.Draw(new CircleF(new PointF((int)rx, (int)ry), 0), new Gray(0), 0);

                    rx = cy.Center.X + (2) * Math.Cos(theta * Math.PI) + 0.5;
                    ry = cy.Center.Y + (2) * Math.Sin(theta * Math.PI) + 0.5;
                    cyedg.Draw(new CircleF(new PointF((int)rx, (int)ry), 0), new Gray(0), 0);
                }



                CvInvoke.AddWeighted(img_SobelX.Convert <Gray, byte>(), 0.5, cyedg, 0.5, 0, cyedg);
                cyedg.Save(Directory + "cyedg" + count + ".jpg");

                Image <Gray, byte> cybla = new Image <Gray, byte>(img_Edge.Width, img_Edge.Height, new Gray(255));
                cybla.Draw(cy, new Gray(0), cyble_thickness);
                CvInvoke.AddWeighted(Black, 0.5, cybla, 0.5, 0, cybla);
                cybla.Save(Directory + "cybla" + count + ".jpg");

                count++;
            }
        }