Example #1
        private void differenceToolStripMenuItem_Click(object sender, EventArgs e)
        {
            try
            {
                List <string> list = new List <string>();
                foreach (TreeNode item in treeView1.Nodes)
                {
                    if (item.Checked)
                    {
                        list.Add(item.Text);
                    }
                }

                if (list.Count > 1)
                {
                    var img = new Mat();
                    CvInvoke.AbsDiff(imgList[list[0]], imgList[list[1]], img);
                    AddImage(img.ToImage <Bgr, byte>(), "Difference");
                    pictureBox1.Image = img.ToBitmap();
                }
                else
                {
                    MessageBox.Show("Select two images.");
                }
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }
        }
Example #2
        public static Rectangle detect_blue_rectangle(Bitmap b1, Bitmap b2)
        {
            Rectangle         ret  = Rectangle.Empty;
            Image <Bgr, Byte> img1 = new Image <Bgr, byte>(b1);
            Image <Bgr, Byte> img2 = new Image <Bgr, byte>(b2);
            Mat diff = new Mat();

            CvInvoke.AbsDiff(img1, img2, diff);
            Mat tmp = new Mat();

            CvInvoke.CvtColor(diff, tmp, ColorConversion.Bgr2Gray);
            CvInvoke.Threshold(tmp, diff, 0, 255, ThresholdType.Binary | ThresholdType.Otsu);
            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                CvInvoke.FindContours(diff, contours, null, RetrType.External, ChainApproxMethod.ChainApproxNone);
                int    count = contours.Size;
                double m     = 0.0;
                for (int i = 0; i < count; i++)
                {
                    double d = CvInvoke.ContourArea(contours[i]);
                    if (d > m)
                    {
                        m   = d;
                        ret = CvInvoke.BoundingRectangle(contours[i]);
                    }
                }
            }
            Program.logIt(string.Format("detect_blue_rectangle: -- {0}", ret));
            return(ret);
        }
Example #3
        private static int[] GetAdjust(Image <Gray, byte> ImgCap, Image <Gray, byte> ImgGerber, int RangeX, int RangeY)
        {
            int    x_ok = 0;
            int    y_ok = 0;
            double diff = 2448 * 2018;

            for (int x = -RangeX; x < RangeX; x++)
            {
                for (int y = -RangeY; y < RangeY; y++)
                {
                    using (Image <Gray, byte> imgTransform = ImageProcessingUtils.ImageTransformation(ImgCap.Copy(), x, y))
                    {
                        CvInvoke.AbsDiff(imgTransform, ImgGerber, imgTransform);

                        int count = CvInvoke.CountNonZero(imgTransform);
                        if (count < diff)
                        {
                            diff = count;
                            x_ok = x;
                            y_ok = y;
                        }
                    }
                }
            }
            return(new int[] { x_ok, y_ok });
        }
Example #4
        /**********************************************************
        * Frame processing using background subtraction and
        * morphological operations (sensitive to lighting changes)
        **********************************************************/
        private static void ProcessFrame(Mat backgroundFrame_l, Mat backgroundFrame_r, int threshold, int erodeIterations, int dilateIterations)
        {
            //Find the difference between the background (first) frame and the current frame
            CvInvoke.AbsDiff(backgroundFrame_l, rawFrame_l, diffFrame_l);
            CvInvoke.AbsDiff(backgroundFrame_r, rawFrame_r, diffFrame_r);

            //Apply a binary threshold to the grayscale image (white pixels mark the differences)
            CvInvoke.CvtColor(diffFrame_l, grayscaleDiffFrame_l, ColorConversion.Bgr2Gray);
            CvInvoke.CvtColor(diffFrame_r, grayscaleDiffFrame_r, ColorConversion.Bgr2Gray);
            CvInvoke.Threshold(grayscaleDiffFrame_l, binaryDiffFrame_l, threshold, 255, ThresholdType.Binary);
            CvInvoke.Threshold(grayscaleDiffFrame_r, binaryDiffFrame_r, threshold, 255, ThresholdType.Binary);

            //Remove noise with an opening operation (erosion followed by dilation)
            CvInvoke.Erode(binaryDiffFrame_l, denoisedDiffFrame_l, null, new Point(-1, -1), erodeIterations, BorderType.Default, new MCvScalar(1));
            CvInvoke.Erode(binaryDiffFrame_r, denoisedDiffFrame_r, null, new Point(-1, -1), erodeIterations, BorderType.Default, new MCvScalar(1));
            CvInvoke.Dilate(denoisedDiffFrame_l, denoisedDiffFrame_l, null, new Point(-1, -1), dilateIterations, BorderType.Default, new MCvScalar(1));
            CvInvoke.Dilate(denoisedDiffFrame_r, denoisedDiffFrame_r, null, new Point(-1, -1), dilateIterations, BorderType.Default, new MCvScalar(1));

            rawFrame_l.CopyTo(finalFrame_l);
            rawFrame_r.CopyTo(finalFrame_r);

            left_camera = true;
            DetectObject(denoisedDiffFrame_l, finalFrame_l);

            left_camera = false;
            DetectObject(denoisedDiffFrame_r, finalFrame_r);
        }
Example #5
        public void TestConvolutionAndLaplace()
        {
            Mat image = new Mat(new Size(300, 400), DepthType.Cv8U, 1);

            CvInvoke.Randu(image, new MCvScalar(0.0), new MCvScalar(255.0));
            Mat laplacian = new Mat();

            CvInvoke.Laplacian(image, laplacian, DepthType.Cv8U);

            float[,] k = { { 0,  1, 0 },
                           { 1, -4, 1 },
                           { 0,  1, 0 } };
            ConvolutionKernelF kernel = new ConvolutionKernelF(k);
            Mat convoluted            = new Mat(image.Size, DepthType.Cv8U, 1);

            CvInvoke.Filter2D(image, convoluted, kernel, kernel.Center);

            Mat absDiff = new Mat();

            CvInvoke.AbsDiff(laplacian, convoluted, absDiff);
            int nonZeroPixelCount = CvInvoke.CountNonZero(absDiff);

            EmguAssert.IsTrue(nonZeroPixelCount == 0);

            //Emgu.CV.UI.ImageViewer.Show(absDiff);
        }
Example #6
        public void TestQuaternion3()
        {
            Random      r  = new Random();
            Quaternions q1 = new Quaternions();

            q1.AxisAngle = new MCvPoint3D64f(r.NextDouble(), r.NextDouble(), r.NextDouble());

            Quaternions q2 = new Quaternions();

            q2.AxisAngle = q1.AxisAngle;

            double epsilon = 1.0e-8;

            EmguAssert.IsTrue(Math.Abs(q1.W - q2.W) < epsilon);
            EmguAssert.IsTrue(Math.Abs(q1.X - q2.X) < epsilon);
            EmguAssert.IsTrue(Math.Abs(q1.Y - q2.Y) < epsilon);
            EmguAssert.IsTrue(Math.Abs(q1.Z - q2.Z) < epsilon);

            RotationVector3D rVec = new RotationVector3D(new double[] { q1.AxisAngle.X, q1.AxisAngle.Y, q1.AxisAngle.Z });
            Mat             m1    = rVec.RotationMatrix;
            Matrix <double> m2    = new Matrix <double>(3, 3);

            q1.GetRotationMatrix(m2);
            Matrix <double> diff = new Matrix <double>(3, 3);

            CvInvoke.AbsDiff(m1, m2, diff);
            double norm = CvInvoke.Norm(diff, Emgu.CV.CvEnum.NormType.C);

            EmguAssert.IsTrue(norm < epsilon);

            Quaternions q4 = q1 * Quaternions.Empty;
            //EmguAssert.IsTrue(q4.Equals(q1));
        }
Example #7
        static ContourProperties FishContour(Mat image_raw, Mat background)
        {
            bool fishcont_found = false;
            Size frsize = new Size(image_raw.Width, image_raw.Height);
            Mat image = new Mat(frsize, Emgu.CV.CvEnum.DepthType.Cv8U, 1);
            ContourProperties contprops = new ContourProperties();
            ThresholdType ttype = 0;
            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
            Mat hierarchy = new Mat();
            CvInvoke.AbsDiff(image_raw, background, image);
            // This should be 30 as the lower bound. Switched to 20 to see if I could pick up paramecia.
            CvInvoke.Threshold(image, image, 10, 255, ttype);
            // The next four lines display the thresholded image; comment them out to hide it.
            String camerawindow = "Camera Window";
            CvInvoke.NamedWindow(camerawindow);
            CvInvoke.Imshow(camerawindow, image);
            CvInvoke.WaitKey(1);
            CvInvoke.FindContours(image, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxNone);
            int fish_contour_index = 0;
            Rectangle bounding_rect = new Rectangle();
            for (int ind = 0; ind < contours.Size; ind++)
            {
                bounding_rect = CvInvoke.BoundingRectangle(contours[ind]);
                if (bounding_rect.Width > bounding_rect.Height)
                {
                    contprops.height = bounding_rect.Width;
                }
                else
                {
                    contprops.height = bounding_rect.Height;
                }
                if (contprops.height < 50 && contprops.height > 25)
                {
                    fish_contour_index = ind;
                    fishcont_found = true;
                    break;
                }
            }
            if (fishcont_found)
            {
                var contourCenter = new Point();
                var contourCOM = new Point();
                MCvMoments com = new MCvMoments();
                com = CvInvoke.Moments(contours[fish_contour_index]);
                contourCOM.X = (int)(com.M10 / com.M00);
                contourCOM.Y = (int) (com.M01 / com.M00);
                contourCenter.X = (int)(bounding_rect.X + (float)bounding_rect.Width / (float)2);
                contourCenter.Y = (int)(bounding_rect.Y + (float)bounding_rect.Height / (float)2);
                contprops.center = contourCenter;                
                contprops.com = contourCOM;
            }
            else
            {
                Console.WriteLine(contprops.com);
                Console.WriteLine(contprops.height);
                Console.WriteLine("no contours");
            }
            return contprops;
        }
Example #8
        public static void AbsDiff(this Mat mat, double value)
        {
            Mat op = new Mat(mat.Size, mat.Depth, mat.NumberOfChannels);

            op.SetTo(new MCvScalar(value));
            CvInvoke.AbsDiff(mat, op, mat);
            op.Dispose();
        }
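A short usage sketch for the AbsDiff extension above, assuming the static class that declares it is in scope; the 320x240 test Mat and the offset of 128 are made up for illustration.

        Mat frame = new Mat(new Size(320, 240), DepthType.Cv8U, 1);
        frame.SetTo(new MCvScalar(100));

        frame.AbsDiff(128.0);                          // in-place |frame - 128|: every pixel becomes 28
        Console.WriteLine(CvInvoke.Mean(frame).V0);    // prints 28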
Example #9
        public static Mat Abs(Mat mat)
        {
            Mat copy  = new Mat();
            var zeros = Mat.Zeros(mat.Height, mat.Width, mat.Depth, mat.NumberOfChannels);

            CvInvoke.AbsDiff(mat, zeros, copy);
            return(copy);
        }
Example #10
        void Process()
        {
            while (!stop)
            {
                // read frame by frame from video or camera
                cam = cap.QueryFrame();

                // end of video
                if (cam == null)
                {
                    break;
                }

                CvInvoke.Resize(cam, cam, new Size(640, 480));

                try
                {
                    // convert the frame from BGR to grayscale
                    CvInvoke.CvtColor(cam, gray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
                    // blur it
                    CvInvoke.GaussianBlur(gray, gray, new Size(21, 21), 0);

                    //CvInvoke.Imshow("gray", gray);

                    // if firstFrame is null, this is the first frame of the video
                    if (firstFrame == null)
                    {
                        //clone from gray frame
                        firstFrame = gray.Clone();
                        frameDelta = firstFrame.Clone();
                        //CvInvoke.Imshow("firstFrame", firstFrame);
                        continue;
                    }

                    // compare each frame with the first frame and store the difference in frameDelta
                    CvInvoke.AbsDiff(gray, firstFrame, frameDelta);
                    //CvInvoke.Imshow("frameDelta", frameDelta);

                    // threshold frame delta
                    CvInvoke.Threshold(frameDelta, thresh, 25, 255, Emgu.CV.CvEnum.ThresholdType.Binary);


                    Morphops(thresh);

                    //CvInvoke.Imshow("thresh", thresh);

                    // find contours from thresh image
                    findContours(thresh, cam);
                }
                catch (Exception)
                {
                }


                CvInvoke.Imshow("Cam", cam);
                CvInvoke.WaitKey(33);
            }
        }
Example #11
 private void buttonDifference_Click(object sender, EventArgs e)
 {
     if (imageConverted.Data != null)
     {
         imageDifference = new Image <Gray, byte>(imageOriginal.Cols, imageOriginal.Rows);
         CvInvoke.AbsDiff(imageZoomed, imageConverted, imageDifference);
         imageBox2.Image = imageDifference;
     }
 }
Example #12
        private Mat DetectPen()
        {
            if (capture == null)
            {
                return(null);
            }



            Mat       m = new Mat();
            Mat       n = new Mat();
            Mat       o = new Mat();
            Mat       binaryDiffFrame   = new Mat();
            Mat       denoisedDiffFrame = new Mat();
            Mat       finalFrame        = new Mat();
            Rectangle cropbox           = new Rectangle();

            capture.Read(m);

            if (!m.IsEmpty)
            {
                Image <Bgr, byte> ret = m.ToImage <Bgr, byte>();
                Image <Bgr, byte> img = m.ToImage <Bgr, byte>();
                var image             = img.InRange(new Bgr(blue1, green1, red1), new Bgr(blue2, green2, red2));
                var mat = img.Mat; // new matrix equal to the previous one
                mat.SetTo(new MCvScalar(0, 0, 255), image);
                mat.CopyTo(ret);

                Image <Bgr, byte> imgout = ret.CopyBlank(); // image without the black background
                imgout._Or(img);


                CvInvoke.AbsDiff(m, imgout, n);
                // Apply a binary threshold to the grayscale image (white pixels mark the difference)
                CvInvoke.CvtColor(n, o, ColorConversion.Bgr2Gray);
                CvInvoke.Threshold(o, binaryDiffFrame, 5, 255, ThresholdType.Binary); // 5 sets the brightness threshold used when converting the grayscale image to a binary (black-and-white) image

                // Remove noise with an opening operation (erosion followed by dilation)
                CvInvoke.Erode(binaryDiffFrame, denoisedDiffFrame, null, new Point(-1, -1), ErodeIterations, BorderType.Default, new MCvScalar(1));
                CvInvoke.Dilate(denoisedDiffFrame, denoisedDiffFrame, null, new Point(-1, -1), DilateIterations, BorderType.Default, new MCvScalar(1));

                m.CopyTo(finalFrame);

                m.Dispose();
                n.Dispose();
                o.Dispose();
                binaryDiffFrame.Dispose();

                return(DetectObject(denoisedDiffFrame, finalFrame, cropbox));
            }
            else
            {
                // break;
            }

            return(null);
        }
Example #13
 private void preencherImagemBinariaSemPlanoDeFundo()
 {
     mCopiaImagemPlanoDeFundo = mImagemDoPlanoDeFundo.Clone();
     CvInvoke.AbsDiff(mImagemColorida, mCopiaImagemPlanoDeFundo, mImagemSemPlanoDeFundo);
     CvInvoke.CvtColor(mImagemSemPlanoDeFundo, mImagemCinzaSemPlanoDeFundo, ColorConversion.Rgb2Gray);
     // CvInvoke.Threshold(mImagemCinzaSemPlanoDeFundo, mImagemBinariaSemPlanoDeFundo, ParametrosConstantes.LimiarTransformacaoParaCinza,
     //ParametrosConstantes.MaximoLimiarTransformacaoParaCinza, ThresholdType.Binary);
     CvInvoke.AdaptiveThreshold(mImagemCinzaSemPlanoDeFundo, mImagemBinariaSemPlanoDeFundo, ParametrosConstantes.MaximoLimiarTransformacaoParaCinza,
                                AdaptiveThresholdType.GaussianC, ThresholdType.Binary, 11, 3);
 }
Example #14
        public void TestOclPyr()
        {
            if (!CvInvoke.HaveOpenCL)
            {
                return;
            }

            Image <Gray, Byte> img = new Image <Gray, byte>(640, 480);

            //add some random noise to the image
            img.SetRandUniform(new MCvScalar(), new MCvScalar(255, 255, 255));
            Image <Gray, Byte> down = img.PyrDown();

            //Emgu.CV.UI.ImageViewer.Show(down);

            Image <Gray, Byte> up = down.PyrUp();

            UMat gImg  = img.ToUMat();
            UMat gDown = new UMat();
            UMat gUp   = new UMat();

            CvInvoke.PyrDown(gImg, gDown);
            CvInvoke.PyrUp(gDown, gUp);

            CvInvoke.AbsDiff(down, gDown.ToImage <Gray, Byte>(), down);
            CvInvoke.AbsDiff(up, gUp.ToImage <Gray, Byte>(), up);
            double[] minVals, maxVals;
            Point[]  minLocs, maxLocs;
            down.MinMax(out minVals, out maxVals, out minLocs, out maxLocs);
            double maxVal = 0.0;

            for (int i = 0; i < maxVals.Length; i++)
            {
                if (maxVals[i] > maxVal)
                {
                    maxVal = maxVals[i];
                }
            }
            Trace.WriteLine(String.Format("Max diff: {0}", maxVal));
            EmguAssert.IsTrue(maxVal <= 1.0);
            //Assert.LessOrEqual(maxVal, 1.0);

            up.MinMax(out minVals, out maxVals, out minLocs, out maxLocs);
            maxVal = 0.0;
            for (int i = 0; i < maxVals.Length; i++)
            {
                if (maxVals[i] > maxVal)
                {
                    maxVal = maxVals[i];
                }
            }
            Trace.WriteLine(String.Format("Max diff: {0}", maxVal));
            EmguAssert.IsTrue(maxVal <= 1.0);
            //Assert.LessOrEqual(maxVal, 1.0);
        }
Example #15
        public void TestQuaternions1()
        {
            Quaternions q       = new Quaternions();
            double      epsilon = 1.0e-10;

            Matrix <double> point = new Matrix <double>(3, 1);

            point.SetRandNormal(new MCvScalar(), new MCvScalar(20));
            using (Matrix <double> pt1 = new Matrix <double>(3, 1))
                using (Matrix <double> pt2 = new Matrix <double>(3, 1))
                    using (Matrix <double> pt3 = new Matrix <double>(3, 1))
                    {
                        double x1 = 1.0, y1 = 0.2, z1 = 0.1;
                        double x2 = 0.0, y2 = 0.0, z2 = 0.0;

                        q.SetEuler(x1, y1, z1);
                        q.GetEuler(ref x2, ref y2, ref z2);

                        EmguAssert.IsTrue(
                            Math.Abs(x2 - x1) < epsilon &&
                            Math.Abs(y2 - y1) < epsilon &&
                            Math.Abs(z2 - z1) < epsilon);

                        q.RotatePoints(point, pt1);

                        Matrix <double> rMat = new Matrix <double>(3, 3);
                        q.GetRotationMatrix(rMat);
                        CvInvoke.Gemm(rMat, point, 1.0, null, 0.0, pt2, Emgu.CV.CvEnum.GemmType.Default);

                        CvInvoke.AbsDiff(pt1, pt2, pt3);

                        EmguAssert.IsTrue(
                            pt3[0, 0] < epsilon &&
                            pt3[1, 0] < epsilon &&
                            pt3[2, 0] < epsilon);
                    }

            double rotationAngle = 0.2;

            q.SetEuler(rotationAngle, 0.0, 0.0);
            EmguAssert.IsTrue(Math.Abs(q.RotationAngle - rotationAngle) < epsilon);
            q.SetEuler(0.0, rotationAngle, 0.0);
            EmguAssert.IsTrue(Math.Abs(q.RotationAngle - rotationAngle) < epsilon);
            q.SetEuler(0.0, 0.0, rotationAngle);
            EmguAssert.IsTrue(Math.Abs(q.RotationAngle - rotationAngle) < epsilon);

            q = q * q;
            EmguAssert.IsTrue(Math.Abs(q.RotationAngle / 2.0 - rotationAngle) < epsilon);

            q.SetEuler(0.2, 0.1, 0.05);
            double t = q.RotationAngle;

            q = q * q;
            EmguAssert.IsTrue(Math.Abs(q.RotationAngle / 2.0 - t) < epsilon);
        }
Example #16
        public void TestKMeans()
        {
            int clustersCount = 5;
            int sampleCount   = 300;
            int imageSize     = 500;

            Bgr[] colors = new Bgr[]
            {
                new Bgr(0, 0, 255),
                new Bgr(0, 255, 0),
                new Bgr(255, 100, 100),
                new Bgr(255, 0, 255),
                new Bgr(0, 255, 255)
            };

            Image <Bgr, Byte> image = new Image <Bgr, byte>(imageSize, imageSize);

            #region generate random samples

            Matrix <float> points = new Matrix <float>(sampleCount, 1, 2);

            Matrix <int> clusters = new Matrix <int>(sampleCount, 1);
            Random       r        = new Random();
            for (int i = 0; i < clustersCount; i++)
            {
                Matrix <float> row = points.GetRows(i * (sampleCount / clustersCount),
                                                    (i + 1) * (sampleCount / clustersCount), 1);
                row.SetRandNormal(new MCvScalar(r.Next() % imageSize, r.Next() % imageSize),
                                  new MCvScalar((r.Next() % imageSize) / 6, (r.Next() % imageSize) / 6));
            }
            using (ScalarArray ia = new ScalarArray(new MCvScalar()))
            {
                CvInvoke.AbsDiff(points, ia, points);
            }
            CvInvoke.RandShuffle(points, 1.0, 0);

            #endregion

            CvInvoke.Kmeans(
                points,
                2,
                clusters,
                new MCvTermCriteria(10, 1.0),
                5,
                CvEnum.KMeansInitType.PPCenters);

            for (int i = 0; i < sampleCount; i++)
            {
                PointF p = new PointF(points.Data[i, 0], points.Data[i, 1]);
                image.Draw(new CircleF(p, 1.0f), colors[clusters[i, 0]], 1);
            }

            //Emgu.CV.UI.ImageViewer.Show(image);
        }
Example #17
        /*
         * private bool IsMatch(IScreenshot screenshot, Mat reference, double threshold)
         * {
         *  var result = new Mat();
         *
         *  CvInvoke.AbsDiff(screenshot.Image, reference, result);
         *  var mean = CvInvoke.Mean(result);
         *
         *  return mean.V0 <= threshold * 255;
         * }
         */

        private bool IsMatch(IScreenshot screenshot, Mat reference, Rectangle roi, double threshold)
        {
            var result        = new Mat();
            var screenshotRoi = new Mat(screenshot.Image, roi);
            var referenceRoi  = new Mat(reference, roi);

            CvInvoke.AbsDiff(screenshotRoi, referenceRoi, result);
            var mean = CvInvoke.Mean(result);

            return(mean.V0 <= threshold * 255);
        }
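The same ROI comparison can be sketched directly on two Mats without the IScreenshot wrapper; the sizes, the region, and the 5% tolerance below are assumptions. new Mat(parent, roi) creates a view into the parent, so only the region of interest is diffed.

        Mat current   = new Mat(new Size(640, 480), DepthType.Cv8U, 3);
        Mat reference = new Mat(new Size(640, 480), DepthType.Cv8U, 3);
        Rectangle roi = new Rectangle(100, 100, 200, 150);

        using (Mat currentRoi = new Mat(current, roi))
        using (Mat referenceRoi = new Mat(reference, roi))
        using (Mat diff = new Mat())
        {
            CvInvoke.AbsDiff(currentRoi, referenceRoi, diff);
            bool isMatch = CvInvoke.Mean(diff).V0 <= 0.05 * 255;   // mean difference within 5% of full scale
        }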
Example #18
        private void Compare(Mat image1, Mat image2)
        {
            using (var difImage = new Mat())
            {
                CvInvoke.AbsDiff(image1, image2, difImage);

                var viewer = new ImageViewer();
                viewer.Image = difImage;
                // viewer.ShowDialog();
            }
        }
Example #19
        void Process()
        {
            gray   = new Mat();
            thresh = new Mat();

            while (true)
            {
                cam = cap.QueryFrame();

                // if cam is null, we have reached the end of the video
                if (cam == null)
                {
                    break;
                }

                try
                {
                    //convert frame from BGR to Gray
                    CvInvoke.CvtColor(cam, gray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);

                    //blur it
                    CvInvoke.GaussianBlur(gray, gray, new Size(21, 21), 0);

                    //show gray frame
                    CvInvoke.Imshow("gray", gray);

                    // firstFrame holds the first frame of the video; initialize it on the first pass
                    if (firstFrame == null)
                    {
                        firstFrame = gray.Clone();
                        frameDelta = firstFrame.Clone();
                        continue;
                    }
                    CvInvoke.AbsDiff(gray, firstFrame, frameDelta);
                    CvInvoke.Imshow("frameDelta", frameDelta);

                    CvInvoke.Threshold(frameDelta, thresh, 25, 255, Emgu.CV.CvEnum.ThresholdType.Binary);
                    Morphops(thresh);

                    CvInvoke.Imshow("thresh", thresh);

                    findContours(thresh, cam);
                }
                catch (Exception)
                {
                }



                CvInvoke.Imshow("Cam", cam);
                CvInvoke.WaitKey(33);
            }
        }
Example #20
        public void TestCudaPyr()
        {
            if (!CudaInvoke.HasCuda)
            {
                return;
            }
            Image <Gray, Byte> img = new Image <Gray, byte>(640, 480);

            img.SetRandUniform(new MCvScalar(), new MCvScalar(255, 255, 255));
            Image <Gray, Byte> down = img.PyrDown();
            Image <Gray, Byte> up   = down.PyrUp();

            CudaImage <Gray, Byte> gImg  = new CudaImage <Gray, byte>(img);
            CudaImage <Gray, Byte> gDown = new CudaImage <Gray, byte>(img.Size.Width >> 1, img.Size.Height >> 1);
            CudaImage <Gray, Byte> gUp   = new CudaImage <Gray, byte>(img.Size);

            CudaInvoke.PyrDown(gImg, gDown, null);
            CudaInvoke.PyrUp(gDown, gUp, null);

            CvInvoke.AbsDiff(down, gDown.ToImage(), down);
            CvInvoke.AbsDiff(up, gUp.ToImage(), up);
            double[] minVals, maxVals;
            Point[]  minLocs, maxLocs;
            down.MinMax(out minVals, out maxVals, out minLocs, out maxLocs);
            double maxVal = 0.0;

            for (int i = 0; i < maxVals.Length; i++)
            {
                if (maxVals[i] > maxVal)
                {
                    maxVal = maxVals[i];
                }
            }
            Trace.WriteLine(String.Format("Max diff: {0}", maxVal));
            EmguAssert.IsTrue(maxVal <= 1.0);
            //Assert.LessOrEqual(maxVal, 1.0);

            up.MinMax(out minVals, out maxVals, out minLocs, out maxLocs);
            maxVal = 0.0;
            for (int i = 0; i < maxVals.Length; i++)
            {
                if (maxVals[i] > maxVal)
                {
                    maxVal = maxVals[i];
                }
            }
            Trace.WriteLine(String.Format("Max diff: {0}", maxVal));
            EmguAssert.IsTrue(maxVal <= 1.0);
            //Assert.LessOrEqual(maxVal, 1.0);
        }
Example #21
        static ContourProperties LargestContour(Mat image_raw, Mat background, bool draw)
        {
            Size frsize = new Size(image_raw.Width, image_raw.Height);
            Mat  image  = new Mat(frsize, Emgu.CV.CvEnum.DepthType.Cv8U, 1);
            ContourProperties     contprops = new ContourProperties();
            ThresholdType         ttype     = 0;
            VectorOfVectorOfPoint contours  = new VectorOfVectorOfPoint();
            Mat hierarchy = new Mat();

            CvInvoke.AbsDiff(image_raw, background, image);
            CvInvoke.Threshold(image, image, 35, 255, ttype);
            CvInvoke.FindContours(image, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxNone);
            double largest_area       = 0;
            int    largest_area_index = 0;

            for (int ind = 0; ind < contours.Size; ind++)
            {
                double area = CvInvoke.ContourArea(contours[ind]);
                if (area > largest_area)
                {
                    if (image_raw.Width > 1000 && contours[ind][0].Y < 100) // prevents stim LED from being caught as a contour
                    {
                        continue;
                    }
                    largest_area       = area;
                    largest_area_index = ind;
                }
            }
            var contourCenter = new Point();

            if (contours.Size > 0)
            {
                Rectangle bounding_rect = CvInvoke.BoundingRectangle(contours[largest_area_index]);
                contourCenter.X  = (int)(bounding_rect.X + (float)bounding_rect.Width / (float)2);
                contourCenter.Y  = (int)(bounding_rect.Y + (float)bounding_rect.Height / (float)2);
                contprops.center = contourCenter;
                contprops.height = bounding_rect.Height;
                if (draw)
                {
                    CvInvoke.DrawContours(image_raw, contours, largest_area_index, new MCvScalar(255, 0, 0), 2); // these are correct.
                    CvInvoke.Rectangle(image_raw, bounding_rect, new MCvScalar(255, 0, 0));
                    CvInvoke.Circle(image_raw, contourCenter, 50, new MCvScalar(255, 0, 0));                     // THIS IS ABOUT 50 PIXELS TOO HIGH
                }
            }
            else
            {
                //    Console.WriteLine("no contours");
            }
            return(contprops);
        }
Example #22
        public static double comparePixel(Image <Bgr, byte> img1, Image <Bgr, byte> img2, double tanhScale = 0.01f)
        {
            Size midSize = new Size((img1.Width + img2.Width) / 2, (img1.Height + img2.Height) / 2);

            img1 = img1.Resize(midSize.Width, midSize.Height, Emgu.CV.CvEnum.Inter.Linear);
            img2 = img2.Resize(midSize.Width, midSize.Height, Emgu.CV.CvEnum.Inter.Linear);
            Mat diffMat = new Mat();

            CvInvoke.AbsDiff(img1, img2, diffMat);
            MCvScalar diffSum = CvInvoke.Sum(diffMat);
            double    x       = (diffSum.V0 + diffSum.V1 + diffSum.V2) / 3.0 / (midSize.Width * midSize.Height);

            return(1 - Math.Tanh(tanhScale * x));
        }
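A hedged call site for comparePixel above: the score approaches 1.0 for near-identical images and falls toward 0 as the mean per-pixel difference grows. The two solid-color test images and the use of the default tanhScale are illustrative.

        Image <Bgr, byte> imgA = new Image <Bgr, byte>(320, 240, new Bgr(10, 20, 30));
        Image <Bgr, byte> imgB = new Image <Bgr, byte>(320, 240, new Bgr(12, 22, 28));

        double score = comparePixel(imgA, imgB);       // default tanhScale = 0.01
        Console.WriteLine(string.Format("similarity: {0:F3}", score));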
Example #23
        /// <summary>
        /// Compare two images - return the percentage the images match
        /// </summary>
        public static double CompareImages(Image <Bgr, byte> firstImage, Mat secondImage)
        {
            using (Image <Bgr, byte> diffImage = new Image <Bgr, byte>(firstImage.Size))
            {
                // OpenCV method to produce an image which is the difference of the 2.
                CvInvoke.AbsDiff(firstImage, secondImage, diffImage);

                // Threshold to filter out pixels that are basically a match.
                // Count the remaining non-zero (white) pixels, which mark real differences.
                var nonZeroPixels = diffImage.ThresholdBinary(new Bgr(20, 20, 20), new Bgr(255d, 255d, 255d)).CountNonzero().Average();

                // Divide by the total number of pixels to get the fraction of the image that matches
                return(1 - (nonZeroPixels / (firstImage.Height * firstImage.Width)));
            }
        }
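One possible call site for CompareImages above, with placeholder file names and an arbitrary 0.98 cut-off; both inputs are assumed to have the same dimensions, as the method requires.

        Image <Bgr, byte> baseline = new Image <Bgr, byte>("baseline.png");    // placeholder path
        Mat candidate = CvInvoke.Imread("candidate.png");                      // placeholder path

        double match = CompareImages(baseline, candidate);
        if (match < 0.98)
        {
            Console.WriteLine(string.Format("Images differ: only {0:P1} of pixels match.", match));
        }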
Example #24
        public void TestMatrixSubtract()
        {
            Matrix <float> f = new Matrix <float>(600, 480);

            //set the value to 300
            f.SetValue(new MCvScalar(300));
            f -= 10;
            using (ScalarArray sa = new ScalarArray(290))
                using (Mat absDiff = new Mat())
                {
                    //calculate the absolute difference between the values in f and 290
                    CvInvoke.AbsDiff(f, sa, absDiff);
                    int nonZeroCount = CvInvoke.CountNonZero(absDiff);
                    //Console.WriteLine(String.Format("number of elements that is not 290: {0}", nonZeroCount));
                }
        }
Example #25
        public Camera()
        {
            InitializeComponent();

            Capture cap = new Capture();

            contours = new List <Mat>();

            while (true)
            {
                cam = cap.QueryFrame();

                if (cam == null || cam.IsEmpty)
                {
                    break;
                }

                thresh = new Mat();
                try
                {
                    CvInvoke.CvtColor(cam, gray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
                    // Gaussian kernel size must be odd; blur with 21x21 as in the other examples
                    CvInvoke.GaussianBlur(gray, gray, new Size(21, 21), 0);

                    // the first grayscale frame becomes the background reference
                    if (firstFrame == null)
                    {
                        firstFrame = gray.Clone();
                        frameDelta = firstFrame.Clone();
                        continue;
                    }

                    CvInvoke.AbsDiff(firstFrame, gray, frameDelta);

                    CvInvoke.Threshold(frameDelta, thresh, 25, 255, Emgu.CV.CvEnum.ThresholdType.Binary);

                    //CvInvoke.Dilate(thresh, thresh, null, new Point(-1, -1), 2, Emgu.CV.CvEnum.BorderType.Default,new MCvScalar(0));

                    //CvInvoke.FindContours(thresh.Clone(),contours,)
                }
                catch (Exception)
                {
                }

                CvInvoke.Imshow("thresh", thresh);

                CvInvoke.Imshow("Cam", cam);
                CvInvoke.WaitKey(33);
            }
        }
Example #26
        private void ProcessFrame(object sender, EventArgs arg)
        {
            frame_in = cam.QueryFrame();

            if (flag_motion)
            {
                frame_copy = frame_in.Clone();

                CvInvoke.CvtColor(frame_copy, frame_gray, ColorConversion.Bgr2Gray);
                CvInvoke.AbsDiff(frame_gray, frame_gray_prev, frame_difference);
                frame_gray_prev.Dispose();
                frame_gray_prev = frame_gray.Clone();
                CvInvoke.Threshold(frame_difference, frame_difference, canny_th1, canny_th2, ThresholdType.Binary);
                CvInvoke.GaussianBlur(frame_difference, frame_difference, new Size(blur_size, blur_size), blur_x, blur_x);
                CvInvoke.Threshold(frame_difference, frame_difference, canny_th1, canny_th2, ThresholdType.Binary);

                if (erosion_size > 0)
                {
                    CvInvoke.Erode(frame_difference, frame_difference, element_erode, new Point(1, 1), 1, BorderType.Default, new MCvScalar(0));
                }

                if (dilation_size > 0)
                {
                    CvInvoke.Dilate(frame_difference, frame_difference, element_dilate, new Point(1, 1), 1, BorderType.Default, new MCvScalar(0));
                }

                Image <Bgr, Byte> frame_display = frame_difference.ToImage <Bgr, Byte>();
                frame_copy.Dispose();

                if (imageBox1.Image != null)
                {
                    imageBox1.Image.Dispose();
                }
                imageBox1.Image = frame_display;
                //  frame_display.Dispose();
            }
            else
            {
                if (imageBox1.Image != null)
                {
                    imageBox1.Image.Dispose();
                }
                imageBox1.Image = frame_in;
            }
        }
Example #27
        public static double GetDifference(Mat mat1, Mat mat2, Mat? mask = null)
        {
            if (mask != null && !mask.IsEmpty)
            {
                var mat1Masked = new Mat();
                mat1.CopyTo(mat1Masked, mask);
                mat1 = mat1Masked;

                var mat2Masked = new Mat();
                mat2.CopyTo(mat2Masked, mask);
                mat2 = mat2Masked;
            }

            var foreground = new Mat();
            CvInvoke.AbsDiff(mat1, mat2, foreground);

            return CvInvoke.Mean(foreground).V0;
        }
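A sketch of how the optional mask parameter can be used, assuming 8-bit single-channel inputs; the helper zeroes everything outside the mask before AbsDiff, so the returned mean is taken over the whole frame, not just the masked region. The solid test values and the circular mask are illustrative.

        Mat a = new Mat(new Size(200, 200), DepthType.Cv8U, 1);
        Mat b = new Mat(new Size(200, 200), DepthType.Cv8U, 1);
        a.SetTo(new MCvScalar(50));
        b.SetTo(new MCvScalar(80));

        // compare only the pixels inside a filled circle
        Mat mask = new Mat(new Size(200, 200), DepthType.Cv8U, 1);
        mask.SetTo(new MCvScalar(0));
        CvInvoke.Circle(mask, new Point(100, 100), 60, new MCvScalar(255), -1);

        double meanDiff = GetDifference(a, b, mask);
        Console.WriteLine(meanDiff);   // roughly 30 * (mask area / frame area), since pixels outside the mask contribute 0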
Example #28
        double Similar(ref Mat src, ref Mat src2)
        {
            Matrix <Byte> matrix = new Matrix <Byte>(src.Rows, src.Cols, src.NumberOfChannels);

            src.CopyTo(matrix);

            Mat gray1 = new Mat(),
                gray2 = new Mat();

            CvInvoke.CvtColor(src, gray1, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
            CvInvoke.CvtColor(src2, gray2, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);

            var size = new Size(Math.Max(src.Width, src2.Width), Math.Max(src.Height, src2.Height));

            CvInvoke.Resize(gray1, gray1, size);
            CvInvoke.Resize(gray2, gray2, size);
            using (var scaledImg1 = gray1)
                using (var scaledImg2 = gray2)
                {
                    int threshold = 230;
                    CvInvoke.Threshold(scaledImg1, scaledImg1, threshold, 255, ThresholdType.Binary);
                    CvInvoke.Threshold(scaledImg2, scaledImg2, threshold, 255, ThresholdType.Binary);

                    //CvInvoke.CalcHist()
                    Mat res = new Mat();
                    CvInvoke.AbsDiff(scaledImg1, scaledImg2, res);

                    CvInvoke.Imshow("第一张", scaledImg1);
                    CvInvoke.Imshow("第二张", scaledImg2);
                    //Mat mat1 = scaledImg1.Row(0);
                    //Mat mat2 = scaledImg2.Col(0);
                    //var r1 = CvInvoke.CompareHist(scaledImg1, scaledImg2, HistogramCompMethod.Correl);

                    CvInvoke.Imshow("res", res);


                    //var all = 1;// Convert.ToDouble( CvInvoke.Sum(scaledImg1));
                    var sum1   = CvInvoke.Sum(scaledImg1);
                    var sum    = CvInvoke.Sum(res);
                    var result = (1 - sum.V0 / sum1.V0);
                    Console.WriteLine("result:" + result);
                    return(result);
                }
        }
Example #29
        static List <ContourProperties> BarrierLocations(Mat image_raw, Mat background)
        {
            int  minArea = 1000;
            int  maxArea = 600000;
            Size frsize  = new Size(image_raw.Width, image_raw.Height);
            Mat  image   = new Mat(frsize, Emgu.CV.CvEnum.DepthType.Cv8U, 1);

            ThresholdType            ttype    = 0;
            VectorOfVectorOfPoint    contours = new VectorOfVectorOfPoint();
            List <VectorOfPoint>     contlist = new List <VectorOfPoint>();
            List <ContourProperties> cp_list  = new List <ContourProperties>();
            Mat hierarchy = new Mat();

            CvInvoke.AbsDiff(image_raw, background, image);
            CvInvoke.Threshold(image, image, 50, 255, ttype);
            CvInvoke.FindContours(image, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxNone);
            Point contourCenter = new Point();

            for (int ind = 0; ind < contours.Size; ind++)
            {
                double area = CvInvoke.ContourArea(contours[ind]);
                if (area > minArea && area < maxArea)
                {
                    contlist.Add(contours[ind]);
                }
            }
            for (int contind = 0; contind < contlist.Count; contind++)
            {
                ContourProperties contprops     = new ContourProperties();
                Rectangle         bounding_rect = CvInvoke.BoundingRectangle(contlist[contind]);
                contourCenter.X  = (int)(bounding_rect.X + (float)bounding_rect.Width / (float)2);
                contourCenter.Y  = (int)(bounding_rect.Y + (float)bounding_rect.Height / (float)2);
                contprops.center = contourCenter;
                contprops.height = bounding_rect.Height;
                cp_list.Add(contprops);
            }

            return(cp_list);
        }
Example #30
        public static Bitmap extract_different_mask(Bitmap b1, Bitmap b2)
        {
            Bitmap             ret  = null;
            Image <Gray, Byte> img1 = new Image <Gray, byte>(b1);
            Image <Gray, Byte> img2 = new Image <Gray, byte>(b2);
            Mat diff = new Mat();

            CvInvoke.AbsDiff(img1, img2, diff);
            Mat tmp = new Mat();

            CvInvoke.Threshold(diff, tmp, 0, 255, ThresholdType.Binary | ThresholdType.Otsu);
            diff = tmp;
            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                CvInvoke.FindContours(diff, contours, null, RetrType.External, ChainApproxMethod.ChainApproxNone);
                int    count = contours.Size;
                double m     = 0.0;
                int    idx   = -1;
                for (int i = 0; i < count; i++)
                {
                    double d = CvInvoke.ContourArea(contours[i]);
                    if (d > m)
                    {
                        m   = d;
                        idx = i;
                    }
                }
                if (idx >= 0)
                {
                    tmp = Mat.Ones(img2.Mat.Rows, img2.Mat.Cols, img2.Mat.Depth, img2.Mat.NumberOfChannels);
                    VectorOfVectorOfPoint vvp = new VectorOfVectorOfPoint();
                    vvp.Push(contours[idx]);
                    CvInvoke.FillPoly(tmp, vvp, new MCvScalar(255));
                    CvInvoke.CvtColor(tmp, diff, ColorConversion.Gray2Bgr);
                    ret = diff.Bitmap;
                }
            }
            return(ret);
        }