Example No. 1
        public static Mat Absdiff(this Mat src, Mat src2)
        {
            var dst = new Mat();

            Cv2.Absdiff(src, src2, dst);
            return(dst);
        }
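
A minimal usage sketch of the extension above, assuming OpenCvSharp is referenced; the file names are placeholders:

        using (var a = Cv2.ImRead("frame_a.png"))
        using (var b = Cv2.ImRead("frame_b.png"))
        using (Mat diff = a.Absdiff(b))   // per-pixel |a - b|
        {
            Cv2.ImWrite("diff.png", diff);
        }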
Example No. 2
        protected static void ImageEquals(Mat img1, Mat img2, double abs_error)
        {
            if (img1 == null && img2 == null)
            {
                return;
            }
            Assert.NotNull(img1);
            Assert.NotNull(img2);
            Assert.Equal(img1.Type(), img2.Type());
            double abs_sum = abs_error * img1.Width * img1.Height;

            using (var comparison = new Mat()) {
                Cv2.Absdiff(img1, img2, comparison);
                if (img1.Channels() == 1)
                {
                    Assert.False(Cv2.Sum(comparison).Val0 > abs_sum);
                }
                else
                {
                    var channels = Cv2.Split(comparison);
                    try {
                        foreach (var channel in channels)
                        {
                            Assert.False(Cv2.Sum(channel).Val0 > abs_sum);
                        }
                    } finally {
                        foreach (var channel in channels)
                        {
                            channel.Dispose();
                        }
                    }
                }
            }
        }
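
A sketch of how the assertion helper above might be called from an xUnit test; the image name is a placeholder and the symmetric-difference check is only illustrative:

        [Fact]
        public void AbsdiffIsSymmetric()
        {
            using var a  = Cv2.ImRead("lenna.png", ImreadModes.Grayscale);
            using var b  = new Mat(a.Size(), a.Type(), new Scalar(2));
            using var d1 = new Mat();
            using var d2 = new Mat();
            Cv2.Absdiff(a, b, d1);
            Cv2.Absdiff(b, a, d2);
            ImageEquals(d1, d2, 0.0);   // |a - b| == |b - a|, so a zero tolerance should hold
        }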
Example No. 3
        public static double GetMaxDifference(Mat a, Mat b)
        {
            var resultMat = new Mat();

            Cv2.Absdiff(InputArray.Create(a), InputArray.Create(b), resultMat);
            resultMat.MinMaxLoc(out double minVal, out double maxVal);

            return(maxVal);
        }
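
One way the helper above could serve as a tolerance check; the file names and the tolerance of 1 are assumptions, and grayscale input keeps MinMaxLoc working on a single channel:

        using var reference = Cv2.ImRead("reference.png", ImreadModes.Grayscale);
        using var candidate = Cv2.ImRead("candidate.png", ImreadModes.Grayscale);
        double maxDiff = GetMaxDifference(reference, candidate);
        Console.WriteLine(maxDiff <= 1.0
            ? "images match within tolerance"
            : $"images differ, max absolute difference = {maxDiff}");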
Example No. 4
 private void DiffExecute()
 {
     using (Mat image1 = Cv2.ImRead(InputImageName1))
         using (Mat image2 = Cv2.ImRead(InputImageName2))
             using (Mat diffImage = new Mat(new OpenCvSharp.Size(image1.Cols, image1.Rows), MatType.CV_8UC3))
             {
                 Cv2.Absdiff(image1, image2, diffImage);
                 DiffImage = BitmapSourceConverter.ToBitmapSource(diffImage);
             }
 }
Example No. 5
        // Compute the peak signal-to-noise ratio (PSNR) of two images
        private double countPsnr(Mat I1, Mat I2)
        {
            Mat s1 = new Mat();

            Cv2.Absdiff(I1, I2, s1);
            s1.ConvertTo(s1, MatType.CV_32F);                 // convert to 32-bit float; squaring would overflow 8-bit values
            s1 = s1.Mul(s1);
            Scalar s    = Cv2.Sum(s1);                        // sum the elements of each channel
            double sse  = s.Val0 + s.Val1 + s.Val2;
            double mse  = sse / (I1.Channels() * I1.Total()); //  sse/(w*h*3)
            double psnr = 10.0 * Math.Log10((255 * 255) / mse);

            return(psnr);
        }
Example No. 6
        static Mat DiffImage(Mat t0, Mat t1, Mat t2)
        {
            Mat d1 = new Mat();

            Cv2.Absdiff(t2, t1, d1);

            Mat d2 = new Mat();

            Cv2.Absdiff(t1, t0, d2);

            Mat diff = new Mat();

            Cv2.BitwiseAnd(d1, d2, diff);

            return(diff);
        }
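
The three-frame differencing above keeps only pixels that changed in both consecutive frame pairs, which suppresses single-frame noise. A minimal driver sketch, assuming a webcam at index 0 and grayscale frames:

        using var capture = new VideoCapture(0);

        Mat Grab()
        {
            using var f = new Mat();
            capture.Read(f);
            return f.CvtColor(ColorConversionCodes.BGR2GRAY);
        }

        Mat t0 = Grab(), t1 = Grab(), t2 = Grab();
        while (Cv2.WaitKey(30) != 27)            // Esc to quit
        {
            using (Mat motion = DiffImage(t0, t1, t2))
                Cv2.ImShow("motion", motion);
            t0.Dispose();
            t0 = t1; t1 = t2; t2 = Grab();       // slide the three-frame window forward
        }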
Example No. 7
        /// <summary>
        /// Calculate Mean Squared Error between two images
        /// </summary>
        /// <param name="a">First image</param>
        /// <param name="b">Second image</param>
        /// <returns>The MSE value; 0 means the images are identical, and larger values mean greater difference</returns>
        public static double GetMSE(Mat a, Mat b)
        {
            var s1 = new Mat();

            Cv2.Absdiff(a, b, s1);
            s1.ConvertTo(s1, MatType.CV_32F);      // convert to float
            s1 = s1.Mul(s1);                       // square the matrix

            Scalar s = Cv2.Sum(s1);                // sum elements per channel

            double sse = s.Val0 + s.Val1 + s.Val2; // sum channels

            double mse = sse / (double)(a.Channels() * a.Total());

            return(mse);
        }
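
For reference, the PSNR helpers elsewhere in this listing follow directly from this MSE for 8-bit images; a hedged sketch of that relationship:

        public static double GetPSNR(Mat a, Mat b)
        {
            double mse = GetMSE(a, b);
            if (mse <= 1e-10)
            {
                return 0;                                    // treat (near-)identical images as 0, as getPSNR below does
            }
            return 10.0 * Math.Log10((255.0 * 255.0) / mse); // 255 is the peak value for 8-bit channels
        }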
Example No. 8
        public float EvaluateFitness(int index, Program.Settings settings)
        {
            Convolve(index, settings);
            Mat targetTmp = new Mat();
            Mat dstTmp    = new Mat();

            targetImgs[index].ConvertTo(targetTmp, MatType.CV_32FC3);
            dstImg.ConvertTo(dstTmp, MatType.CV_32FC3);
            //targetTmp *= -1;

            //Cv2.Add(dstImg, targetTmp, targetTmp, null, MatType.CV_32F);
            Cv2.Absdiff(dstTmp, targetTmp, targetTmp);
            dstTmp.Dispose();

            return(-(float)targetTmp.Sum());
        }
Example No. 9
        private void bn_LogicalSum_Click(object sender, RoutedEventArgs e)
        {
            if (listImage.Count > 0)
            {
                SubWindow.Win_LogicalSum win = new SubWindow.Win_LogicalSum(listImage);
                if (win.ShowDialog() == true)
                {
                    int nMode   = win.nMode;
                    int nIdx1   = win.nSrc1;
                    int nIdx2   = win.nSrc2;
                    Mat matSrc1 = listImage[nIdx1].fn_GetImage();
                    Mat matSrc2 = listImage[nIdx2].fn_GetImage();
                    Mat matDst  = new Mat();
                    timeStart = DateTime.Now;
                    switch (nMode)
                    {
                    case 0:
                        Cv2.Add(matSrc1, matSrc2, matDst);
                        break;

                    case 1:
                        Cv2.Subtract(matSrc1, matSrc2, matDst);
                        break;

                    case 2:
                        //Cv2.Average(matSrc1, matSrc2, matDst);
                        break;

                    case 3:
                        //Cv2.Differential()
                        Cv2.Absdiff(matSrc1, matSrc2, matDst);
                        break;

                    case 4:
                        Cv2.BitwiseAnd(matSrc1, matSrc2, matDst);
                        break;

                    case 5:
                        Cv2.BitwiseOr(matSrc1, matSrc2, matDst);
                        break;
                    }
                    fn_WriteLog($"[Logical Sum] {listImage[nIdx1].Title} + {listImage[nIdx2].Title} : {nMode} ({(DateTime.Now - timeStart).TotalMilliseconds} ms)");
                    fn_NewImage(matDst, $"Logical Sum {nMode}");
                }
            }
        }
Example No. 10
        private void ProcImage2(ref System.Drawing.Bitmap src, ref System.Drawing.Bitmap srcB, out System.Drawing.Bitmap dst)
        {
            dst = null;
            Mat srcImg = BitmapConverter.ToMat(src);

            Cv2.CvtColor(srcImg, srcImg, ColorConversionCodes.BGRA2BGR);

            Mat srcImgB = BitmapConverter.ToMat(srcB);

            Cv2.CvtColor(srcImgB, srcImgB, ColorConversionCodes.BGRA2BGR);

            Mat mask = new Mat();

            Cv2.Absdiff(srcImg, srcImgB, mask);
            Cv2.ImWrite(@"C:\opencv\ImageProcessing\ImageProcessing\Images\absdiff.jpg", mask);
            dst = BitmapConverter.ToBitmap(mask);
        }
Example No. 11
        public static OpenCvSharp.Mat GetDiffInTwoImages(System.Drawing.Bitmap firstState, System.Drawing.Bitmap secondState)
        {
            Mat img1 = firstState.ToMat();
            Mat img2 = secondState.ToMat();
            Mat differenceBetweenImages = new Mat();

            Cv2.Absdiff(img1, img2, differenceBetweenImages);

            // Build a mask of the pixels whose summed per-channel difference exceeds the threshold
            Mat mask = new Mat(img1.Size(), MatType.CV_8UC1, Scalar.Black);

            int threshold = 70;

            Parallel.For(60, differenceBetweenImages.Rows - 200,
                         j =>
            {
                Parallel.For(30, differenceBetweenImages.Cols - 30,
                             i =>
                {
                    // keep these locals inside the lambda; sharing them across parallel iterations is a data race
                    Vec3b vectorOfColorsDifference = differenceBetweenImages.At <Vec3b>(j, i);
                    int   curDifferenceLvl         = vectorOfColorsDifference[0] + vectorOfColorsDifference[1] + vectorOfColorsDifference[2];
                    if (curDifferenceLvl > threshold)
                    {
                        mask.Set <byte>(j, i, 255);   // the mask is CV_8UC1, so write bytes rather than ints
                    }
                });
            });

            return(mask);

            /*
             * Mat result = new Mat();
             * Cv2.BitwiseAnd(img2, img2, result, mask);
             * Cv2.Threshold(result, result, 50, 255, ThresholdTypes.Binary);
             * Cv2.CvtColor(result, result, ColorConversionCodes.BGR2GRAY);
             * //GC.Collect();
             #region debug ImShow("res", res)
             * //Cv2.ImShow("res", res);
             * //Cv2.WaitKey();
             #endregion
             * return result;
             */
        }
Example No. 12
        public void cuda_absdiff()
        {
            Mat  mat1 = Image("lenna.png", ImreadModes.Grayscale);
            Size size = mat1.Size();
            Mat  mat2 = new Mat(size, mat1.Type(), new Scalar(2));

            using (GpuMat g_mat1 = new GpuMat(size, mat1.Type()))
                using (GpuMat dst = new GpuMat()) {
                    GpuMat g_mat2 = new GpuMat(size, mat2.Type());
                    g_mat2.Upload(mat2);
                    g_mat1.Upload(mat1);

                    Cuda.cuda.absdiff(g_mat1, g_mat2, dst);

                    Mat dst_gold = new Mat(size, mat1.Type(), Scalar.Black);
                    Cv2.Absdiff(mat1, mat2, dst_gold);
                    ImageEquals(dst_gold, dst);
                    ShowImagesWhenDebugMode(g_mat1, dst);
                }
        }
Example No. 13
        public static Mat GetDiffInTwoImagesWithCustomBorders(System.Drawing.Bitmap firstState, System.Drawing.Bitmap secondState, int xBorder, int yBorder)
        {
            Mat img1 = firstState.ToMat();
            Mat img2 = secondState.ToMat();
            Mat differenceBetweenImages = new Mat();

            Cv2.Absdiff(img1, img2, differenceBetweenImages);

            // Build a mask of the pixels whose summed per-channel difference exceeds the threshold
            Mat mask      = new Mat(img1.Size(), MatType.CV_8UC1, Scalar.Black);
            int threshold = 70;

            Parallel.For(xBorder, differenceBetweenImages.Rows - xBorder,
                         j =>
            {
                Parallel.For(yBorder, differenceBetweenImages.Cols - yBorder,
                             i =>
                {
                    // keep these locals inside the lambda; sharing them across parallel iterations is a data race
                    Vec3b vectorOfColorsDifference = differenceBetweenImages.At <Vec3b>(j, i);
                    int   curDifferenceLvl         = vectorOfColorsDifference[0] + vectorOfColorsDifference[1] + vectorOfColorsDifference[2];
                    if (curDifferenceLvl > threshold)
                    {
                        mask.Set <byte>(j, i, 255);   // the mask is CV_8UC1, so write bytes rather than ints
                    }
                });
            });

            Mat res = new Mat();

            Cv2.BitwiseAnd(img2, img2, res, mask);
            Cv2.Threshold(res, res, 50, 255, ThresholdTypes.Binary);
            Cv2.CvtColor(res, res, ColorConversionCodes.BGR2GRAY);

            #region debug ImShow("res", res)
            //Cv2.ImShow("res", res);
            //Cv2.WaitKey();
            #endregion
            return(res);
        }
Example No. 14
        private void DetectAction(Mat frame)
        {
            bool isdiff = false;

            using Mat gray_frame = new(), img_delta = new(), thresh = new();
            Cv2.CvtColor(frame, gray_frame, ColorConversionCodes.BGR2GRAY);
            Cv2.Resize(gray_frame, gray_frame, new OpenCvSharp.Size(500, 500));
            Cv2.GaussianBlur(gray_frame, gray_frame, new OpenCvSharp.Size(21, 21), 0);
            if (pre_frame is null)
            {
                pre_frame = new();
                gray_frame.CopyTo(pre_frame);
                return;
            }
            else
            {
                Cv2.Absdiff(pre_frame, gray_frame, img_delta);
                Cv2.Threshold(img_delta, thresh, 25, 255, ThresholdTypes.Binary);
                Cv2.Dilate(thresh, thresh, null, null, 2);

            Cv2.FindContours(thresh, out OpenCvSharp.Point[][] contours, out HierarchyIndex[] h,
                RetrievalModes.External, ContourApproximationModes.ApproxSimple);
Example No. 15
        static void Main(string[] args)
        {
            // Load image 1
            using (Mat image1 = Cv2.ImRead("test01.jpeg"))
                // Load image 2
                using (Mat image2 = Cv2.ImRead("test02.jpeg"))
                    // Allocate a Mat to hold the difference image
                    using (Mat diff = new Mat(new OpenCvSharp.Size(image1.Cols, image1.Rows), MatType.CV_8UC3))
                    {
                        // Take the absolute difference of image 1 and image 2
                        Cv2.Absdiff(image1, image2, diff);
                        // Convert the Mat to a System.Drawing.Bitmap with BitmapConverter
                        var bitmap = BitmapConverter.ToBitmap(diff);
                        // Save the difference image
                        bitmap.Save("result.jpeg");

                        Mat merged = new Mat();
                        Cv2.Add(image2, diff, merged);

                        merged.ToBitmap().Save("result2.jpeg");
                    }
        }
Example No. 16
        private void ProcImage4(ref System.Drawing.Bitmap src, ref System.Drawing.Bitmap srcB, out System.Drawing.Bitmap dst)
        {
            dst = null;
            Mat srcImg = BitmapConverter.ToMat(src);

            Cv2.CvtColor(srcImg, srcImg, ColorConversionCodes.BGRA2BGR);

            Mat srcImgB = BitmapConverter.ToMat(srcB);

            Cv2.CvtColor(srcImgB, srcImgB, ColorConversionCodes.BGRA2BGR);

            Mat mask = new Mat();

            Cv2.Absdiff(srcImg, srcImgB, mask);

            Mat tmp2 = new Mat();

            Cv2.Threshold(mask, tmp2, 35, 255, ThresholdTypes.Binary);

            Cv2.BitwiseAnd(srcImg, tmp2, mask);

            dst = BitmapConverter.ToBitmap(mask);
        }
Example No. 17
        static double getPSNR(Mat I1, Mat I2)
        {
            Mat s1 = new Mat();

            Cv2.Absdiff(I1, I2, s1);               // |I1 - I2|
            s1.ConvertTo(s1, MatType.CV_32F);      // cannot make a square on 8 bits
            s1 = s1.Mul(s1);                       // |I1 - I2|^2

            Scalar s = Cv2.Sum(s1);                // sum elements per channel

            double sse = s.Val0 + s.Val1 + s.Val2; // sum channels

            if (sse <= 1e-10)                      // for small values return zero
            {
                return(0);
            }
            else
            {
                double mse  = sse / (I1.Channels() * I1.Total());
                double psnr = 10.0 * Math.Log10((255 * 255) / mse);
                return(psnr);
            }
        }
Example No. 18
    private static void SegmentationCannyFilledPolygons(Camera camera, out List <Point2f> v_center, out List <float> v_radius)
    {
        RenderTexture activeRenderTexture = RenderTexture.active;

        RenderTexture.active = camera.targetTexture;
        camera.Render();
        Texture2D currentFrame = new Texture2D(camera.targetTexture.width, camera.targetTexture.height);

        currentFrame.ReadPixels(new UnityEngine.Rect(0, 0, camera.targetTexture.width, camera.targetTexture.height), 0, 0);
        currentFrame.Apply();
        RenderTexture.active = activeRenderTexture;
        Mat image = OpenCvSharp.Unity.TextureToMat(currentFrame);

        UnityEngine.Object.Destroy(currentFrame);
        Mat grayImage = new Mat();

        Cv2.CvtColor(image, grayImage, ColorConversionCodes.BGR2GRAY);
        Cv2.EqualizeHist(grayImage, grayImage);
        Cv2.GaussianBlur(grayImage, grayImage, new Size(9, 9), 2, 2);

        Mat edgesImage = new Mat();

        Cv2.Canny(grayImage, edgesImage, 40, 20);
        Point[][]        contours_canny;
        HierarchyIndex[] hierarchy_canny;

        Cv2.FindContours(edgesImage, out contours_canny, out hierarchy_canny, RetrievalModes.List, ContourApproximationModes.ApproxSimple, null);

        Mat img_all_contours_and_filled = Mat.Zeros(edgesImage.Height, edgesImage.Width, MatType.CV_8UC1);
        Mat img_all_contours            = Mat.Zeros(edgesImage.Height, edgesImage.Width, MatType.CV_8UC1);

        for (int j = 0; j < contours_canny.Length; j++)
        {
            Scalar color = new Scalar(255, 255, 255);
            Cv2.DrawContours(img_all_contours_and_filled, contours_canny, j, color, -1, LineTypes.Link8, hierarchy_canny);
            Cv2.DrawContours(img_all_contours, contours_canny, j, color, 1, LineTypes.Link8, hierarchy_canny);
        }
        Mat img_only_closed_contours = new Mat();

        Cv2.Absdiff(img_all_contours_and_filled, img_all_contours, img_only_closed_contours);

        Point[][]        contours;
        HierarchyIndex[] hierarchy;

        Cv2.FindContours(img_only_closed_contours, out contours, out hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxTC89L1, null);

        Point[][]          contours_poly   = new Point[contours.Length][];
        OpenCvSharp.Rect[] boundRect       = new OpenCvSharp.Rect[contours.Length];
        List <Point2f>     contours_center = new List <Point2f> {
        };
        List <float> contours_radius       = new List <float> {
        };

        int i_contour = 0;

        foreach (Point[] contour in contours)
        {
            Point2f contour_center;
            float   contour_radius;

            contours_poly[i_contour] = Cv2.ApproxPolyDP(contour, 3, true);
            Cv2.MinEnclosingCircle(contours_poly[i_contour], out contour_center, out contour_radius);
            //currentFrame = DrawCircle(currentFrame, (int)contour_center.X, (int)contour_center.Y, (int)contour_radius);

            contours_center.Add(contour_center);
            contours_radius.Add(contour_radius);
            i_contour++;
        }

        v_center = contours_center;
        v_radius = contours_radius;

        //TextureToPNG(currentFrame);
    }
Example No. 19
        private static void MakeImagesForArticle()
        {
            var resizeK = 0.2;

            var dir = "Example/";

            var src   = new Mat("0.bmp");
            var src_g = new Mat("0.bmp", LoadMode.GrayScale);

            var src_1   = new Mat("1.bmp");
            var src_1_g = new Mat("1.bmp", LoadMode.GrayScale);

            var background   = new Mat("background.bmp");
            var background_g = new Mat("background.bmp", LoadMode.GrayScale);

            src.Resize(resizeK).ImWrite(dir + "0.png");
            src_g.Resize(resizeK).ImWrite(dir + "0 g.png");
            src_g.ThresholdStairs().Resize(resizeK).ImWrite(dir + "0 g th.png");

            var canny = new Mat();

            Cv2.Canny(src_g, canny, 50, 200);
            canny.Resize(0.5).ImWrite(dir + "0 canny.png");

            Mat[] src_channels;
            Cv2.Split(src, out src_channels);

            for (var i = 0; i < src_channels.Length; ++i)
            {
                var channels = Enumerable.Range(0, src_channels.Length).Select(j => new Mat(src_channels[0].Rows, src_channels[0].Cols, src_channels[0].Type())).ToArray();
                channels[i] = src_channels[i];
                var dst = new Mat();
                Cv2.Merge(channels, dst);
                dst.Resize(resizeK).ImWrite(dir + string.Format("0 ch{0}.png", i));
                src_channels[i].ThresholdStairs().Resize(resizeK).ImWrite(dir + string.Format("0 ch{0} th.png", i));
            }

            if (true)
            {
                src.Resize(0.4).ImWrite(dir + "0.png");
                src_1.Resize(0.4).ImWrite(dir + "1.png");
                background.Resize(0.4).ImWrite(dir + "bg.png");

                var dst_01 = new Mat();
                Cv2.Absdiff(src, src_1, dst_01);
                dst_01.Resize(resizeK).ImWrite(dir + "01.png");
                dst_01.Cut(new Rect(50, src.Height * 4 / 5 - 50, src.Width / 5, src.Height / 5)).ImWrite(dir + "01 part.png");
                dst_01.Cut(new Rect(50, src.Height * 4 / 5 - 50, src.Width / 5, src.Height / 5)).CvtColor(ColorConversion.RgbToGray).ImWrite(dir + "01 g.png");
                dst_01.CvtColor(ColorConversion.RgbToGray).ThresholdStairs().Resize(resizeK).ImWrite(dir + "01 g th.png");

                var dst_01_g = new Mat();
                Cv2.Absdiff(src_g, src_1_g, dst_01_g);
                dst_01_g.Cut(new Rect(50, src.Height * 4 / 5 - 50, src.Width / 5, src.Height / 5)).ImWrite(dir + "0g1g.png");
                dst_01_g.ThresholdStairs().Resize(resizeK).ImWrite(dir + "0g1g th.png");
            }

            if (true)
            {
                var dst_0b = new Mat();
                Cv2.Absdiff(src, background, dst_0b);
                dst_0b.Resize(0.6).ImWrite(dir + "0b.png");

                var dst_0b_g = new Mat();
                Cv2.Absdiff(src_g, background_g, dst_0b_g);
                dst_0b_g.Resize(0.3).ImWrite(dir + "0b g.png");
                dst_0b_g.ThresholdStairs().Resize(0.3).ImWrite(dir + "0b g th.png");
            }
            if (true)
            {
                var hsv_src = new Mat();
                Cv2.CvtColor(src, hsv_src, ColorConversion.RgbToHsv);


                var hsv_background = new Mat();
                Cv2.CvtColor(background, hsv_background, ColorConversion.RgbToHsv);

                var hsv_background_channels = hsv_background.Split();

                var hsv_src_channels = hsv_src.Split();

                if (true)
                {
                    var all = new Mat(src.ToIplImage(), true);
                    for (var i = 0; i < hsv_src_channels.Length; ++i)
                    {
                        hsv_src_channels[i].CvtColor(ColorConversion.GrayToRgb).CopyTo(all, new Rect(i * src.Width / 3, src.Height / 2, src.Width / 3, src.Height / 2));
                    }
                    src_g.CvtColor(ColorConversion.GrayToRgb).CopyTo(all, new Rect(src.Width / 2, 0, src.Width / 2, src.Height / 2));
                    all.Resize(0.3).ImWrite(dir + "all.png");
                }

                foreach (var pair in new[] { "h", "s", "v" }.Select((channel, index) => new { channel, index }))
                {
                    var diff = new Mat();
                    Cv2.Absdiff(hsv_src_channels[pair.index], hsv_background_channels[pair.index], diff);
                    diff.Resize(0.3).With_Title(pair.channel).ImWrite(dir + string.Format("0b {0}.png", pair.channel));
                    diff.ThresholdStairs().Resize(0.3).ImWrite(dir + string.Format("0b {0} th.png", pair.channel));

                    hsv_src_channels[pair.index].Resize(resizeK).With_Title(pair.channel).ImWrite(dir + string.Format("0 {0}.png", pair.channel));

                    foreach (var d in new[] { -100, -50, 50, 100 })
                    {
                        var delta = new Mat(hsv_src_channels[pair.index].ToIplImage(), true);
                        delta.Rectangle(new Rect(0, 0, delta.Width, delta.Height), new Scalar(Math.Abs(d)), -1);

                        var new_channel = new Mat();
                        if (d >= 0)
                        {
                            Cv2.Add(hsv_src_channels[pair.index], delta, new_channel);
                        }
                        else
                        {
                            Cv2.Subtract(hsv_src_channels[pair.index], delta, new_channel);
                        }

                        //delta.ImWrite(dir + string.Format("d{0}{1}.png", pair.channel, d));
                        //new_channel.ImWrite(dir + string.Format("q{0}{1}.png", pair.channel, d));

                        var new_hsv = new Mat();
                        Cv2.Merge(hsv_src_channels.Select((channel, index) => index == pair.index ? new_channel : channel).ToArray(), new_hsv);

                        var res = new Mat();
                        Cv2.CvtColor(new_hsv, res, ColorConversion.HsvToRgb);
                        res.Resize(resizeK).With_Title(string.Format("{0} {1:+#;-#}", pair.channel, d)).ImWrite(dir + string.Format("0 {0}{1}.png", pair.channel, d));
                    }
                }
                //if (true)
                //{
                //  var mat = new Mat(src.ToIplImage(), true);
                //  mat.CopyTo(
                //}
            }
        }
Example No. 20
        private static void DetectBallsForArticle()
        {
            var g_src   = new Mat("0.png", LoadMode.GrayScale);
            var src     = new Mat("0.png");
            var hsv_src = new Mat();

            Cv2.CvtColor(src, hsv_src, ColorConversion.RgbToHsv);

            var background     = new Mat("background.png");
            var g_background   = new Mat("background.png", LoadMode.GrayScale);
            var hsv_background = new Mat();

            Cv2.CvtColor(background, hsv_background, ColorConversion.RgbToHsv);
            var canny = new Mat();
            var dst2  = new Mat();

            Cv2.Canny(g_src, canny, 50, 200);
            Cv2.Threshold(src, dst2, 50, 200, OpenCvSharp.ThresholdType.Binary);
            //Cv2.Subtract(g_src, g_background, dst2);
            //Cv2.Absdiff(g_src, g_background, dst2);

            //Cv2.Subtract(src, background, dst2);
            Cv2.Absdiff(src, background, dst2);

            //dst2.ImWrite("diff.bmp");

            Mat[] dst2_channels;
            Cv2.Split(dst2, out dst2_channels);

            Mat[] background_channels;
            Cv2.Split(background, out background_channels);

            Mat[] hsv_background_channels;
            Cv2.Split(hsv_background, out hsv_background_channels);

            Mat[] hsv_src_channels;
            Cv2.Split(hsv_src, out hsv_src_channels);

            var div_0 = new Mat();

            //Cv2.Divide(dst2_channels[1], background_channels[1], div_0, scale:50);
            Cv2.Divide(background_channels[1], dst2_channels[1], div_0, scale: 40);

            Mat dst2_01  = new Mat();
            Mat dst2_12  = new Mat();
            Mat dst2_012 = new Mat();

            Cv2.Absdiff(dst2_channels[0], dst2_channels[1], dst2_01);
            Cv2.Absdiff(dst2_channels[1], dst2_channels[2], dst2_12);
            Cv2.Add(dst2_01, dst2_12, dst2_012);

            var hsv_diff = Enumerable.Range(0, 3).Select(i => new Mat()).ToArray();

            for (var i = 0; i < 3; ++i)
            {
                Cv2.Absdiff(hsv_src_channels[i], hsv_background_channels[i], hsv_diff[i]);
            }

            //Cv2.Compare(dst2_channels[2], t_dst, t_dst);

            var dst3 = new Mat();

            Cv2.Threshold(dst2_012, dst3, 60, 255, ThresholdType.Binary);
            //OpenCvSharp.CPlusPlus.Cv2.CvtColor(dst2, dst3, OpenCvSharp.ColorConversion.RgbToGray);

            //var circles = OpenCvSharp.CPlusPlus.Cv2.HoughCircles(dst3, OpenCvSharp.HoughCirclesMethod.Gradient, 1, 10, minRadius:10, maxRadius: 80);
            //foreach (var circle in circles)
            //  Console.WriteLine(circle);

            //Console.WriteLine(hsv_diff[0]);

            //hsv_diff[1].ImWrite("hsv_diff_s.bmp");
            DetectBallView(hsv_diff[1], hsv_diff[0]);
            return;

            //using (new Window("src image", src))
            //using (new Window("dst image", background))
            ////using (new Window("canny", canny))
            //using (new Window("dst2 image", dst2))
            //using (new Window("diff0", dst2_channels[1]))
            //using (new Window("bg0", background_channels[1]))
            //using (new Window("dst3 image", div_0))
            using (new Window("src h", hsv_src_channels[0]))
                using (new Window("bg h", hsv_background_channels[0]))
                    using (new Window("d h", hsv_diff[0]))
                        using (new Window("src s", hsv_src_channels[1]))
                            using (new Window("bg s", hsv_background_channels[1]))
                                using (new Window("d s", hsv_diff[1]))
                                    using (new Window("src v", hsv_src_channels[2]))
                                        using (new Window("bg v", hsv_background_channels[2]))
                                            using (new Window("d v", hsv_diff[2]))
                                            {
                                                Cv2.WaitKey();
                                            }
        }
Example No. 21
        public void FindContours(string sLeftPictureFile, string sRightPictureFile)
        {
            Mat tokuLeft  = new Mat();
            Mat tokuRight = new Mat();
            Mat output    = new Mat();

            AKAZE akaze = AKAZE.Create();

            KeyPoint[] keyPointsLeft;
            KeyPoint[] keyPointsRight;

            Mat descriptorLeft  = new Mat();
            Mat descriptorRight = new Mat();

            DescriptorMatcher matcher; // matching method

            DMatch[] matches;          // matching results between the two sets of feature descriptors

            // Load the left image in color
            Mat Lsrc = new Mat(sLeftPictureFile, ImreadModes.Color);

            // Load the right image in color
            Mat Rsrc = new Mat(sRightPictureFile, ImreadModes.Color);

            // Detect keypoints and compute feature descriptors
            akaze.DetectAndCompute(Lsrc, null, out keyPointsLeft, descriptorLeft);
            akaze.DetectAndCompute(Rsrc, null, out keyPointsRight, descriptorRight);


            // Draw the keypoints of the left image
            Cv2.DrawKeypoints(Lsrc, keyPointsLeft, tokuLeft);
            Image imageLeftToku = BitmapConverter.ToBitmap(tokuLeft);

            pictureBox3.SizeMode = PictureBoxSizeMode.Zoom;
            pictureBox3.Image    = imageLeftToku;
            tokuLeft.SaveImage("result/LeftToku.jpg");



            // Draw the keypoints of the right image
            Cv2.DrawKeypoints(Rsrc, keyPointsRight, tokuRight);
            Image imageRightToku = BitmapConverter.ToBitmap(tokuRight);

            pictureBox4.SizeMode = PictureBoxSizeMode.Zoom;
            pictureBox4.Image    = imageRightToku;
            tokuRight.SaveImage("result/RightToku.jpg");

            // Brute-force matching
            matcher = DescriptorMatcher.Create("BruteForce");
            matches = matcher.Match(descriptorLeft, descriptorRight);

            Cv2.DrawMatches(Lsrc, keyPointsLeft, Rsrc, keyPointsRight, matches, output);
            output.SaveImage(@"result\output.jpg");

            int size         = matches.Count();
            var getPtsSrc    = new Vec2f[size];
            var getPtsTarget = new Vec2f[size];

            int count = 0;

            foreach (var item in matches)
            {
                var ptSrc    = keyPointsLeft[item.QueryIdx].Pt;
                var ptTarget = keyPointsRight[item.TrainIdx].Pt;
                getPtsSrc[count][0]    = ptSrc.X;
                getPtsSrc[count][1]    = ptSrc.Y;
                getPtsTarget[count][0] = ptTarget.X;
                getPtsTarget[count][1] = ptTarget.Y;
                count++;
            }

            // Estimate the homography hom that maps Src onto Target, using RANSAC for robust estimation.
            var hom = Cv2.FindHomography(
                InputArray.Create(getPtsSrc),
                InputArray.Create(getPtsTarget),
                HomographyMethods.Ransac);

            // Apply the perspective transform hom to Src.
            Mat WarpedSrcMat = new Mat();

            Cv2.WarpPerspective(
                Lsrc, WarpedSrcMat, hom,
                new OpenCvSharp.Size(Rsrc.Width, Rsrc.Height));

            WarpedSrcMat.SaveImage(@"result\Warap.jpg");

            // Show the warped left image
            Image imageLeftSyaei = BitmapConverter.ToBitmap(WarpedSrcMat);

            pictureBox5.SizeMode = PictureBoxSizeMode.Zoom;
            pictureBox5.Image    = imageLeftSyaei;


            // Show the right image
            Image imageRightSyaei = BitmapConverter.ToBitmap(Rsrc);

            pictureBox6.SizeMode = PictureBoxSizeMode.Zoom;
            pictureBox6.Image    = imageRightSyaei;


            Mat LmatFloat = new Mat();

            WarpedSrcMat.ConvertTo(LmatFloat, MatType.CV_16SC3);
            Mat[] LmatPlanes = LmatFloat.Split();

            Mat RmatFloat = new Mat();

            Rsrc.ConvertTo(RmatFloat, MatType.CV_16SC3);
            Mat[] RmatPlanes = RmatFloat.Split();

            Mat diff0 = new Mat();
            Mat diff1 = new Mat();
            Mat diff2 = new Mat();


            Cv2.Absdiff(LmatPlanes[0], RmatPlanes[0], diff0);
            Cv2.Absdiff(LmatPlanes[1], RmatPlanes[1], diff1);
            Cv2.Absdiff(LmatPlanes[2], RmatPlanes[2], diff2);

            Cv2.MedianBlur(diff0, diff0, 5);
            Cv2.MedianBlur(diff1, diff1, 5);
            Cv2.MedianBlur(diff2, diff2, 5);

            diff0.SaveImage("result/diff0.jpg");
            diff1.SaveImage("result/diff1.jpg");
            diff2.SaveImage("result/diff2.jpg");

            Mat wiseMat = new Mat();

            Cv2.BitwiseOr(diff0, diff1, wiseMat);
            Cv2.BitwiseOr(wiseMat, diff2, wiseMat);

            wiseMat.SaveImage("result/wiseMat.jpg");

            Mat openingMat = new Mat();

            Cv2.MorphologyEx(wiseMat, openingMat, MorphTypes.Open, new Mat());

            Mat dilationMat = new Mat();

            Cv2.Dilate(openingMat, dilationMat, new Mat());
            Cv2.Threshold(dilationMat, dilationMat, 100, 255, ThresholdTypes.Binary);
            dilationMat.SaveImage(@"result\dilationMat.jpg");

            Mat LaddMat = new Mat();
            Mat RaddMat = new Mat();

            Console.WriteLine(dilationMat.GetType());
            Console.WriteLine(Rsrc.GetType());

            // dilationMat is grayscale, so convert it to the same color space as the Mats it is blended with
            Mat dilationScaleMat = new Mat();
            Mat dilationColorMat = new Mat();

            Cv2.ConvertScaleAbs(dilationMat, dilationScaleMat);
            Cv2.CvtColor(dilationScaleMat, dilationColorMat, ColorConversionCodes.GRAY2RGB);

            Cv2.AddWeighted(WarpedSrcMat, 0.3, dilationColorMat, 0.7, 0, LaddMat);
            Cv2.AddWeighted(Rsrc, 0.3, dilationColorMat, 0.7, 0, RaddMat);

            Image LaddImage = BitmapConverter.ToBitmap(LaddMat);

            pictureBox7.SizeMode = PictureBoxSizeMode.Zoom;
            pictureBox7.Image    = LaddImage;

            Image RaddImage = BitmapConverter.ToBitmap(RaddMat);

            pictureBox8.SizeMode = PictureBoxSizeMode.Zoom;
            pictureBox8.Image    = RaddImage;

            RaddMat.SaveImage(@"result\Result.jpg");

            MessageBox.Show("Done!");
        }
Example No. 22
        private Mat MoveDetect(Mat frame1, Mat frame2)
        {
            Mat result = frame2.Clone();


            var gray1 = new Mat();
            var gray2 = new Mat();

            Cv2.CvtColor(frame1, gray1, ColorConversionCodes.BGR2GRAY);
            Cv2.CvtColor(frame2, gray2, ColorConversionCodes.BGR2GRAY);
            var diff = new Mat();

            Cv2.Absdiff(gray1, gray2, diff);
            Cv2.ImShow("absdiss", diff);
            Cv2.Threshold(diff, diff, 45, 255, ThresholdTypes.Binary);
            Cv2.ImShow("threshold", diff);

            Mat element  = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(3, 3));
            Mat element2 = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(25, 25));

            Cv2.Erode(diff, diff, element);
            Cv2.ImShow("erode", diff);

            Cv2.Dilate(diff, diff, element2);
            Cv2.ImShow("dilate", diff);

            Point[][] contours = null;

            //vector<Vec4i> hierarcy;

            HierarchyIndex[] hierarcy = null;
            // fit ellipses to the contours and mark their centers
            Cv2.FindContours(diff, out contours, out hierarcy, RetrievalModes.External, ContourApproximationModes.ApproxNone);
            //cout << "num=" << contours.size() << endl;
            //contours.Count;
            int X = contours.GetLength(0);
            List <RotatedRect> box = new List <RotatedRect>();

            //for (int i = 0; i < X; i++)
            //{
            //    box[i] = Cv2.FitEllipse(new Mat(contours[i]));
            //    Cv2.Ellipse(result, box[i], new Scalar(0, 255, 0), 2, 8);
            //    Cv2.Circle(result, box[i].center, 3, new Scalar(0, 0, 255), -1, 8);
            //}



            foreach (Point[] pts in contours)
            {
                if (pts.Length < 5)
                {
                    continue;                    // FitEllipse requires at least five points
                }
                var box1 = Cv2.FitEllipse(pts);
                Cv2.Ellipse(result, box1, new Scalar(0, 255, 0), 2);


                Point point;
                point.X = (int)box1.Center.X;
                point.Y = (int)box1.Center.Y;
                //Cv2.Circle(result, (int)box1.Center.X, (int)box1.Center.X, 1, -1, 8);
                result.Circle(point, 1, -1);



                //Scalar color = Scalar.RandomColor();
                //foreach (Point p in pts)
                //{
                //    result.Circle(p,1, color);
                //}
            }
            return(result);
        }
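
A minimal capture loop that could drive the MoveDetect method above; the camera index and the 30 ms delay are assumptions:

        private void RunMoveDetect()
        {
            using var capture  = new VideoCapture(0);
            using var previous = new Mat();
            using var current  = new Mat();

            if (!capture.Read(previous))
            {
                return;
            }

            while (capture.Read(current) && Cv2.WaitKey(30) != 27)   // Esc to quit
            {
                using (Mat annotated = MoveDetect(previous, current))
                {
                    Cv2.ImShow("MoveDetect", annotated);
                }
                current.CopyTo(previous);
            }
        }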
Example No. 23
    private void UpdateImages()
    {
        if (backgroundMat == null)
        {
            return;
        }

        Mat cameraMat = CvUnity.TextureToMat(webCamTexture, webCamParams);

        if (cameraMat.Width > 512)
        {
            var size = new Size(512, (double)cameraMat.Height / cameraMat.Width * 512);
            cameraMat = cameraMat.Resize(size);
        }

        var width      = cameraMat.Width - cameraOffset.left - cameraOffset.right;
        var height     = cameraMat.Height - cameraOffset.top - cameraOffset.bottom;
        var posX       = cameraOffset.left;
        var posY       = cameraOffset.top;
        var cameraRect = new OpenCvSharp.Rect(posX, posY, width, height);

        cameraMat = cameraMat.Clone(cameraRect);
        Mat trimBgMat = backgroundMat.Clone(cameraRect);
        Mat diffBgMat = cameraMat.Clone();

        Cv2.Absdiff(diffBgMat, trimBgMat, diffBgMat);


        Mat grayscaleMat = new Mat();

        Cv2.CvtColor(diffBgMat, grayscaleMat, ColorConversionCodes.BGR2GRAY);

        //Mat blurMat = new Mat();
        //Cv2.GaussianBlur(grayscaleMat, blurMat, new Size(5, 5), 0);

        Mat maskMat = new Mat();

        Cv2.Threshold(grayscaleMat, maskMat, maskThreshold, maskMaxVal, ThresholdTypes.BinaryInv);
        //Cv2.Threshold(blurMat, maskMat, maskThreshold, maskMaxVal, ThresholdTypes.BinaryInv);

        Mat dilateMat = maskMat.Dilate(new Mat(), null, smoothIterationCount);
        Mat erodeMat  = dilateMat.Erode(new Mat(), null, smoothIterationCount);

        Mat outputMat = erodeMat.Clone();

        float targetAspectRatio = (float)targetMat.Width / targetMat.Height;
        float outputAspectRatio = (float)erodeMat.Width / erodeMat.Height;

        if (targetMat.Height != outputMat.Height)
        {
            var rate = (double)targetMat.Height / outputMat.Height;
            var size = new Size(outputMat.Width * rate, outputMat.Height * rate);
            outputMat = outputMat.Resize(size);
        }

        var rect      = new OpenCvSharp.Rect((outputMat.Width - targetMat.Width) / 2, 0, targetMat.Width, targetMat.Height);
        Mat resizeMat = outputMat.Clone(rect);
        Mat diffMat   = new Mat();

        Cv2.Absdiff(targetMat, resizeMat, diffMat);

        Mat resultColorMat = new Mat(resizeMat.Rows, resizeMat.Cols, MatType.CV_8UC4);

        CountDiff(resizeMat, targetMat, resultColorMat);

        MatToImage(cameraMat, cameraImage, ref cameraTexture);
        //MatToImage(backgroundMat, backgroundImage, ref backgroundTexture);
        //MatToImage(resizeMat, outputImage, ref outputTexture);
        //MatToImage(diffMat, diffImage, ref diffTexture);
        MatToImage(resultColorMat, diffImage, ref diffTexture);
    }
Example No. 24
        public void GetDiffFrame(int width, int height, out double[] buf)
        {
            buf = new double[width * height];
            var frame       = new Mat();
            var diff        = new Mat();
            var rotatedrect = new RotatedRect();

            if (capture.Read(frame))
            {
                frame = frame.Resize(new Size(width, height));
                Cv2.CvtColor(frame, frame, ColorConversionCodes.BGR2GRAY);
                if (PrevFrame != null)
                {
                    Cv2.Absdiff(frame, PrevFrame, diff);
                    double weight = 1;
                    Mat[]  contours;
                    for (int r = 0; r < 2; r++)
                    {
                        Cv2.Threshold(diff, diff, byte.MaxValue / 8, byte.MaxValue, ThresholdTypes.Otsu);

                        var nonzerocnt = Cv2.CountNonZero(diff);
                        weight = (0.25 - ((double)nonzerocnt) / (width * height)) / (0.25);
                        weight = weight < 0 ? 0 : weight;

                        if (weight > 0.5)
                        {
                            Mat h = new Mat();
                            Cv2.FindContours(diff, out contours, new Mat(), RetrievalModes.External, ContourApproximationModes.ApproxTC89KCOS);

                            diff = new Mat(new Size(width, height), MatType.CV_8UC1, new Scalar(0));
                            if (contours.Length > 0)
                            {
                                var areaave = contours.Average(x => Cv2.ContourArea(x));
                                for (int i = 0; i < contours.Length; i++)
                                {
                                    if (Cv2.ContourArea(contours[i]) > areaave)
                                    {
                                        Cv2.DrawContours(diff, contours, i, new Scalar(byte.MaxValue), -1);
                                    }
                                }
                            }
                        }
                        else
                        {
                            diff = new Mat(new Size(width, height), MatType.CV_8UC1, new Scalar(0));
                        }
                    }
                    Point[][]        contourspoint;
                    HierarchyIndex[] hierarchyIndexes;
                    Cv2.FindContours(diff.Clone(), out contourspoint, out hierarchyIndexes, RetrievalModes.External, ContourApproximationModes.ApproxTC89KCOS);
                    if (contourspoint.Length > 0)
                    {
                        var points = new List <Point>();
                        for (int idx_cnt = 0; idx_cnt < contourspoint.GetLength(0); ++idx_cnt)
                        {
                            if (hierarchyIndexes[idx_cnt].Parent != -1)
                            {
                                continue;
                            }
                            points.AddRange(contourspoint[idx_cnt]);
                        }
                        if (points.Count > 5)
                        {
                            diff        = new Mat(new Size(width, height), MatType.CV_8UC1, new Scalar(0));
                            rotatedrect = Cv2.FitEllipse(points);
                            float rho = 0.25f;
                            rotatedrect.Angle       = (rho * rotatedrect.Angle + (1 - rho) * PrevRect.Angle);
                            rotatedrect.Size.Width  = (rho * rotatedrect.Size.Width + (1 - rho) * PrevRect.Size.Width);
                            rotatedrect.Size.Height = (rho * rotatedrect.Size.Height + (1 - rho) * PrevRect.Size.Height);
                            Cv2.Ellipse(diff, rotatedrect, new Scalar(byte.MaxValue), -1);
                        }
                    }

                    double w = 0.8;
                    Cv2.AddWeighted(PrevDiffFrame, w, diff, 1 - w, 0, diff);

                    Mat result = diff.Clone();
                    //Cv2.Threshold(diff, result, byte.MaxValue / 8, byte.MaxValue, ThresholdTypes.Binary);

                    Cv2.Dilate(result, result, new Mat(), new Point(-1, -1), 8);

                    //frame.CopyTo(result, result);

                    unsafe
                    {
                        byte *rslt = (byte *)result.Data;
                        byte *f    = (byte *)frame.Data;
                        for (int i = 0; i < width * height; i++)
                        {
                            double r = (double)rslt[i] / byte.MaxValue;
                            if (r > 0.25)
                            {
                                buf[i] = ((double)f[i] / byte.MaxValue) + 0.25;
                            }
                        }
                    }
                }
                if (PrevFrame == null)
                {
                    PrevFrame     = frame.Clone();
                    PrevDiffFrame = new Mat(PrevFrame.Size(), PrevFrame.Type(), new Scalar(0));
                    PrevRect      = new RotatedRect();
                }
                else
                {
                    double weight = 0.5;
                    Cv2.AddWeighted(PrevFrame, weight, frame, 1 - weight, 0, PrevFrame);
                    PrevDiffFrame = diff.Clone();
                    PrevRect      = rotatedrect;
                }
            }
        }
Example No. 25
        static void Main(string[] args)
        {
            var afWindow = new Window("Annotated Frame");
            var cdWindow = new Window("Contour Delta");

            VideoCapture capture = new VideoCapture("rtsp://10.0.0.104:554/1/h264major");

            int         frameIndex = 0;
            Mat         lastFrame  = new Mat();
            VideoWriter writer     = null;

            while (capture.IsOpened())
            {
                Mat frame = new Mat();

                if (!capture.Read(frame))
                {
                    break;
                }

                Mat grayFrame, dilatedFrame, edges, deltaCopyFrame = new Mat();
                Mat deltaFrame = new Mat();

                try
                {
                    frame = frame.Resize(new Size(0, 0), 0.33, 0.33);
                }
                catch (Exception e)
                {
                }
                grayFrame = frame.CvtColor(ColorConversionCodes.BGR2GRAY);
                grayFrame = grayFrame.GaussianBlur(new Size(21, 21), 0);

                if (frameIndex == 0)
                {
                    frameIndex++;

                    afWindow.Move(0, 0);
                    cdWindow.Move(0, grayFrame.Size().Height);

                    string fileName = "C:\\temp\\capture.avi";

                    string fcc = capture.FourCC;
                    double fps = capture.Get(CaptureProperty.Fps);

                    Size frameSize = new Size(grayFrame.Size().Width, grayFrame.Size().Height);

                    writer = new VideoWriter(fileName, fcc, fps, frameSize);
                    Console.Out.WriteLine("Frame Size = " + grayFrame.Size().Width + " x " + grayFrame.Size().Height);

                    if (!writer.IsOpened())
                    {
                        Console.Out.WriteLine("Error Opening Video File For Write");
                        return;
                    }

                    lastFrame = grayFrame;
                    continue;
                }
                else if (frameIndex % 50 == 0)
                {
                    frameIndex = 0;
                    lastFrame  = grayFrame;
                }

                frameIndex++;

                Cv2.Absdiff(lastFrame, grayFrame, deltaFrame);
                Cv2.Threshold(deltaFrame, deltaFrame, 50, 255, ThresholdTypes.Binary);

                int iterations = 2;
                Cv2.Dilate(deltaFrame, deltaFrame, new Mat(), new Point(), iterations);

                Point[][]        contours;
                HierarchyIndex[] hierarchy;

                Cv2.FindContours(deltaFrame, out contours, out hierarchy, RetrievalModes.Tree, ContourApproximationModes.ApproxSimple, new Point(0, 0));

                var            countorsPoly = new Point[contours.Length][];
                List <Rect>    boundRect    = new List <Rect>();
                List <Point2f> center       = new List <Point2f>();
                List <float>   radius       = new List <float>();

                for (int i = 0; i < contours.Length; i++)
                {
                    countorsPoly[i] = Cv2.ApproxPolyDP(contours[i], 3, true);
                    if (countorsPoly.Length != 0)
                    {
                        boundRect.Insert(i, Cv2.BoundingRect(countorsPoly[i]));
                        Cv2.MinEnclosingCircle(countorsPoly[i], out Point2f centerObj, out float radiusObj);
                        center.Insert(i, centerObj);
                        radius.Insert(i, radiusObj);
                    }
                }

                for (int i = 0; i < contours.Length; i++)
                {
                    if (countorsPoly.Length != 0)
                    {
                        Scalar color = new Scalar(54, 67, 244);
                        //Cv2.DrawContours(frame, countorsPoly, i, color, 1, LineTypes.Link8, new HierarchyIndex[] { }, 0, new Point());
                        Cv2.Rectangle(frame, boundRect[i].TopLeft, boundRect[i].BottomRight, color, 2, LineTypes.Link8, 0);
                        //Cv2.Circle(frame, (int)center[i].X, (int)center[i].Y, (int)radius[i], color, 2, LineTypes.Link8, 0);
                    }
                }

                afWindow.ShowImage(frame);
                cdWindow.ShowImage(deltaFrame);

                writer.Write(frame);

                switch (Cv2.WaitKey(1))
                {
                case 27:
                    capture.Release();
                    writer.Release();
                    return;
                }
            }
        }
Example No. 26
 public void Substract(Mat img)
 {
     Cv2.Absdiff(ImageMat, img, ImageMat);
     Update();
 }
Example No. 27
 public Mat AbsDiff(Mat image1, Mat image2)
 {
     Cv2.Absdiff(image1, image2, _originImage);
     _currentImage = _originImage;
     return(_currentImage);
 }
Example No. 28
        public List <OpenCvSharp.Rect>[] Compare(Mat frame, OpenCvSharp.Point offset1, OpenCvSharp.Point offset2, OpenCvSharp.Size size, int threshold = 64, int count = 5)
        {
            var source1 = new Mat(frame, new OpenCvSharp.Rect(offset1, size));
            var source2 = new Mat(frame, new OpenCvSharp.Rect(offset2, size));

            var difference = new Mat();

            Cv2.Absdiff(source1, source2, difference);

            difference = difference.Threshold(threshold, 255, ThresholdTypes.Binary); // this threshold choice is somewhat arbitrary
            difference = difference.MedianBlur(5);

            var kernel = Mat.Ones(5, 5, MatType.CV_8UC1);

            difference = difference.Dilate(kernel);
            difference = difference.CvtColor(ColorConversionCodes.BGR2GRAY);

            var percentage = Cv2.CountNonZero(difference) * 100.0f / (difference.Width * difference.Height);

            if (percentage > 10.0f)
            {
                return(null);
            }

            var labels      = new Mat();
            var stats       = new Mat();
            var centroids   = new Mat();
            var countLabels = difference.ConnectedComponentsWithStats(labels, stats, centroids);

            var areaList1 = new List <OpenCvSharp.Rect>();
            var areaList2 = new List <OpenCvSharp.Rect>();

            for (var i = 1; i < countLabels; i++)
            {
                var x      = stats.Get <int>(i, 0);
                var y      = stats.Get <int>(i, 1);
                var width  = stats.Get <int>(i, 2);
                var height = stats.Get <int>(i, 3);
                areaList1.Add(new OpenCvSharp.Rect(offset1.X + x, offset1.Y + y, width, height));
                areaList2.Add(new OpenCvSharp.Rect(offset2.X + x, offset2.Y + y, width, height));
            }

            areaList1.Sort((area1, area2) => area1.Width * area1.Height > area2.Width * area2.Height ? -1 : 1);
            areaList2.Sort((area1, area2) => area1.Width * area1.Height > area2.Width * area2.Height ? -1 : 1);

            var cloned = frame.Clone();

            foreach (var area in areaList1)
            {
                cloned.Rectangle(area, new Scalar(0, 0, 255));
            }

            //Cv2.ImShow("before", cloned);
            //Cv2.WaitKey(0);
            //Cv2.DestroyAllWindows();
            //cloned.Dispose();

            var deletedList = new List <OpenCvSharp.Rect>();
            var basedLength = 250;

            for (var i1 = 0; i1 < areaList1.Count; i1++)
            {
                if (deletedList.Contains(areaList1[i1]))
                {
                    continue;
                }

                for (var i2 = i1 + 1; i2 < areaList1.Count; i2++)
                {
                    var scaleLength = Math.Min(50, (int)(10 + 250 / ((Math.Max(areaList1[i1].Width, areaList1[i1].Height) + Math.Max(areaList1[i2].Width, areaList1[i2].Height)) / 2.0f)));
                    var scaledArea  = new OpenCvSharp.Rect(areaList1[i1].X - scaleLength, areaList1[i1].Y - scaleLength, areaList1[i1].Width + scaleLength * 2, areaList1[i1].Height + scaleLength * 2);
                    var overlapped  = scaledArea & areaList1[i2];
                    if (overlapped.Width != 0 && overlapped.Height != 0)
                    {
                        deletedList.Add(areaList1[i2]);
                    }
                }
            }

            foreach (var deleted in deletedList)
            {
                areaList1.Remove(deleted);
            }

            cloned = frame.Clone();
            foreach (var area in areaList1)
            {
                cloned.Rectangle(area, new Scalar(0, 0, 255));
            }

            //Cv2.ImShow("after", cloned);
            //Cv2.WaitKey(0);
            //Cv2.DestroyAllWindows();
            //cloned.Dispose();


            if (areaList1.Count != count)
            {
                if (threshold < 0)
                {
                    return(null);
                }

                return(Compare(frame, offset1, offset2, size, threshold - 1, count));
            }

            deletedList.Clear();
            for (var i1 = 0; i1 < areaList2.Count; i1++)
            {
                if (deletedList.Contains(areaList2[i1]))
                {
                    continue;
                }

                for (var i2 = i1 + 1; i2 < areaList2.Count; i2++)
                {
                    var scaleLength = Math.Min(50, (int)(10 + 250 / ((Math.Max(areaList2[i1].Width, areaList2[i1].Height) + Math.Max(areaList2[i2].Width, areaList2[i2].Height)) / 2.0f)));
                    var scaledArea  = new OpenCvSharp.Rect(areaList2[i1].X - scaleLength, areaList2[i1].Y - scaleLength, areaList2[i1].Width + scaleLength * 2, areaList2[i1].Height + scaleLength * 2);
                    var overlapped  = scaledArea & areaList2[i2];
                    if (overlapped.Width != 0 && overlapped.Height != 0)
                    {
                        deletedList.Add(areaList2[i2]);
                    }
                }
            }

            foreach (var deleted in deletedList)
            {
                areaList2.Remove(deleted);
            }

            return(new List <OpenCvSharp.Rect>[] { areaList1, areaList2 });
        }
Example No. 29
        static void Main(string[] args)
        {
            Mat mat1 = null, mat2 = null, mat3 = null;

            var frame   = new Mat();
            var capture = new VideoCapture(0);

            capture.Open(0);

            if (capture.IsOpened())
            {
                while (true)
                {
                    capture.Read(frame);
                    Console.WriteLine("read");

                    mat3 = frame;

                    if (mat1 != null)
                    {
                        //var image = DiffImage(mat3, mat2, mat1).ToBitmap();
                        Mat d1 = new Mat();
                        Cv2.Absdiff(mat3, mat2, d1);

                        var image = mat3.ToBitmap();

                        //image.Save("1.jpg", ImageFormat.Jpeg);
                        //Console.WriteLine("save");

                        var grayImage = new Mat();
                        Cv2.CvtColor(mat3, grayImage, ColorConversionCodes.BGRA2GRAY);
                        Cv2.EqualizeHist(grayImage, grayImage);

                        var cascade       = new CascadeClassifier(@".\CascadeClassifiers\haarcascade_frontalface_alt2.xml");
                        var nestedCascade = new CascadeClassifier(@".\CascadeClassifiers\haarcascade_eye_tree_eyeglasses.xml");

                        var faces = cascade.DetectMultiScale(
                            image: grayImage,
                            scaleFactor: 1.1,
                            minNeighbors: 2,
                            flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                            minSize: new Size(30, 30)
                            );

                        Console.WriteLine("Detected faces: {0}", faces.Length);

                        var srcImage = mat3;

                        //new BodyDetector().Detect(mat3);
                        faces.ToList().ForEach(e => e.Height += 10);
                        foreach (var faceRect in faces)
                        {
                            Cv2.Rectangle(frame, faceRect, Scalar.Red, 2);
                            //var a = new Mat(srcImage, faceRect);
                            //var eigenValues = OutputArray.Create(a);
                            //var eigenVectors = OutputArray.Create(a);
                            //Cv2.Eigen(a, eigenValues, eigenVectors);
                        }

                        Cv2.ImShow("Source", mat3);
                        Cv2.WaitKey(1); // do events

                        //var count = 1;
                        //foreach (var faceRect in faces)
                        //{
                        //    var detectedFaceImage = new Mat(srcImage, faceRect);
                        //    Cv2.ImShow(string.Format("Face {0}", count), detectedFaceImage);
                        //    Cv2.WaitKey(1); // do events

                        //    //var color = Scalar.FromRgb(rnd.Next(0, 255), rnd.Next(0, 255), rnd.Next(0, 255));
                        //    Cv2.Rectangle(srcImage, faceRect, Scalar.Red, 3);

                        //    var detectedFaceGrayImage = new Mat();
                        //    Cv2.CvtColor(detectedFaceImage, detectedFaceGrayImage, ColorConversionCodes.BGRA2GRAY);
                        //    var nestedObjects = nestedCascade.DetectMultiScale(
                        //        image: detectedFaceGrayImage,
                        //        scaleFactor: 1.1,
                        //        minNeighbors: 2,
                        //        flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                        //        minSize: new Size(30, 30)
                        //    );

                        //    Console.WriteLine("Nested Objects[{0}]: {1}", count, nestedObjects.Length);

                        //    foreach (var nestedObject in nestedObjects)
                        //    {
                        //        var center = new Point
                        //        {
                        //            X = (int)(Math.Round(nestedObject.X + nestedObject.Width * 0.5, MidpointRounding.ToEven) + faceRect.Left),
                        //            Y = (int)(Math.Round(nestedObject.Y + nestedObject.Height * 0.5, MidpointRounding.ToEven) + faceRect.Top)
                        //        };
                        //        var radius = Math.Round((nestedObject.Width + nestedObject.Height) * 0.25, MidpointRounding.ToEven);
                        //        Cv2.Circle(srcImage, center, (int)radius, Scalar.Red, thickness: 3);
                        //    }

                        //    count++;
                        //}

                        //Cv2.ImShow("Haar Detection", srcImage);
                        //Cv2.WaitKey(1); // do events
                    }

                    mat1 = mat2;
                    mat2 = mat3;
                }
            }
        }
Example No. 30
        public void GetDiffFrame(int width, int height, out double[] buf)
        {
            buf = new double[width * height];
            var frame = new Mat();
            var diff  = new Mat();

            if (capture.Read(frame))
            {
                frame = frame.Resize(new Size(width, height));
                Cv2.CvtColor(frame, frame, ColorConversionCodes.BGR2GRAY);
                if (PrevFrame != null)
                {
                    diff = frame.Clone();
                    Cv2.Absdiff(frame, PrevFrame, diff);
                    Cv2.Threshold(diff, diff, byte.MaxValue * 0.25, byte.MaxValue, ThresholdTypes.Binary);

                    Point[][]        contours;
                    HierarchyIndex[] hierarchyIndexes;
                    Cv2.FindContours(diff.Clone(), out contours, out hierarchyIndexes, RetrievalModes.List, ContourApproximationModes.ApproxTC89KCOS);
                    if (contours.Length > 0)
                    {
                        var points = new List <Point>();
                        for (int idx_cnt = 0; idx_cnt < contours.GetLength(0); ++idx_cnt)
                        {
                            if (hierarchyIndexes[idx_cnt].Parent != -1)
                            {
                                continue;
                            }
                            points.AddRange(contours[idx_cnt]);
                        }
                        Cv2.DrawContours(diff, new List <List <Point> >(new List <Point>[] { new List <Point>(Cv2.ConvexHull(points.ToArray())) }), 0, new Scalar(byte.MaxValue), -1);
                    }
                    var masked = diff.Clone();
                    frame.CopyTo(masked, diff);
                    Cv2.BitwiseOr(diff, PrevDiffFrame, diff);
                    Cv2.AddWeighted(masked, 0.5, diff, 0.5, 0, diff);

                    if (PrevFrame != null)
                    {
                        unsafe
                        {
                            byte *p  = (byte *)frame.Data;
                            byte *pv = (byte *)diff.Data;
                            for (int i = 0; i < width * height; i++)
                            {
                                buf[i] = (double)pv[i] / byte.MaxValue;
                            }
                        }
                    }
                }
                if (PrevFrame == null)
                {
                    PrevFrame     = frame.Clone();
                    PrevDiffFrame = new Mat(PrevFrame.Size(), PrevFrame.Type(), new Scalar(0));   // start from an all-zero diff so the first BitwiseOr is well defined
                }
                else
                {
                    double weight = 0.75;
                    Cv2.AddWeighted(PrevFrame, weight, frame, 1 - weight, 0, PrevFrame);
                    PrevDiffFrame = diff.Clone();
                }
            }
        }