Example #1
 public void Add4(ref Mat dst, Mat[] src)
 {
     for (int i = 0; i < 4; i++)
     {
         Cv2.Add(dst, src[i], dst);
     }
 }
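A note on Example #1: Cv2.Add uses saturating arithmetic on 8-bit Mats, so repeatedly accumulating into an 8-bit dst clips at 255 rather than overflowing. A minimal sketch illustrating the saturation (values are illustrative, not part of the original snippet):

 using Mat a = new Mat(1, 1, MatType.CV_8UC1, new byte[] { 200 });
 using Mat b = new Mat(1, 1, MatType.CV_8UC1, new byte[] { 100 });
 using Mat sum = new Mat();
 Cv2.Add(a, b, sum);
 // Saturating add: 200 + 100 is clamped to 255 instead of wrapping around.
 Console.WriteLine(sum.At<byte>(0, 0)); // prints 255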
Example #2
        private static void Compare(string img)
        {
            var maskname = "2ti_yajirusi_inversion.bmp";
            var rect     = new Rect(685, 125, 60, 50);

            using (Mat color = new Mat($@"{img}"))
                using (var color_small = new Mat(color, rect))
                    using (Mat yellow = new Mat())
                        using (var mask = new Mat($@"image\{maskname}", ImreadModes.GrayScale))
                            using (var result = new Mat())
                            {
                                // Greenish pixel range (RGB=14/255/101), used to detect the arrow in the scene showing individual results
                                Scalar scalar_low  = new Scalar(50, 245, 0); // B,G,R (I always forget the order)
                                Scalar scalar_high = new Scalar(120, 255, 30);

                                // Extract the yellow pixels (binarization)
                                Cv2.InRange(color_small, scalar_low, scalar_high, yellow);
                                Cv2.Add(yellow, mask, result);

                                using (var hist = GetHistogram(result))
                                {
                                    int cnt = (int)hist.Get <float>(0);
                                    if (cnt < 10)
                                    {
                                        Console.WriteLine($"img:[{img}] black_count:[{hist.Get<float>(0)}] white_count:[{hist.Get<float>(255)}]");
                                    }
                                }
                            }
        }
Example #3
        public void 評価結果画像作成_debug(Mat 検査結果, Mat テンプレート, int[,] 正解座標, ref Mat color_debug)
        {
            Mat res_color    = new Mat(new Size(検査結果.Width, 検査結果.Height), MatType.CV_8UC3, Scalar.All(0));
            var temp_color   = res_color.Clone();
            var result_clone = 検査結果.Clone();


            paint_black(ref result_clone, テンプレート);
            CvBlobs blobs = new CvBlobs(result_clone);

            blobs.FilterByArea(9, 250);
            blobs.RenderBlobs(result_clone, res_color);



            Cv2.CvtColor(テンプレート, temp_color, ColorConversionCodes.GRAY2BGR);
            Cv2.Add(temp_color, res_color, color_debug);

            点数計算_debug(blobs, 正解座標, ref color_debug);

            res_color    = null;
            temp_color   = null;
            blobs        = null;
            result_clone = null;
        }
Example #4
        public Mat[] MutateKernel(float mutationRate)
        {
            Mat[] outp = new Mat[kernels.Length];
            for (int i = 0; i < kernels.Length; i++)
            {
                outp[i] = new Mat(kernels[i].Size(), MatType.CV_32FC1);
                Mat rnd = new Mat(kernels[i].Size(), MatType.CV_32FC1);
                rnd.Randu(new Scalar(-mutationRate), new Scalar(mutationRate));
                //rnd.Randu(new Scalar(0.0), new Scalar(0.0));
                //This might work
                Cv2.Add(kernels[i], rnd, kernels[i]);
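                // NOTE: this adds the random noise into kernels[i] in place, while outp[i] (allocated above)
                // is never written, so the returned array contains empty Mats. If the intent is to return
                // mutated copies without modifying kernels[i], the destination here should probably be outp[i].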

                /*for (int x = 0; x < rnd.Rows; x++)
                 * {
                 *  for (int j = 0; j < rnd.Cols; j++)
                 *  {
                 *      Console.Write(rnd.At<float>(x, j) + " , ");
                 *  }
                 *  Console.WriteLine();
                 * }
                 * for (int x = 0; x < rnd.Rows; x++)
                 * {
                 *  for (int j = 0; j < rnd.Cols; j++)
                 *  {
                 *      Console.Write(kernels[i].At<float>(x, j) + " , ");
                 *  }
                 *  Console.WriteLine();
                 * }*/
                rnd.Dispose();
            }
            return(outp);
        }
Example #5
        public static Mat ThresholdStairs(this Mat src)
        {
            var dst = new Mat(src.Rows, src.Cols, src.Type());

            var partCount = 10;
            var partWidth = src.Width / partCount;

            for (var i = 0; i < partCount; ++i)
            {
                var th_mat = new Mat();
                Cv2.Threshold(src, th_mat, 255 / 10 * (i + 1), 255, ThresholdType.Binary);
                th_mat.Rectangle(new Rect(0, 0, partWidth * i, src.Height), new Scalar(0), -1);
                th_mat.Rectangle(new Rect(partWidth * (i + 1), 0, src.Width - partWidth * (i + 1), src.Height), new Scalar(0), -1);

                Cv2.Add(dst, th_mat, dst);
            }
            var color_dst = new Mat();

            Cv2.CvtColor(dst, color_dst, ColorConversion.GrayToRgb);
            for (var i = 0; i < partCount; ++i)
            {
                color_dst.Line(partWidth * i, 0, partWidth * i, src.Height, new CvScalar(50, 200, 50), thickness: 2);
            }
            return(color_dst);
        }
Example #6
        public void AddScalar()
        {
            using Mat src = new Mat(2, 2, MatType.CV_8UC1, new byte[] { 1, 2, 3, 4 });
            using Mat dst = new Mat();
            Cv2.Add(new Scalar(10), src, dst);

            Assert.Equal(MatType.CV_8UC1, dst.Type());
            Assert.Equal(2, dst.Rows);
            Assert.Equal(2, dst.Cols);

            Assert.Equal(11, dst.At <byte>(0, 0));
            Assert.Equal(12, dst.At <byte>(0, 1));
            Assert.Equal(13, dst.At <byte>(1, 0));
            Assert.Equal(14, dst.At <byte>(1, 1));

            Cv2.Add(src, new Scalar(10), dst);
            Assert.Equal(11, dst.At <byte>(0, 0));
            Assert.Equal(12, dst.At <byte>(0, 1));
            Assert.Equal(13, dst.At <byte>(1, 0));
            Assert.Equal(14, dst.At <byte>(1, 1));

            using var inputArray = InputArray.Create(10.0);
            Cv2.Add(src, inputArray, dst);
            Assert.Equal(11, dst.At <byte>(0, 0));
            Assert.Equal(12, dst.At <byte>(0, 1));
            Assert.Equal(13, dst.At <byte>(1, 0));
            Assert.Equal(14, dst.At <byte>(1, 1));
        }
Example #7
        public Mat ExtractMaskAffine(Mat sourceImage, List <Point2f> pts1)
        {
            Mat outputMat = new Mat(50, 50, sourceImage.Type());

            var dist   = (from p in pts1 select p).Distinct().ToList(); // 3 dots
            var dstTri = new List <Point>()
            {
                new Point(0.0, 0.0), new Point(50.0, 0.0), new Point(0.0, 50.0)
            };

            var dstTri2f = new List <Point2f>()
            {
                dstTri[0], dstTri[1], dstTri[2]
            };

            Mat lambda = Cv2.GetAffineTransform(dist, dstTri2f);

            Cv2.WarpAffine(sourceImage, outputMat, lambda, outputMat.Size());

            Mat outputPieceWithBG = outputMat.EmptyClone();

            outputPieceWithBG.SetTo(Scalar.White);
            //redraw segment over mat (black)
            outputPieceWithBG.FillConvexPoly(dstTri, Scalar.Black, LineTypes.AntiAlias, 0);

            Mat resultMat = new Mat();

            Cv2.Add(outputMat, outputPieceWithBG, resultMat);

            return(resultMat);
        }
Example #8
        //http://kylog.tistory.com/18
        /// <summary>
        /// Change contrast of Mat. Contrast: -1 ~ 1
        /// </summary>
        /// <param name="contrast">Contrast amount, from -1 to 1</param>
        public static void Contrast(Mat input, Mat output, double contrast)
        {
            var c = contrast > 0 ? 1 / (1 - contrast) : 1 + contrast;

            Cv2.Subtract(input, new OpenCvSharp.Scalar(128, 128, 128, 0), output);
            Cv2.Multiply(output, new OpenCvSharp.Scalar(c, c, c, 1), output);
            Cv2.Add(output, new OpenCvSharp.Scalar(128, 128, 128, 0), output);
        }
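A possible usage sketch for the Contrast helper above (the file name is hypothetical, the input is assumed to be an 8-bit BGR image, and the call is assumed to happen in the class that defines Contrast):

        using Mat src = Cv2.ImRead("photo.jpg", ImreadModes.Color);
        using Mat dst = new Mat();
        Contrast(src, dst, 0.5);   // positive values increase contrast, negative values reduce it
        Cv2.ImWrite("photo_contrast.jpg", dst);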
        /// <summary>
        /// Apply the given warps to a specified image and return the warped result.
        /// </summary>
        /// <param name="sourceImage">The source image to warp</param>
        /// <param name="width">The width of the destination image</param>
        /// <param name="height">The height of the destination image</param>
        /// <param name="warps">The warps to apply</param>
        /// <returns>The warped image</returns>
        public static Mat ApplyWarps(System.Drawing.Bitmap sourceImage, int width, int height, IEnumerable <Warp> warps)
        {
            // set up opencv images for the replacement image and the output
            var source      = BitmapConverter.ToMat(sourceImage);
            var destination = new Mat(height, width, MatType.CV_8UC3);

            destination.SetTo(0);

            // process all warps
            foreach (var warp in warps)
            {
                var t1 = warp.Source.ToPoint2f();
                var t2 = warp.Destination.ToPoint2f();

                // get bounding rects around source and destination triangles
                var r1 = Cv2.BoundingRect(t1);
                var r2 = Cv2.BoundingRect(t2);

                // crop the input image to r1
                Mat img1Cropped = new Mat(r1.Size, source.Type());
                new Mat(source, r1).CopyTo(img1Cropped);

                // adjust triangles to local coordinates within their bounding box
                for (int i = 0; i < t1.Length; i++)
                {
                    t1[i].X -= r1.Left;
                    t1[i].Y -= r1.Top;
                    t2[i].X -= r2.Left;
                    t2[i].Y -= r2.Top;
                }

                // get the transformation matrix to warp t1 to t2
                var transform = Cv2.GetAffineTransform(t1, t2);

                // warp triangle
                var img2Cropped = new Mat(r2.Height, r2.Width, img1Cropped.Type());
                Cv2.WarpAffine(img1Cropped, img2Cropped, transform, img2Cropped.Size());

                // create a mask in the shape of the t2 triangle
                var hull = from p in t2 select new Point(p.X, p.Y);
                var mask = new Mat(r2.Height, r2.Width, MatType.CV_8UC3);
                mask.SetTo(0);
                Cv2.FillConvexPoly(mask, hull, new Scalar(1, 1, 1), LineTypes.Link8, 0);

                // alpha-blend the t2 triangle - this sets all pixels outside the triangle to zero
                Cv2.Multiply(img2Cropped, mask, img2Cropped);

                // cut the t2 triangle out of the destination image
                var target = new Mat(destination, r2);
                Cv2.Multiply(target, new Scalar(1, 1, 1) - mask, target);

                // insert the t2 triangle into the destination image
                Cv2.Add(target, img2Cropped, target);
            }

            // return the destination image
            return(destination);
        }
Example #10
        private Mat AlphaBlending(Mat data, int posX, int posY)
        {
            var dataWidth      = data.Size().Width;
            var dataHeight     = data.Size().Height;
            var dataToBeFilled = new Scalar(0, 0, 0, 0);

            Mat newMat;

            newMat = data.CopyMakeBorder(
                top: posY,
                bottom: SceneSize.Height - dataHeight - posY,
                left: posX,
                right: SceneSize.Width - dataWidth - posX,
                borderType: BorderTypes.Constant,
                value: dataToBeFilled
                );

            if (PreLoadedMats != null)
            {
                PreLoadedMats.MoveNext();

                var baseimg  = PreLoadedMats.Current;
                var newalpha = newMat.Split()[3];

                Mat background = new Mat(SceneSize, MatType.CV_8UC3, 0);
                Mat foreground = new Mat(SceneSize, MatType.CV_8UC3, 0);
                Mat alpha      = new Mat(SceneSize, MatType.CV_8UC3, 0);

                baseimg = baseimg.CvtColor(ColorConversionCodes.RGBA2RGB);
                baseimg.ConvertTo(background, MatType.CV_8UC3);

                newMat = newMat.CvtColor(ColorConversionCodes.RGBA2RGB);
                newMat.ConvertTo(foreground, MatType.CV_8UC3);

                newalpha = newalpha.CvtColor(ColorConversionCodes.RGBA2RGB);
                newalpha.ConvertTo(alpha, MatType.CV_8UC3);

                Cv2.Multiply(alpha, foreground, foreground);
                Cv2.Multiply(1 - (alpha / 255.0), background, background);
                Cv2.Add(background, foreground, newMat);

                background.Dispose();
                foreground.Dispose();
                alpha.Dispose();
                BaseMat.Dispose();
                newalpha.Dispose();
            }
            else
            {
                newMat.CopyTo(BaseMat, newMat.Split()[3]);
            }

            return(newMat);
        }
Example #11
    public void RunScript()
    {
        // Check that the image file exists.
        if (!File.Exists(imageFilePath))
        {
            Debug.Log("Image file does not exist!");
            //return;
        }

        Mat image = Cv2.ImRead(imageFilePath, ImreadModes.Color);

        if (image.Empty())
        {
            Debug.Log("No readable image file.");
            return;
        }

        Cv2.NamedWindow("Image", WindowMode.KeepRatio);
        Cv2.NamedWindow("Image2", WindowMode.KeepRatio);
        Cv2.NamedWindow("subImage", WindowMode.KeepRatio);
        Cv2.ImShow("Image", image);

//		OpenCvSharp.Rect roi = new OpenCvSharp.Rect (image.Width / 4, image.Height / 4, image.Width / 2, image.Height / 2);
//		Mat subImage = new Mat (image, roi);
//		//subImage = new Scalar (255, 255, 255) - subImage;
//		Cv2.Add (subImage, subImage, subImage);
//		Cv2.ImShow ("subImage", subImage);
//		Cv2.ImShow ("Image2", image);

        Mat imageGray = image.CvtColor(ColorConversionCodes.BGR2GRAY) / 10;

        // NB! EmptyClone does not clear the underlying memory; stale contents cause visual glitches
        //Mat canvas = imageGray.EmptyClone ();
        Mat canvas = Mat.Zeros(imageGray.Size(), imageGray.Type());



        Vector2 drawingCanvasSize = new Vector2(canvas.Width - squareSize, canvas.Height - squareSize);

        for (int i = 0; i < iteration; ++i)
        {
            OpenCvSharp.Rect drawingRegion = new OpenCvSharp.Rect(
                (int)Random.Range(0, drawingCanvasSize.x - 1),
                (int)Random.Range(0, drawingCanvasSize.y - 1),
                squareSize, squareSize);

            Mat drawingCanvas = new Mat(canvas, drawingRegion);
            Mat drawingSource = new Mat(imageGray, drawingRegion);

            Cv2.Add(drawingCanvas, drawingSource, drawingCanvas);

            Cv2.ImShow("Image2", canvas);
        }
    }
Example #12
        void BlendByMask(Mat dst, Mat src, Mat mask)
        {
            Mat maskedSrc        = new Mat();
            Mat multichannelMask = new Mat();

            Cv2.Merge(new Mat[] { mask, mask, mask }, multichannelMask);

            Cv2.Multiply(src, multichannelMask, maskedSrc);
            Cv2.Multiply(dst, new Scalar(1, 1, 1) - multichannelMask, dst);
            Cv2.Add(dst, maskedSrc, dst);
            maskedSrc.Dispose();
            multichannelMask.Dispose();
        }
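BlendByMask above assumes that dst, src, and mask are floating-point and that the mask values lie in 0..1; otherwise the two Multiply calls scale by raw 0/255 values. A hedged sketch of preparing 8-bit inputs under that assumption (dst8u, src8u, and mask8u are hypothetical Mats):

            using Mat dstF = new Mat();
            using Mat srcF = new Mat();
            using Mat maskF = new Mat();
            dst8u.ConvertTo(dstF, MatType.CV_32FC3);
            src8u.ConvertTo(srcF, MatType.CV_32FC3);
            mask8u.ConvertTo(maskF, MatType.CV_32FC1, 1.0 / 255.0); // normalize the mask to 0..1
            BlendByMask(dstF, srcF, maskF);
            dstF.ConvertTo(dst8u, MatType.CV_8UC3); // back to 8-bit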
Example #13
    void CamUpdate()
    {
        CvUtil.GetWebCamMat(webCamTexture, ref mat);

        mog2.Apply(mat, fg, 0.05f);
        Cv2.GaussianBlur(fg, fg, new Size(21, 21), 0);
        Cv2.Threshold(fg, fg, 30, 255, ThresholdTypes.Binary);
        Cv2.Dilate(fg, fg, nm, default(Point?), 2);
        Cv2.CvtColor(fg, fg, ColorConversionCodes.GRAY2BGRA);
        Cv2.Add(mat, fg, fg);

        CvConvert.MatToTexture2D(fg, ref tex);
        rawImage.texture = tex;
    }
Example #14
        public static void exec()
        {
            var fname    = "base01.mp4";
            var maskname = "yellow_1bit_inversion.bmp";

            var videoCapture = VideoCapture.FromFile($@"image\{fname}");

            videoCapture.Set(CaptureProperty.FrameWidth, 1920);
            videoCapture.Set(CaptureProperty.FrameHeight, 1080);
            videoCapture.Set(CaptureProperty.FrameWidth, 1280);
            videoCapture.Set(CaptureProperty.FrameHeight, 720);

            var mat = new Mat();
            var mat_resize = new Mat();
            Mat yellow_hist, result_hist;

            if (videoCapture.IsOpened())
            {
                for (int i = 0; i < 10000; i++)
                {
                    var ret = videoCapture.Read(mat);

                    Cv2.Resize(mat, mat_resize, new Size(1280, 720));
                    Stopwatch sw = new Stopwatch();
                    sw.Start();
                    var rect     = new Rect(820, 30, 260, 80);
                    var startimg = new Mat(mat_resize, rect);

                    // Yellow pixel range
                    Scalar scalar_low  = new Scalar(0, 240, 240);
                    Scalar scalar_high = new Scalar(20, 255, 255);

                    Mat yellow = new Mat();
                    Cv2.InRange(startimg, scalar_low, scalar_high, yellow);

                    var mask   = new Mat($@"image\{maskname}", ImreadModes.GrayScale);
                    var result = new Mat();
                    Cv2.Add(yellow, mask, result);

                    result_hist = GetHistogram(result);
                    sw.Stop();
                    Console.WriteLine($"Elapsed:{sw.ElapsedMilliseconds}");
                    //Cv2.ImShow("movie", mat_resize);
                    Cv2.ImShow("result", result);
                    Cv2.ImShow("result_hist", result_hist);

                    Cv2.WaitKey(1);
                }
            }
        }
Example #15
        public void Add()
        {
            using Mat src1 = new Mat(2, 2, MatType.CV_8UC1, new byte[] { 1, 2, 3, 4 });
            using Mat src2 = new Mat(2, 2, MatType.CV_8UC1, new byte[] { 1, 2, 3, 4 });
            using Mat dst  = new Mat();
            Cv2.Add(src1, src2, dst);

            Assert.Equal(MatType.CV_8UC1, dst.Type());
            Assert.Equal(2, dst.Rows);
            Assert.Equal(2, dst.Cols);

            Assert.Equal(2, dst.At <byte>(0, 0));
            Assert.Equal(4, dst.At <byte>(0, 1));
            Assert.Equal(6, dst.At <byte>(1, 0));
            Assert.Equal(8, dst.At <byte>(1, 1));
        }
Example #16
        private void bn_LogicalSum_Click(object sender, RoutedEventArgs e)
        {
            if (listImage.Count > 0)
            {
                SubWindow.Win_LogicalSum win = new SubWindow.Win_LogicalSum(listImage);
                if (win.ShowDialog() == true)
                {
                    int nMode   = win.nMode;
                    int nIdx1   = win.nSrc1;
                    int nIdx2   = win.nSrc2;
                    Mat matSrc1 = listImage[nIdx1].fn_GetImage();
                    Mat matSrc2 = listImage[nIdx2].fn_GetImage();
                    Mat matDst  = new Mat();
                    timeStart = DateTime.Now;
                    switch (nMode)
                    {
                    case 0:
                        Cv2.Add(matSrc1, matSrc2, matDst);
                        break;

                    case 1:
                        Cv2.Subtract(matSrc1, matSrc2, matDst);
                        break;

                    case 2:
                        //Cv2.Average(matSrc1, matSrc2, matDst);
                        break;

                    case 3:
                        //Cv2.Differential()
                        Cv2.Absdiff(matSrc1, matSrc2, matDst);
                        break;

                    case 4:
                        Cv2.BitwiseAnd(matSrc1, matSrc2, matDst);
                        break;

                    case 5:
                        Cv2.BitwiseOr(matSrc1, matSrc2, matDst);
                        break;
                    }
                    fn_WriteLog($"[Logical Sum] {listImage[nIdx1].Title} + {listImage[nIdx2].Title} : {nMode} ({(DateTime.Now - timeStart).TotalMilliseconds} ms)");
                    fn_NewImage(matDst, $"Logical Sum {nMode}");
                }
            }
        }
        public Mat Apply(Mat input)
        {
            // Add an alpha channel if the input doesn't have one
            if (input.Channels() <= 3)
            {
                Cv2.CvtColor(input, input, ColorConversionCodes.BGR2BGRA);
            }
            input.ConvertTo(input, MatType.CV_8U);

            // Convert to grayscale
            using var grayscaled = input.CvtColor(ColorConversionCodes.BGRA2GRAY);
            grayscaled.ConvertTo(grayscaled, MatType.CV_16S);               // so negative values can be represented

            // Scale the image up 4x both horizontally and vertically
            Cv2.Resize(input, input, OpenCvSharp.Size.Zero, 4.0d, 4.0d, InterpolationFlags.Nearest);
            Cv2.Resize(grayscaled, grayscaled, OpenCvSharp.Size.Zero, 4.0d, 4.0d, InterpolationFlags.Nearest);

            // Keep the alpha channel for later
            using var alphaCh = input.ExtractChannel(3);

            // Convert the source image to HSV
            Cv2.CvtColor(input, input, ColorConversionCodes.BGRA2BGR);
            Cv2.CvtColor(input, input, ColorConversionCodes.BGR2HSV);

            // Identify the dots
            using var edgeXY = grayscaled.Filter2D(grayscaled.Type(), kernelDiffXY);

            var output = input.Clone();

            using var brightness      = new Mat();
            using var inputBrightness = new Mat();
            input.ExtractChannel(2).ConvertTo(inputBrightness, MatType.CV_16S);
            Cv2.Add(inputBrightness, edgeXY, brightness);     // add the source image's brightness and edgeXY to get brightness
            brightness.ConvertTo(brightness, MatType.CV_8U);
            brightness.InsertChannel(output, 2);              // set brightness as output's value (brightness) channel

            // Convert output from HSV back to BGRA
            Cv2.CvtColor(output, output, ColorConversionCodes.HSV2BGR);
            Cv2.CvtColor(output, output, ColorConversionCodes.BGR2BGRA);

            // Put the original alpha channel back into output
            alphaCh.InsertChannel(output, 3);

            input.Dispose();
            return(output);
        }
Example #18
        public bool Add(Mat whiteLightImage, Mat edgeMaskImage, out Mat mergedImage)
        {
            bool result = false;

            mergedImage = Mat.Zeros(whiteLightImage.Size(), whiteLightImage.Type());

            try
            {
                Cv2.Add(whiteLightImage, edgeMaskImage, mergedImage);

                result = true;
            }
            catch (Exception ex)
            {
            }

            return(result);
        }
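Cv2.Add throws when the two inputs differ in size or type, which is what the try/catch in Example #18 guards against. A minimal hedged sketch of calling that wrapper (file names are illustrative, and the call is assumed to happen on an instance of the defining class):

            using Mat whiteLight = Cv2.ImRead("white_light.png", ImreadModes.Color);
            using Mat edgeMask = Cv2.ImRead("edge_mask.png", ImreadModes.Color);
            if (Add(whiteLight, edgeMask, out Mat merged))
            {
                Cv2.ImWrite("merged.png", merged);
            }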
Example #19
    private void showImages()
    {
        Cv2.NamedWindow("Color image", WindowMode.KeepRatio);
        Cv2.NamedWindow("Mask image", WindowMode.KeepRatio);
        Cv2.NamedWindow("Segmentation image", WindowMode.KeepRatio);
        Cv2.NamedWindow("Edge image", WindowMode.KeepRatio);
        Cv2.NamedWindow("Leaf Candidates", WindowMode.KeepRatio);

        Cv2.ImShow("Color image", plantImageBGR);
        Cv2.ImShow("Mask image", plantMask);
        Cv2.ImShow("Segmentation image", plantSegmentasionImage);
        Cv2.ImShow("Edge image", plantEdges);

        Mat leafCandidateOnPlant = new Mat();

        Cv2.Add(accumulatedLeafCandidates, plantSegmentasionImage, leafCandidateOnPlant);

        Cv2.ImShow("Leaf Candidates", leafCandidateOnPlant);
    }
Example #20
        public void ScalarOperations()
        {
            var values = new[] { -1f };

            using var mat = new Mat(1, 1, MatType.CV_32FC1, values);
            Assert.Equal(values[0], mat.Get <float>(0, 0));

            Cv2.Subtract(mat, 1, mat);
            Assert.Equal(-2, mat.Get <float>(0, 0));

            Cv2.Multiply(mat, 2.0, mat);
            Assert.Equal(-4, mat.Get <float>(0, 0));

            Cv2.Divide(mat, 2.0, mat);
            Assert.Equal(-2, mat.Get <float>(0, 0));

            Cv2.Add(mat, 1, mat);
            Assert.Equal(-1, mat.Get <float>(0, 0));
        }
        // Image segmentation
        private void ucBtnExt_A1_BtnClick(object sender, EventArgs e)
        {
            Scalar apple_hsv_min_1 = new Scalar(apple_h_min_1, apple_s_min, apple_v_min);
            Scalar apple_hsv_max_1 = new Scalar(apple_h_max_1, apple_s_max, apple_v_max);
            Scalar apple_hsv_min_2 = new Scalar(apple_h_min_2, apple_s_min, apple_v_min);
            Scalar apple_hsv_max_2 = new Scalar(apple_h_max_2, apple_s_max, apple_v_max);

            Scalar pear_hsv_min = new Scalar(pear_h_min, pear_s_min, pear_v_min);
            Scalar pear_hsv_max = new Scalar(pear_h_max, pear_s_max, pear_v_max);

            Cv2.CvtColor(srcImg, hsvImg, ColorConversionCodes.BGR2HSV);
            this.pictureBoxIpl_img.ImageIpl = hsvImg;
            // Write to the log
            newLog = "[msg] 显示HSV图像";
            // Wait 1 second
            Cv2.WaitKey(1000);
            // Compute the apple mask
            Cv2.InRange(hsvImg, apple_hsv_min_1, apple_hsv_max_1, apple_1);
            Cv2.InRange(hsvImg, apple_hsv_min_2, apple_hsv_max_2, apple_2);
            // Merge the two apple masks
            Cv2.Add(apple_1, apple_2, apple_mask);
            // Dilate and erode
            Mat apple_element = Cv2.GetStructuringElement(MorphShapes.Rect,
                                                          new OpenCvSharp.Size(3, 3), new OpenCvSharp.Point(-1, -1));

            Cv2.Dilate(apple_mask, apple_mask, apple_element, new OpenCvSharp.Point(-1, -1), 1);
            Cv2.Erode(apple_mask, apple_mask, apple_element, new OpenCvSharp.Point(-1, -1), 1);
            // Compute the pear mask
            Cv2.InRange(hsvImg, pear_hsv_min, pear_hsv_max, pear_mask);
            // Dilate and erode; note the kernel size
            Mat pear_element_erode = Cv2.GetStructuringElement(MorphShapes.Rect,
                                                               new OpenCvSharp.Size(7, 7), new OpenCvSharp.Point(-1, -1));
            Mat pear_element_dilate = Cv2.GetStructuringElement(MorphShapes.Rect,
                                                                new OpenCvSharp.Size(7, 7), new OpenCvSharp.Point(-1, -1));

            Cv2.Erode(pear_mask, pear_mask, pear_element_erode, new OpenCvSharp.Point(-1, -1), 1);
            Cv2.Dilate(pear_mask, pear_mask, pear_element_dilate, new OpenCvSharp.Point(-1, -1), 1);
            Cv2.Add(apple_mask, pear_mask, fruit_mask);
            // Show the segmentation result
            this.pictureBoxIpl_img.ImageIpl = fruit_mask;
            // Write to the log
            newLog = "[msg] 图像分割完毕";
        }
Example #22
        public void cuda_add()
        {
            Mat  mat1 = Image("lenna.png", ImreadModes.Grayscale);
            Size size = mat1.Size();
            Mat  mat2 = new Mat(size, mat1.Type(), Scalar.Black);

            using (GpuMat g_mat1 = new GpuMat(size, mat1.Type()))
                using (GpuMat dst = new GpuMat()) {
                    GpuMat g_mat2 = new GpuMat(size, mat2.Type());
                    g_mat2.Upload(mat2);
                    g_mat1.Upload(mat1);

                    Cuda.cuda.add(g_mat1, g_mat2, dst);

                    Mat dst_gold = new Mat(size, mat1.Type(), Scalar.Black);
                    Cv2.Add(mat1, mat2, dst_gold);
                    ImageEquals(dst_gold, dst);
                    ShowImagesWhenDebugMode(g_mat1, dst);
                }
        }
Example #23
        public void 評価結果画像作成_debug(Mat 検査結果, Mat テンプレート, int[,] 正解座標, ref Mat color_debug)
        {
            Mat res_color  = new Mat(new Size(検査結果.Width, 検査結果.Height), MatType.CV_8UC3, Scalar.All(0));
            var temp_color = res_color.Clone();

            CvBlobs blobs = new CvBlobs(検査結果);
            int     score = 0;

            blobs.FilterByArea(Main.FilterByArea[0], Main.FilterByArea[1]);
            blobs.RenderBlobs(検査結果, res_color);


            Cv2.CvtColor(テンプレート, temp_color, ColorConversionCodes.GRAY2BGR);
            Cv2.Add(temp_color, res_color, color_debug);

            点数計算_debug(blobs, 正解座標, ref color_debug, ref score);

            res_color.Dispose();
            temp_color.Dispose();
            blobs = null;
        }
Example #24
        /// <summary>
        /// Blend a source image onto a destination image using a mask.
        /// </summary>
        /// <param name="sourceImage">The source image to mask.</param>
        /// <param name="mask">The mask to use.</param>
        /// <param name="destinationImage">The destination image to mask onto.</param>
        /// <returns></returns>
        public static Mat Blend(Mat sourceImage, Mat mask, Mat destinationImage)
        {
            // convert the source image to float
            sourceImage.ConvertTo(sourceImage, MatType.CV_32FC3);

            // convert the mask to float and normalize to 0..1 range
            mask.ConvertTo(mask, MatType.CV_32FC3, 1.0 / 255.0);

            // multiply the destination image by (1 - mask)
            destinationImage.ConvertTo(destinationImage, MatType.CV_32FC3);
            Cv2.Multiply(new Scalar(1.0, 1.0, 1.0) - mask, destinationImage, destinationImage);

            // multiply source image with mask
            Cv2.Multiply(mask, sourceImage, sourceImage);

            // add these two together and convert back to int
            Cv2.Add(sourceImage, destinationImage, destinationImage);
            destinationImage.ConvertTo(destinationImage, MatType.CV_8U);

            return(destinationImage);
        }
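A possible usage sketch for Blend above; note that it converts all three arguments to 32-bit float in place, so pass clones if the originals are still needed afterwards (file names are hypothetical):

            using Mat source = Cv2.ImRead("source.png");
            using Mat mask = Cv2.ImRead("mask.png");
            using Mat target = Cv2.ImRead("target.png");
            // Blend mutates its arguments and returns the blended destination image.
            Mat blended = Blend(source, mask, target);
            Cv2.ImWrite("blended.png", blended);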
Example #25
        static void Main(string[] args)
        {
            // Load image 1
            using (Mat image1 = Cv2.ImRead("test01.jpeg"))
                // Load image 2
                using (Mat image2 = Cv2.ImRead("test02.jpeg"))
                    // Allocate a Mat to hold the difference image
                    using (Mat diff = new Mat(new OpenCvSharp.Size(image1.Cols, image1.Rows), MatType.CV_8UC3))
                    {
                        // Take the difference between image 1 and image 2
                        Cv2.Absdiff(image1, image2, diff);
                        // BitmapSourceConverter can convert a Mat to a BitmapSource
                        var bitmap = BitmapConverter.ToBitmap(diff);
                        // Assign the image to Source
                        bitmap.Save("result.jpeg");

                        Mat merged = new Mat();
                        Cv2.Add(image2, diff, merged);

                        merged.ToBitmap().Save("result2.jpeg");
                    }
        }
Example #26
        public static void exec()
        {
            var fname    = "result_detail.bmp";
            var maskname = "2ti_yajirusi_inversion.bmp";

            Mat color = new Mat($@"image\{fname}");
            var rect  = new Rect(685, 125, 60, 50);

            var sw = new Stopwatch();

            sw.Start();

            var color_small = new Mat(color, rect);

            // Greenish pixel range (RGB=14/255/101), used to detect the arrow in the scene showing individual results
            Scalar scalar_low  = new Scalar(50, 245, 0); // B,G,R (I always forget the order)
            Scalar scalar_high = new Scalar(120, 255, 30);

            // Extract the yellow pixels (binarization)
            Mat yellow = new Mat();

            Cv2.InRange(color_small, scalar_low, scalar_high, yellow);

            var mask = new Mat($@"image\{maskname}", ImreadModes.GrayScale);

            var result = new Mat();

            Cv2.Add(yellow, mask, result);

            sw.Stop();
            Console.WriteLine($"ProcTime:{sw.ElapsedMilliseconds}");

            Cv2.ImShow("color_small", color_small);
            Cv2.ImShow("yellow", yellow);
            Cv2.ImShow("result", result);

            Cv2.WaitKey();
        }
Example #27
        public Mat DecodeSimple(Mat[] imgs, int idx0, int idx1)
        {
            Mat minImg = new Mat(), maxImg = new Mat();

            FindMinMaxImage(imgs, idx0, idx1, out minImg, out maxImg);
            var diff = maxImg - minImg;
            var T    = ((minImg + maxImg) / 2).ToMat();
            //T.ConvertTo(T, MatType.CV_32FC1);
            //Cv2.ImShow("min", minImg / 255f);
            //Cv2.ImShow("max", maxImg / 255f);
            //Cv2.WaitKey();


            var imgSize = imgs[0].Size();
            var result  = new Mat(imgSize, MatType.CV_32FC1);
            var img     = new Mat(imgSize, MatType.CV_32FC1);
            var ones    = Mat.Ones(imgSize, MatType.CV_32FC1);

            for (int i = idx1 - 1; i >= idx0; i--)
            {
                imgs[i].ConvertTo(img, MatType.CV_32FC1);
                result *= 2f;
                Cv2.Add(result, ones, result, img.GreaterThanOrEqual(T));
            }
            //Cv2.ImShow("diff", result * 0.001);

            for (int i = 0; i < imgSize.Height; i++)
            {
                for (int j = 0; j < imgSize.Width; j++)
                {
                    result.Set(i, j, (float)codeToInt[(int)result.At <float>(i, j)]);
                }
            }

            //Cv2.ImShow("disp", result * 0.001);
            //Cv2.WaitKey();
            return(result);
        }
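Examples #27 and #29 rely on the masked overload Cv2.Add(src1, src2, dst, mask), which writes the sum only where the mask is non-zero and leaves the other dst elements untouched. A minimal standalone sketch (values are illustrative):

            using Mat a = new Mat(1, 2, MatType.CV_32FC1, new float[] { 1f, 1f });
            using Mat b = new Mat(1, 2, MatType.CV_32FC1, new float[] { 5f, 5f });
            using Mat mask = new Mat(1, 2, MatType.CV_8UC1, new byte[] { 255, 0 });
            using Mat dst = new Mat(1, 2, MatType.CV_32FC1, new float[] { 0f, 0f });
            Cv2.Add(a, b, dst, mask);
            // Only the first element (mask != 0) is written: dst = { 6, 0 }
            Console.WriteLine($"{dst.At<float>(0, 0)}, {dst.At<float>(0, 1)}");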
Example #28
        public static void exec()
        {
            var fname    = "01バイト開始.bmp";
            var maskname = "yellow_1bit_inversion.bmp";

            Mat color = new Mat($@"image\{fname}");
            var rect  = new Rect(820, 30, 260, 80);

            var sw = new Stopwatch();

            sw.Start();

            var color_small = new Mat(color, rect);

            // Yellow pixel range
            Scalar scalar_low  = new Scalar(0, 240, 240);
            Scalar scalar_high = new Scalar(20, 255, 255);

            // Extract the yellow pixels (binarization)
            Mat yellow = new Mat();

            Cv2.InRange(color_small, scalar_low, scalar_high, yellow);

            var mask = new Mat($@"image\{maskname}", ImreadModes.GrayScale);

            var result = new Mat();

            Cv2.Add(yellow, mask, result);

            sw.Stop();
            Console.WriteLine($"ProcTime:{sw.ElapsedMilliseconds}");

            Cv2.ImShow("yellow", yellow);
            Cv2.ImShow("result", result);

            Cv2.WaitKey();
        }
Example #29
        public Mat DecodeInvXorCode(Mat[] imgs, int idx0, int idx1)
        {
            var imgSize = imgs[0].Size();

            var result  = new Mat(imgSize, MatType.CV_32FC1);
            var diffMax = new Mat(imgSize, MatType.CV_32FC1);

            for (int i = idx1 - 1; i >= idx0; i -= 2)
            {
                var img0 = imgs[i];
                var img1 = imgs[i - 1];
                //imgs[i].ConvertTo(img0, MatType.CV_32FC1);
                //imgs[i - 1].ConvertTo(img1, MatType.CV_32FC1);

                var diff = (img1 - img0).ToMat();
                diff.ConvertTo(diff, MatType.CV_32FC1);
                Cv2.Max(diff, diffMax, diffMax);
                result *= 2;
                Cv2.Add(result, Mat.Ones(imgSize, MatType.CV_32FC1), result, img1.GreaterThan(img0));
            }

            for (int i = 0; i < imgSize.Height; i++)
            {
                for (int j = 0; j < imgSize.Width; j++)
                {
                    result.Set(i, j, (float)codeToInt[(int)result.At <float>(i, j)]);
                }
            }

            var mask         = diffMax.GreaterThanOrEqual(2.0);
            var resultMasked = new Mat(imgSize, MatType.CV_32FC1);

            result.CopyTo(resultMasked, mask);

            return(result);
        }
Example #30
        private void Click_比較開始(object sender, EventArgs e)
        {
            if (比較対象_filtered != null && 検査対象_filtered != null && テンプレート != null)
            {
                if (比較結果 != null)
                {
                    比較結果 = null;
                }
                比較結果 = new Mat[4];



                for (int i = 0; i < 4; i++)
                {
                    Mat mask = 比較対象[i].Clone();

                    var kernel = Cv2.GetStructuringElement(MorphShapes.Rect, new OpenCvSharp.Size(3, 3));
                    mycv.二値化(ref mask, 60);                                  // bright areas on the good item are specular reflections, so exclude them from the comparison
                    Cv2.MorphologyEx(mask, mask, MorphTypes.DILATE, kernel); // dilate the specular-reflection regions

                    Cv2.Add(mask, テンプレート, mask);

                    比較結果[i] = new Mat(比較対象[i].Height, 比較対象[i].Width, MatType.CV_8UC1);
                    mycv.Absdiff_mask(ref 比較結果[i], 比較対象_filtered[i], 検査対象_filtered[i], mask);
                    Cv2.Add(比較結果[i], テンプレート, 比較結果[i]);


                    mask.Dispose();
                }
            }
            if (radioButton_比較開始.Checked)
            {
                表示画像更新();
            }
            radioButton_比較開始.Checked = true;
        }