/// <summary>
        /// Fast version: find the brightest red pixel in a masked image.
        /// Approximates a Laplacian-style high-pass filter by subtracting a box-blurred copy of the
        /// red channel from the original, so small spots that are locally brighter in red stand out.
        /// </summary>
        /// <param name="bgraMat">Image data in BGRA format</param>
        /// <param name="mask">Mask of relevant areas to search</param>
        /// <param name="originalWidth">Width of the original (non-cropped) image, used for parameter tuning</param>
        private Point FastFindBrightestPoint(Mat bgraMat, Mat mask, int originalWidth)
        {
            var ksize   = originalWidth > 1600 ? 15 : 11;
            Mat blurred = new Mat();

            Imgproc.blur(bgraMat, blurred, new Size(ksize, ksize));

            Mat redBlurred = new Mat();

            Core.extractChannel(blurred, redBlurred, 2);

            Mat red = new Mat();

            Core.extractChannel(bgraMat, red, 2);

            red -= redBlurred;

            var minMax = Core.minMaxLoc(red, mask);    // get pixel with strongest contrast towards red among reddish things

            // experimental size detection: only accept small objects as laser pointer
            //Scalar zero = new Scalar(0);
            //Mat fillMask = new Mat(red.height(), red.width(), CvType.CV_8UC1, zero);
            //Rect rect = new Rect();
            //Imgproc.floodFill(red, fillMask, minMax.maxLoc, zero, rect, zero /*new Scalar(minMax.maxVal/2)*/, new Scalar(minMax.maxVal), 4 | Imgproc.FLOODFILL_FIXED_RANGE);
            //if (rect.area() > 1)
            //{
            //    return null;
            //}

            blurred.Dispose();
            redBlurred.Dispose();
            red.Dispose();

            return(minMax.maxLoc);
        }
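A minimal standalone sketch of the high-pass step above, for reference. It assumes OpenCVForUnity; the helper name RedHighPass is hypothetical, and Core.subtract is used in place of the Mat subtraction operator so the sketch does not rely on operator overloads.

        // Hypothetical helper (not part of the original snippet): isolates the
        // "red minus local red average" response that FastFindBrightestPoint searches.
        private Mat RedHighPass(Mat bgraMat, int ksize)
        {
            Mat blurred = new Mat();
            Imgproc.blur(bgraMat, blurred, new Size(ksize, ksize));   // local average of every channel

            Mat red        = new Mat();
            Mat redBlurred = new Mat();
            Core.extractChannel(bgraMat, red, 2);                     // channel 2 = R in BGRA
            Core.extractChannel(blurred, redBlurred, 2);

            Mat highPass = new Mat();
            Core.subtract(red, redBlurred, highPass);                 // small, locally bright red spots survive

            blurred.Dispose();
            red.Dispose();
            redBlurred.Dispose();

            return(highPass);
        }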
Code Example #2
File: watershed.cs  Project: Hengle/OpenCVForUnity
    // threshold: remove the background based on brightness
    private static Mat MyThresholdHsv(Mat frame)
    {
        Mat        hsvImg       = new Mat();
        List <Mat> hsvPlanes    = new List <Mat>();
        Mat        thresholdImg = new Mat();

        // threshold the image with the average hue value
        hsvImg.create(frame.size(), CvType.CV_8U);
        Imgproc.cvtColor(frame, hsvImg, Imgproc.COLOR_BGR2HSV);
        Core.split(hsvImg, hsvPlanes); // 3 channels (H, S, V)

        // get the average hue value of the image
        Scalar average     = Core.mean(hsvPlanes[0]);
        double threshValue = average.val[0];

        Imgproc.threshold(hsvPlanes[0], thresholdImg, threshValue, 179.0, Imgproc.THRESH_BINARY_INV);

        Imgproc.blur(thresholdImg, thresholdImg, new Size(15, 15));

        // dilate to fill gaps, erode to smooth edges
        Imgproc.dilate(thresholdImg, thresholdImg, new Mat(), new Point(-1, -1), 1);
        Imgproc.erode(thresholdImg, thresholdImg, new Mat(), new Point(-1, -1), 3);

        Imgproc.threshold(thresholdImg, thresholdImg, threshValue, 179.0, Imgproc.THRESH_BINARY);

        // create the new image
        Mat foreground = new Mat(frame.size(), CvType.CV_8UC3, new Scalar(0, 0, 0));

        thresholdImg.convertTo(thresholdImg, CvType.CV_8U);
        frame.copyTo(foreground, thresholdImg);
        return(foreground);
    }
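A hypothetical caller for MyThresholdHsv above, following the pattern of the other examples on this page; the image path and the Renderer assignment are assumptions, not part of the original snippet.

    private void Start()
    {
        // Hypothetical image path; any BGR image readable by Imgcodecs works here.
        Mat frame      = Imgcodecs.imread(Application.dataPath + "/Textures/sample.jpg", 1);
        Mat foreground = MyThresholdHsv(frame);

        Imgproc.cvtColor(foreground, foreground, Imgproc.COLOR_BGR2RGB);   // swap to RGB for display

        Texture2D tex = new Texture2D(foreground.width(), foreground.height());
        Utils.matToTexture2D(foreground, tex);
        GetComponent<Renderer>().material.mainTexture = tex;
    }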
Code Example #3
    public int matchDice(Mat src, OpenCVForUnity.Rect rect, Mat temp)
    {
        Mat subRGB = new Mat(src, rect);

        // grayscale
        Mat grayMat = new Mat();

        Imgproc.cvtColor(subRGB, grayMat, Imgproc.COLOR_RGB2GRAY);

        Mat hierarchy = new Mat();
        List <MatOfPoint> contours = new List <MatOfPoint>();

        // blur, Canny, erode/dilate
        Imgproc.blur(grayMat, grayMat, new Size(3, 3));
        Imgproc.Canny(grayMat, grayMat, 50, 150);
        morphOps(grayMat);

        // find contours
        Imgproc.findContours(grayMat, contours, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);
        for (int i = 0; i < contours.Count; i++)
        {
            Imgproc.drawContours(temp, contours, i, new Scalar(255, 255, 255), 2);
        }
        // return the number of contours
        return(contours.Count);
    }
Code Example #4
    void sharpenImage(Mat image, Mat sharpened)
    {
        Mat blurred = new Mat();

        //Imgproc.GaussianBlur(image, blurred, new Size(0,0), 5);
        Imgproc.blur(image, blurred, new Size(3, 3));
        Core.addWeighted(image, 1 + sharpness, blurred, -1 * sharpness, 0, sharpened);
    }
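The call above is the standard unsharp-mask formula, sharpened = (1 + s) * image - s * blurred, where s is the class field sharpness. A self-contained variant that takes the amount as a parameter (an assumption, since sharpness lives on the original class) could look like this:

    // Standalone unsharp-mask sketch; "amount" replaces the class field "sharpness".
    void SharpenImageBy(Mat image, Mat sharpened, double amount)
    {
        Mat blurred = new Mat();

        Imgproc.blur(image, blurred, new Size(3, 3));                          // low-pass copy
        Core.addWeighted(image, 1 + amount, blurred, -amount, 0, sharpened);   // boost the high frequencies
        blurred.Dispose();
    }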
Code Example #5
 // Blurs edges of mask
 private void featherMask()
 {
     using (Mat refined_mask_target_rect = new Mat(refined_mask, target_rect))
     {
         Imgproc.erode(refined_mask, refined_mask, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, feather_amount), new Point(-1, -1), 1, Core.BORDER_CONSTANT, new Scalar(0));
         Imgproc.blur(refined_mask, refined_mask, feather_amount, new Point(-1, -1), Core.BORDER_CONSTANT);
     }
 }
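A standalone sketch of the feathering idea above, assuming OpenCVForUnity; mask and featherSize stand in for the class fields refined_mask and feather_amount. The erode keeps the subsequent blur from bleeding outside the original mask region.

 // Hypothetical helper mirroring featherMask above.
 private void Feather(Mat mask, Size featherSize)
 {
     // shrink the mask slightly so the blur does not grow it outward...
     Imgproc.erode(mask, mask, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, featherSize), new Point(-1, -1), 1, Core.BORDER_CONSTANT, new Scalar(0));
     // ...then soften its edge with a box blur of the same size
     Imgproc.blur(mask, mask, featherSize, new Point(-1, -1), Core.BORDER_CONSTANT);
 }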
Code Example #6
        private void Start()
        {
            var src  = Util.LoadTexture("imori_256x256");
            var dst  = new Mat();
            var size = 3;

            Imgproc.blur(src, dst, new Size(size, size));
            GetComponent <Renderer>().material.mainTexture = Util.MatToTexture2D(dst);
        }
Code Example #7
    void ConvertIntoDest(Texture2D srcTexture, RawImage destImage = null)
    {
        srcMat = new Mat(srcTexture.height, srcTexture.width, CvType.CV_8UC3);
        Utils.texture2DToMat(srcTexture, srcMat);

        destMat = new Mat();
        srcMat.copyTo(destMat);

        Imgproc.cvtColor(destMat, destMat, Imgproc.COLOR_BGR2GRAY);
        // Imgproc.GaussianBlur(destMat,destMat,new Size(5,5) , 1);
        Imgproc.blur(destMat, destMat, new Size(low, low));
        Imgproc.threshold(destMat, destMat, 120, 255, Imgproc.THRESH_BINARY);
        Imgproc.Canny(destMat, destMat, 20, 190);

        List <MatOfPoint> contours = new List <MatOfPoint>();

        Imgproc.findContours(destMat, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

        int num = 0;
        List <MatOfPoint> contours_list = new List <MatOfPoint>();

        for (int i = 0; i < contours.Count; i++)
        {
            double area = Imgproc.contourArea(contours[i]);
            print(area);
            // if(area > 1000 && area < 3000){
            //     contours_list.Add(contours[i]);
            //     num = num + 1;
            // }
            if (area > 80)
            {
                contours_list.Add(contours[i]);
                num = num + 1;
            }
        }
        for (int i = 0; i < contours_list.Count; i++)
        {
            Imgproc.drawContours(srcMat, contours_list, -1, new Scalar(0, 255, 0), 4);
        }
        print("Number of valid contours detected : " + contours_list.Count.ToString());
        infoText.text = "Detection : " + contours_list.Count.ToString();

        Texture2D finalTexture = new Texture2D(srcMat.width(), srcMat.height(), TextureFormat.RGB24, false);

        Utils.matToTexture2D(srcMat, finalTexture);

        if (destImage == null)
        {
            srcImage.texture = finalTexture;
        }
        else
        {
            destImage.texture = finalTexture;
            // SaveTextureAsPNG(finalTexture,"CropImageOutput");
            destImage.enabled = true;
        }
    }
Code Example #8
        void Start()
        {
            srcMat = Imgcodecs.imread(Application.dataPath + "/Textures/lena.jpg", 1); //512,512
            Imgproc.cvtColor(srcMat, srcMat, Imgproc.COLOR_BGR2RGB);

            Texture2D t2d = new Texture2D(srcMat.width(), srcMat.height());

            Utils.matToTexture2D(srcMat, t2d);
            Sprite sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

            m_srcImage.sprite                         = sp;
            m_srcImage.preserveAspect                 = true;
            m_srcImage.rectTransform.offsetMin        = new Vector2(0, 0);
            m_srcImage.rectTransform.offsetMax        = new Vector2(t2d.width, t2d.height);
            m_srcImage.rectTransform.anchoredPosition = Vector2.zero;

            //--------------------------------------------------//

            dstMat = Imgcodecs.imread(Application.dataPath + "/Textures/0.jpg", 1); //500,500
            Imgproc.cvtColor(dstMat, dstMat, Imgproc.COLOR_BGR2RGB);
            //dstMat = new Mat();
            Mat grayMat = new Mat();

            detected_edges = new Mat();
            double threshold1  = 1;
            double threshold2  = 100;
            int    kernel_size = 3;

            // denoise with a 3x3 kernel
            Imgproc.cvtColor(srcMat, grayMat, Imgproc.COLOR_RGB2GRAY);
            Imgproc.blur(grayMat, detected_edges, new Size(3, 3));
            Imgproc.Canny(detected_edges, detected_edges, threshold1, threshold2, kernel_size, false);

            // use the Canny edge output as a mask to show the original image
            //dstMat.setTo(new Scalar(0));
            Imgproc.resize(dstMat, dstMat, srcMat.size());
            //srcMat.copyTo(dstMat, detected_edges); // requires srcMat and dstMat to be the same size
            //source.copyTo(destination, mask); // the source pixels are copied into the destination wherever the mask is non-zero

            OpenCVForUnity.Rect rect = new OpenCVForUnity.Rect(25, 25, 125, 200);
            srcMat.submat(rect).copyTo(dstMat);

            Texture2D dst_t2d = new Texture2D(dstMat.width(), dstMat.height());

            Utils.matToTexture2D(dstMat, dst_t2d);
            Sprite dst_sp = Sprite.Create(dst_t2d, new UnityEngine.Rect(0, 0, dst_t2d.width, dst_t2d.height), Vector2.zero);

            m_dstImage.sprite                         = dst_sp;
            m_dstImage.preserveAspect                 = true;
            m_dstImage.rectTransform.offsetMin        = new Vector2(0, 0);
            m_dstImage.rectTransform.offsetMax        = new Vector2(dst_t2d.width, dst_t2d.height);
            m_dstImage.rectTransform.anchoredPosition = Vector2.zero;
        }
Code Example #9
        ///<summary>
        /// Detects the number of electrons in the texture image, returns a new texture with the
        /// detected contours drawn, and outputs the number of detected electrons via detectionCount.
        ///</summary>
        public static Texture2D ApplyScanning(Texture2D srcTexture, int blurSize, out int detectionCount)
        {
            Mat srcMat = new Mat(srcTexture.height, srcTexture.width, CvType.CV_8UC3);

            Utils.texture2DToMat(srcTexture, srcMat);

            Mat destMat = new Mat();

            srcMat.copyTo(destMat);

            Imgproc.cvtColor(destMat, destMat, Imgproc.COLOR_BGR2GRAY);
            // Imgproc.GaussianBlur(destMat,destMat,new Size(5,5) , 1);
            Imgproc.blur(destMat, destMat, new Size(blurSize, blurSize));
            Imgproc.threshold(destMat, destMat, 120, 255, Imgproc.THRESH_BINARY);
            Imgproc.Canny(destMat, destMat, 20, 190);

            List <MatOfPoint> contours = new List <MatOfPoint>();

            Imgproc.findContours(destMat, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

            int num = 0;
            List <MatOfPoint> contours_list = new List <MatOfPoint>();

            for (int i = 0; i < contours.Count; i++)
            {
                double area = Imgproc.contourArea(contours[i]);
                // print(area);
                // if(area > 1000 && area < 3000){
                //     contours_list.Add(contours[i]);
                //     num = num + 1;
                // }
                if (area > 80)
                {
                    contours_list.Add(contours[i]);
                    num = num + 1;
                }
            }
            detectionCount = num;

            for (int i = 0; i < contours_list.Count; i++)
            {
                Imgproc.drawContours(srcMat, contours_list, -1, new Scalar(0, 255, 0), 4);
            }
            Texture2D scannedTexture = new Texture2D(srcMat.width(), srcMat.height(), TextureFormat.RGB24, false);

            Utils.matToTexture2D(srcMat, scannedTexture);

            return(scannedTexture);
        }
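A hypothetical call site for ApplyScanning; the RawImage parameter and the blur size of 5 are assumptions for illustration.

        // Hypothetical caller: scan a texture and show the annotated result.
        static void ScanAndShow(Texture2D sourceTexture, RawImage resultImage)
        {
            int detections;
            Texture2D scanned = ApplyScanning(sourceTexture, 5, out detections);   // blurSize = 5

            Debug.Log("Detected electrons: " + detections);
            resultImage.texture = scanned;
        }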
Code Example #10
        /// <summary>
        /// Creates an alpha mask texture.
        /// </summary>
        /// <returns>An alpha mask texture.</returns>
        /// <param name="width">The texture width.</param>
        /// <param name="height">The texture height.</param>
        /// <param name="baseArea">The base area (an array of points in the UV coordinate system).</param>
        /// <param name="exclusionAreas">Exclusion areas (arrays of points in the UV coordinate system).</param>
        public static Texture2D CreateAlphaMaskTexture(float width, float height, Vector2[] baseArea, params Vector2[][] exclusionAreas)
        {
            Mat baseAreaMaskMat = new Mat((int)height, (int)width, CvType.CV_8UC4);

            baseAreaMaskMat.setTo(new Scalar(0, 0, 0, 255));
            Point[] baseAreaPoints = new Point[baseArea.Length];
            for (int i = 0; i < baseArea.Length; i++)
            {
                baseAreaPoints [i] = new Point(baseArea [i].x * width, height - baseArea [i].y * height);
            }
            Imgproc.fillConvexPoly(baseAreaMaskMat, new MatOfPoint(baseAreaPoints), Scalar.all(255), Imgproc.LINE_AA, 0);
//            Imgproc.erode(baseAreaMaskMat, baseAreaMaskMat, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size (width * 0.01, height * 0.01)), new Point(-1, -1), 1, Core.BORDER_CONSTANT, new Scalar(0, 0, 0, 255));
            Imgproc.blur(baseAreaMaskMat, baseAreaMaskMat, new Size(width * 0.03, height * 0.03));


            Mat exclusionAreaMaskMat = new Mat((int)height, (int)width, CvType.CV_8UC4);

            exclusionAreaMaskMat.setTo(new Scalar(0, 0, 0, 255));
            foreach (Vector2[] exclusionArea in exclusionAreas)
            {
                Point[] points = new Point[exclusionArea.Length];
                for (int i = 0; i < exclusionArea.Length; i++)
                {
                    points [i] = new Point(exclusionArea [i].x * width, height - exclusionArea [i].y * height);
                }
                Imgproc.fillConvexPoly(exclusionAreaMaskMat, new MatOfPoint(points), Scalar.all(255), Imgproc.LINE_AA, 0);
            }
//          Imgproc.dilate(exclusionAreaMaskMat, exclusionAreaMaskMat, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size (width * 0.002, height * 0.002)), new Point(-1, -1), 1, Core.BORDER_CONSTANT, new Scalar(0));
            Imgproc.blur(exclusionAreaMaskMat, exclusionAreaMaskMat, new Size(width * 0.01, height * 0.01), new Point(-1, -1), Core.BORDER_CONSTANT);


            Mat maskMat = new Mat((int)height, (int)width, CvType.CV_8UC4);

            Core.bitwise_xor(baseAreaMaskMat, exclusionAreaMaskMat, maskMat);

            Texture2D texture = new Texture2D((int)width, (int)height, TextureFormat.RGB24, false);

            Utils.matToTexture2D(maskMat, texture);

            maskMat.Dispose();
            baseAreaMaskMat.Dispose();
            exclusionAreaMaskMat.Dispose();

            return(texture);
        }
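A hypothetical usage of CreateAlphaMaskTexture: a full-frame base area with one rectangular exclusion hole, both given in UV coordinates as the parameter documentation above describes. The 512x512 size is arbitrary.

        static Texture2D CreateSampleMask()
        {
            // full-frame base area (UV corners) with a centered rectangular hole
            Vector2[] baseArea = { new Vector2(0, 0), new Vector2(1, 0), new Vector2(1, 1), new Vector2(0, 1) };
            Vector2[] hole     = { new Vector2(0.4f, 0.4f), new Vector2(0.6f, 0.4f), new Vector2(0.6f, 0.6f), new Vector2(0.4f, 0.6f) };

            return(CreateAlphaMaskTexture(512, 512, baseArea, hole));
        }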
Code Example #11
File: gradient.cs  Project: Hengle/OpenCVForUnity
    // Canny filter
    public Sprite CannyGradient()
    {
        Mat    edge       = new Mat();
        double threshold1 = 0;
        double threshold2 = 100;

        Imgproc.blur(grayMat, edge, new Size(3, 3));
        Imgproc.Canny(edge, edge, threshold1, threshold2);
        Core.convertScaleAbs(edge, dstMat);

        // convert the Mat to a Texture2D
        Texture2D t2d = new Texture2D(dstMat.cols(), dstMat.rows());

        Utils.matToTexture2D(dstMat, t2d);
        Sprite sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

        return(sp);
    }
Code Example #12
    // simple box blur
    void OnSimpleBlur(bool value)
    {
        m_blurImage.enabled = true;
        if (!value)
        {
            return;
        }

        Mat dstMat = srcMat.clone();

        Size kSize = new Size(10d, 10d);

        Imgproc.blur(dstMat, dstMat, kSize);

        Texture2D t2d = new Texture2D(dstMat.width(), dstMat.height());
        Sprite    sp  = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

        m_blurImage.sprite         = sp;
        m_blurImage.preserveAspect = true;
        Utils.matToTexture2D(dstMat, t2d);
    }
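For comparison, a hypothetical Gaussian variant of the box blur above; Imgproc.GaussianBlur needs an odd kernel size, and passing sigma 0 lets OpenCV derive it from the kernel.

    // Hypothetical Gaussian alternative; returns a blurred copy instead of blurring in place.
    Mat GaussianBlurCopy(Mat src, int ksize)
    {
        Mat dst = src.clone();

        Imgproc.GaussianBlur(dst, dst, new Size(ksize, ksize), 0);   // sigma derived from ksize
        return(dst);
    }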
Code Example #13
        public void Process(Mat src, Mat dst, bool isBGR = false)
        {
            if (src == null)
            {
                throw new ArgumentNullException("src == null");
            }
            if (dst == null)
            {
                throw new ArgumentNullException("dst == null");
            }

            if (grayMat != null && (grayMat.width() != src.width() || grayMat.height() != src.height()))
            {
                grayMat.Dispose();
                grayMat = null;
                maskMat.Dispose();
                maskMat = null;
                screentoneMat.Dispose();
                screentoneMat = null;
                grayDstMat.Dispose();
                grayDstMat = null;
            }
            grayMat    = grayMat ?? new Mat(src.height(), src.width(), CvType.CV_8UC1);
            maskMat    = maskMat ?? new Mat(src.height(), src.width(), CvType.CV_8UC1);
            grayDstMat = grayDstMat ?? new Mat(src.height(), src.width(), CvType.CV_8UC1);

            if (screentoneMat == null)
            {
                // create a striped screentone.
                screentoneMat = new Mat(src.height(), src.width(), CvType.CV_8UC1, new Scalar(255));
                for (int i = 0; i < screentoneMat.rows() * 2.5f; i = i + 4)
                {
                    Imgproc.line(screentoneMat, new Point(0, 0 + i), new Point(screentoneMat.cols(), -screentoneMat.cols() + i), new Scalar(0), 1);
                }
            }

            if (src.type() == CvType.CV_8UC1)
            {
                src.copyTo(grayMat);
            }
            else if (src.type() == CvType.CV_8UC3)
            {
                Imgproc.cvtColor(src, grayMat, (isBGR) ? Imgproc.COLOR_BGR2GRAY : Imgproc.COLOR_RGB2GRAY);
            }
            else
            {
                Imgproc.cvtColor(src, grayMat, (isBGR) ? Imgproc.COLOR_BGRA2GRAY : Imgproc.COLOR_RGBA2GRAY);
            }


            // binarize.
            Imgproc.threshold(grayMat, grayDstMat, blackThresh, 255.0, Imgproc.THRESH_BINARY);

            // draw striped screentone.
            Core.LUT(grayMat, grayLUT, maskMat);
            screentoneMat.copyTo(grayDstMat, maskMat);

            // draw main line.
            if (drawMainLine)
            {
                Core.LUT(grayMat, contrastAdjustmentsLUT, maskMat); // = grayMat.convertTo(maskMat, -1, 1.5, 0);

                if (useNoiseFilter)
                {
                    Imgproc.blur(maskMat, grayMat, blurSize);
                    Imgproc.dilate(grayMat, maskMat, kernel_dilate);
                }
                else
                {
                    Imgproc.dilate(maskMat, grayMat, kernel_dilate);
                }
                Core.absdiff(grayMat, maskMat, grayMat);
                Imgproc.threshold(grayMat, maskMat, 25, 255.0, Imgproc.THRESH_BINARY);
                if (useNoiseFilter)
                {
                    Imgproc.erode(maskMat, grayMat, kernel_erode);
                    Core.bitwise_not(grayMat, maskMat);
                    maskMat.copyTo(grayDstMat, grayMat);
                }
                else
                {
                    Core.bitwise_not(maskMat, grayMat);
                    grayMat.copyTo(grayDstMat, maskMat);
                }
            }


            if (dst.type() == CvType.CV_8UC1)
            {
                grayDstMat.copyTo(dst);
            }
            else if (dst.type() == CvType.CV_8UC3)
            {
                Imgproc.cvtColor(grayDstMat, dst, (isBGR) ? Imgproc.COLOR_GRAY2BGR : Imgproc.COLOR_GRAY2RGB);
            }
            else
            {
                Imgproc.cvtColor(grayDstMat, dst, (isBGR) ? Imgproc.COLOR_GRAY2BGRA : Imgproc.COLOR_GRAY2RGBA);
            }
        }
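The grayLUT and contrastAdjustmentsLUT used above are class fields built elsewhere. As a rough sketch (an assumption, not the original code), a 256-entry lookup table that turns every pixel darker than a cut-off into a screentone-mask value could be constructed like this:

        // Hypothetical LUT builder: entries below screentoneThresh map to 255, the rest to 0.
        private Mat BuildScreentoneLUT(int screentoneThresh)
        {
            Mat lut = new Mat(1, 256, CvType.CV_8UC1);
            byte[] table = new byte[256];
            for (int i = 0; i < 256; i++)
            {
                table[i] = (byte)(i < screentoneThresh ? 255 : 0);
            }
            lut.put(0, 0, table);

            return(lut);
        }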
Code Example #14
        public void processFrame()
        {
            if (inversion)
            {
                //flip
                Core.bitwise_not(toneMat, toneMat);
            }
            if (resize)
            {
                Imgproc.resize(toneMat, toneMat, new Size((int)Math.Round(resizeRatio * toneMat.width()), (int)Math.Round(resizeRatio * toneMat.height())));
            }
            //
            if (toneThreshold)
            {
                Imgproc.threshold(toneMat, toneMat, thresholdValue, 255, Imgproc.THRESH_BINARY);
            }
            if (blobs)
            {
                blobDetector.detect(toneMat, keypoints);
                Features2d.drawKeypoints(toneMat, keypoints, toneMat);
            }
            if (blur)
            {
                Imgproc.blur(toneMat, toneMat, new Size(blurSize, blurSize));
            }
            if (centerPoint)
            {
                moments.Add(Imgproc.moments(toneMat, true));
                WeightedCentroid.Add(new Point((int)Math.Round(moments[0].m10 / moments[0].m00), (int)Math.Round(moments[0].m01 / moments[0].m00)));
                Debug.Log("center: " + WeightedCentroid[0].x + ", " + WeightedCentroid[0].y);
            }
            if (edge)
            {
                Imgproc.Canny(toneMat, toneMat, thresholdValue * 0.5, thresholdValue);
                //Imgproc.findContours (toneMat, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE );
                //
                //					foreach(MatOfPoint i in contours){
                //						Debug.Log ("contour " + i + ": " + i.ToString());
                //					}
                //Debug.Log ("contours count: " + contours.Count);
                moments.Add(Imgproc.moments(toneMat, true));
                if (WeightedCentroid.Count == 0)
                {
                    moments.Add(Imgproc.moments(toneMat, true));
                    WeightedCentroid.Add(new Point(0, 0));
                }
                WeightedCentroid.Add(new Point((int)Math.Round(moments[1].m10 / moments[1].m00), (int)Math.Round(moments[1].m01 / moments[1].m00)));

                if (thresholdValue >= thresholdValueCap && edgeCenterPoint == true)
                {
                    Imgproc.ellipse(toneMat, WeightedCentroid [1], new Size(20, 20), 1, 0.1, 360, new Scalar(180), 10);
                    Imgproc.putText(toneMat, " Edge center point", WeightedCentroid [1], 0, 1.5, new Scalar(180), 5);
                }
            }
            //draw center
            if (centerPoint)
            {
                Imgproc.ellipse(toneMat, WeightedCentroid [0], new Size(20, 20), 1, 0.1, 360, new Scalar(180), 10);
                Imgproc.putText(toneMat, " Tone center point", WeightedCentroid [0], 0, 1.5, new Scalar(180), 5);
            }
            if (resize)
            {
                Imgproc.resize(toneMat, toneMat, new Size((int)Math.Round((1 / resizeRatio) * toneMat.width()), (int)Math.Round((1 / resizeRatio) * toneMat.height())));
            }
            //assign to display
            if (showProcessing)
            {
                rgbaMat = toneMat;
            }
            else
            {
                rgbaMat = cloneMat;
            }

            WeightedCentroid.Clear();
            moments.Clear();
            contours.Clear();

            framesDropCount = 0;
        }
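The centroid lines above divide the first-order moments by m00. A small helper capturing that computation, with a guard for an all-black frame (an addition, not present in the original), might look like this:

        // Minimal sketch: centroid of a binary Mat from its image moments.
        private Point Centroid(Mat binaryMat)
        {
            Moments m = Imgproc.moments(binaryMat, true);
            if (m.m00 == 0)
            {
                return(new Point(0, 0));   // avoid dividing by zero on an empty frame
            }
            return(new Point((int)Math.Round(m.m10 / m.m00), (int)Math.Round(m.m01 / m.m00)));
        }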
Code Example #15
 // Blurs edges of mask.
 private void featherMask(Mat refined_masks)
 {
     Imgproc.erode(refined_masks, refined_masks, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, feather_amount), new Point(-1, -1), 1, Core.BORDER_CONSTANT, new Scalar(0));
     Imgproc.blur(refined_masks, refined_masks, feather_amount, new Point(-1, -1), Core.BORDER_CONSTANT);
 }
Code Example #16
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame() && !imageOptimizationHelper.IsCurrentFrameSkipped())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                // get the down-scaled Mat
                Mat downScaleRgbaMat = imageOptimizationHelper.GetDownScaleMat((rgbaMat));

                //grayscale
                Imgproc.cvtColor(downScaleRgbaMat, gray1Mat, Imgproc.COLOR_RGBA2GRAY);

                //blur
                Imgproc.blur(gray1Mat, gray2Mat, new Size(5, 5));

                //edge filter
                Imgproc.filter2D(gray2Mat, gray1Mat, gray1Mat.depth(), kernel);

                //blur
                Imgproc.blur(gray1Mat, gray2Mat, new Size(3, 3));

                //detect edge
                Imgproc.threshold(gray2Mat, gray2Mat, EDGE_DETECT_VALUE, 255, Imgproc.THRESH_BINARY);

                //copy Mat to byteArray
                Utils.copyFromMat <byte> (gray2Mat, byteArray);

                //set edge pointList
                List <Point> pointList = new List <Point> ();
                int          w         = gray1Mat.width();
                int          h         = gray1Mat.height();
                for (int y = 0; y < h; y++)
                {
                    for (int x = 0; x < w; x++)
                    {
                        if (byteArray [x + w * y] == 255)
                        {
                            pointList.Add(new Point(x, y));
                        }
                    }
                }

                int limit = Mathf.RoundToInt((float)(pointList.Count * POINT_RATE));
                if (limit > POINT_MAX_NUM)
                {
                    limit = POINT_MAX_NUM;
                }

                while (pointList.Count > limit)
                {
                    pointList.RemoveAt(Random.Range(0, pointList.Count));
                }
//              Debug.Log ("pointList.Count " + pointList.Count);


                //init subdiv
                subdiv.initDelaunay(new OpenCVForUnity.CoreModule.Rect(0, 0, downScaleRgbaMat.width(), downScaleRgbaMat.height()));
                for (int i = 0; i < pointList.Count; i++)
                {
                    subdiv.insert(pointList [i]);
                }
                subdiv.insert(new Point(0, 0));
                subdiv.insert(new Point(gray1Mat.width() / 2 - 1, 0));
                subdiv.insert(new Point(gray1Mat.width() - 1, 0));
                subdiv.insert(new Point(gray1Mat.width() - 1, gray1Mat.height() / 2 - 1));
                subdiv.insert(new Point(gray1Mat.width() - 1, gray1Mat.height() - 1));
                subdiv.insert(new Point(gray1Mat.width() / 2 - 1, gray1Mat.height() - 1));
                subdiv.insert(new Point(0, gray1Mat.height() - 1));
                subdiv.insert(new Point(0, gray1Mat.height() / 2 - 1));


                using (MatOfFloat6 triangleList = new MatOfFloat6()) {
                    subdiv.getTriangleList(triangleList);

                    float[] pointArray     = triangleList.toArray();
                    float   downScaleRatio = imageOptimizationHelper.downscaleRatio;
                    if (downScaleRatio < 1)
                    {
                        downScaleRatio = 1;
                    }
                    byte[] color = new byte[4];
                    for (int i = 0; i < pointArray.Length / 6; i++)
                    {
                        Point p0 = new Point(pointArray [i * 6 + 0] * downScaleRatio, pointArray [i * 6 + 1] * downScaleRatio);
                        Point p1 = new Point(pointArray [i * 6 + 2] * downScaleRatio, pointArray [i * 6 + 3] * downScaleRatio);
                        Point p2 = new Point(pointArray [i * 6 + 4] * downScaleRatio, pointArray [i * 6 + 5] * downScaleRatio);

                        if (p0.x < 0 || p0.x > rgbaMat.width())
                        {
                            continue;
                        }
                        if (p0.y < 0 || p0.y > rgbaMat.height())
                        {
                            continue;
                        }
                        if (p1.x < 0 || p1.x > rgbaMat.width())
                        {
                            continue;
                        }
                        if (p1.y < 0 || p1.y > rgbaMat.height())
                        {
                            continue;
                        }
                        if (p2.x < 0 || p2.x > rgbaMat.width())
                        {
                            continue;
                        }
                        if (p2.y < 0 || p2.y > rgbaMat.height())
                        {
                            continue;
                        }


                        //get center of gravity
                        int cx = (int)((p0.x + p1.x + p2.x) * 0.33333);
                        int cy = (int)((p0.y + p1.y + p2.y) * 0.33333);
                        //                Debug.Log ("cx " + cx + " cy " + cy );

                        //get center of gravity color
                        rgbaMat.get(cy, cx, color);
                        //                Debug.Log ("r " + color[0] + " g " + color[1] + " b " + color[2] + " a " + color[3]);

                        //fill Polygon
                        Imgproc.fillConvexPoly(rgbaMat, new MatOfPoint(p0, p1, p2), new Scalar(color [0], color [1], color [2], color [3]), Imgproc.LINE_AA, 0);


//                        Imgproc.line (rgbaMat, p0, p1, new Scalar (64, 255, 128, 255));
//                        Imgproc.line (rgbaMat, p1, p2, new Scalar (64, 255, 128, 255));
//                        Imgproc.line (rgbaMat, p2, p0, new Scalar (64, 255, 128, 255));
                    }
                }

//                Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " DOWNSCALE W:" + downScaleRgbaMat.width () + " H:" + downScaleRgbaMat.height (), new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.fastMatToTexture2D(rgbaMat, texture);
            }
        }
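A minimal sketch of the per-triangle fill in the loop above: sample the colour at the triangle's centre of gravity and flood the whole triangle with it. It assumes a CV_8UC4 frame and triangle points that lie inside the image (the original loop guards for that before filling).

        // Hypothetical helper extracted for illustration only.
        private void FillTriangleWithCentroidColor(Mat rgbaMat, Point p0, Point p1, Point p2)
        {
            int cx = (int)((p0.x + p1.x + p2.x) / 3.0);
            int cy = (int)((p0.y + p1.y + p2.y) / 3.0);

            byte[] color = new byte[4];
            rgbaMat.get(cy, cx, color);   // colour at the centre of gravity

            Imgproc.fillConvexPoly(rgbaMat, new MatOfPoint(p0, p1, p2), new Scalar(color[0], color[1], color[2], color[3]), Imgproc.LINE_AA, 0);
        }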
Code Example #17
        public void blurBackground(OpenCVForUnity.Rect[] _rects, Mat _mat)
        {
            Imgproc.blur(_mat, _mat, new Size(blurPixelSize, blurPixelSize));
            if (colorBlur)
            {
                Core.add(_mat, new Scalar(redBlur, greenBlur, blueBlur), _mat);
            }
            //rect and blur background
            for (int i = 0; i < _rects.Length; i++)
            {
                //              Debug.Log ("detect faces " + rects [i]);


                //copyMat.copyTo(rgbaMat.submat(rects [i].x, rects [i].y, rects [i].x + rects [i].width, rects [i].y + rects [i].height));

                // Check mat range
                if (-rectFactor + _rects [i].y - hightCorrection <= 0)
                {
                    rowRangeTop = 0;
                }
                else
                {
                    rowRangeTop = -rectFactor + _rects [i].y - hightCorrection;
                }
                if (rectFactor + _rects [i].y + _rects [i].height >= copyMat.height())
                {
                    rowRangeButtom = copyMat.height();
                }
                else
                {
                    rowRangeButtom = rectFactor + _rects [i].y + _rects [i].height;
                }
                if (-rectFactor + _rects [i].x <= 0)
                {
                    colRangeleft = 0;
                }
                else
                {
                    colRangeleft = -rectFactor + _rects [i].x;
                }
                if (rectFactor + _rects [i].x + _rects [i].width >= copyMat.width())
                {
                    colRangeRight = copyMat.width();
                }
                else
                {
                    colRangeRight = rectFactor + _rects [i].x + _rects [i].width;
                }

                if (stabilizeRectBtwFrames)
                {
                    if (rowRangeTopLast != null)
                    {
                        rowRangeTopResult    = (int)Math.Round((1 - stabilizeFactor) * rowRangeTop + stabilizeFactor * (int)rowRangeTopLast);
                        rowRangeButtomResult = (int)Math.Round((1 - stabilizeFactor) * rowRangeButtom + stabilizeFactor * (int)rowRangeButtomLast);
                        colRangeleftResult   = (int)Math.Round((1 - stabilizeFactor) * colRangeleft + stabilizeFactor * (int)colRangeleftLast);
                        colRangeRightResult  = (int)Math.Round((1 - stabilizeFactor) * colRangeRight + stabilizeFactor * (int)colRangeRightLast);
                    }
                    else
                    {
                        rowRangeTopResult    = rowRangeTop;
                        rowRangeButtomResult = rowRangeButtom;
                        colRangeleftResult   = colRangeleft;
                        colRangeRightResult  = colRangeRight;
                    }
                }
                else
                {
                    rowRangeTopResult    = rowRangeTop;
                    rowRangeButtomResult = rowRangeButtom;
                    colRangeleftResult   = colRangeleft;
                    colRangeRightResult  = colRangeRight;
                }



                rowRangeTopLast    = rowRangeTop;
                rowRangeButtomLast = rowRangeButtom;
                colRangeleftLast   = colRangeleft;
                colRangeRightLast  = colRangeRight;

                // save the values for stabilization in the next frame


                copyMat.rowRange(rowRangeTopResult, rowRangeButtomResult)
                .colRange(colRangeleftResult, colRangeRightResult)
                .copyTo(_mat
                        .rowRange(rowRangeTopResult, rowRangeButtomResult)
                        .colRange(colRangeleftResult, colRangeRightResult));
                //Imgcodecs.imwrite ("Assets/face.jpeg", copyMat);
                if (showRect)
                {
                    Imgproc.rectangle(_mat, new Point(colRangeleftResult, rowRangeTopResult), new Point(colRangeRightResult, rowRangeButtomResult), new Scalar(100, 100, 250, 35), 2);
                }
            }
        }
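Stripped of the range clamping and frame-to-frame stabilization, the core idea above is: blur the whole frame, then copy the sharp face rectangles back from an untouched copy. A minimal sketch under that assumption (copyMat is the class field holding the unblurred frame in the original; here it is a parameter):

        // Hypothetical reduced version: both mats must be the same size and the rects must lie inside them.
        private void BlurExceptRects(Mat sharpCopy, Mat frame, OpenCVForUnity.Rect[] rects, int blurPixelSize)
        {
            Imgproc.blur(frame, frame, new Size(blurPixelSize, blurPixelSize));
            foreach (OpenCVForUnity.Rect r in rects)
            {
                sharpCopy.submat(r).copyTo(frame.submat(r));   // restore the sharp region
            }
        }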
Code Example #18
        public void processFrame()
        {
            //channel split
            Core.split(rgbMat, channelsMats);

            for (int i = 0; i <= channelsMats.Count - 2; i++)
            {
                if (inversion)
                {
                    //flip
                    Core.bitwise_not(channelsMats[i], channelsMats[i]);
                }
                if (resize)
                {
                    Imgproc.resize(channelsMats[i], channelsMats[i], new Size((int)Math.Round(resizeRatio * channelsMats[i].width()), (int)Math.Round(resizeRatio * channelsMats[i].height())));
                }
                //
                if (toneThreshold)
                {
                    Imgproc.threshold(channelsMats[i], channelsMats[i], idealThresholdValue, 255, Imgproc.THRESH_BINARY);
                }

                if (blur)
                {
                    Imgproc.blur(channelsMats[i], channelsMats[i], new Size(blurSize, blurSize));
                }
                if (centerPoint)
                {
                    moments.Add(Imgproc.moments(channelsMats[i], true));
                    WeightedCentroid.Add(new Point((int)Math.Round(moments[i].m10 / moments[i].m00), (int)Math.Round(moments[i].m01 / moments[i].m00)));
                    //Debug.Log("center: " + WeightedCentroid[0].x +", " + WeightedCentroid[0].y);
                }

                //draw center
                if (centerPoint)
                {
                    Imgproc.ellipse(channelsMats[i], WeightedCentroid [i], new Size(20, 20), 1, 0.1, 360, new Scalar(180), 10);
                    Imgproc.putText(channelsMats[i], " Tone center point", WeightedCentroid [i], 0, 1.5, new Scalar(180), 5);
                }
                if (resize)
                {
                    Imgproc.resize(channelsMats[i], channelsMats[i], new Size((int)Math.Round((1 / resizeRatio) * channelsMats[i].width()), (int)Math.Round((1 / resizeRatio) * channelsMats[i].height())));
                }
                //assign to display
                if (showProcessing)
                {
                    //Core.add (rgbMat, channelsMats[i], rgbMat);
                }
                else
                {
                    rgbMat = cloneMat;
                }
            }

            if (edgeCenter)
            {
                Imgproc.Canny(toneMat, toneMat, idealThresholdValue * 0.5, idealThresholdValue);
                //Imgproc.findContours (channel, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE );
                //
                //					foreach(MatOfPoint i in contours){
                //						Debug.Log ("contour " + i + ": " + i.ToString());
                //					}
                //Debug.Log ("contours count: " + contours.Count);
                moments.Add(Imgproc.moments(toneMat, true));
                //if (WeightedCentroidEdge.Count == 0) {
                moments.Add(Imgproc.moments(toneMat, true));
                //	WeightedCentroidEdge.Add(new Point(0,0));
                //}
                WeightedCentroidEdge.Add(new Point((int)Math.Round(moments[moments.Count - 1].m10 / moments[moments.Count - 1].m00), (int)Math.Round(moments[moments.Count - 1].m01 / moments[moments.Count - 1].m00)));

                Imgproc.ellipse(rgbMat, WeightedCentroidEdge [0], new Size(4, 4), 1, 1.5, 360, new Scalar(0, 0, 0, 100), 10);
                Imgproc.putText(rgbMat, " edge center point", WeightedCentroidEdge [0], 0, 1.3, new Scalar(0, 0, 0, 100), 2);
            }
            //display rgb centers
            if (showRgbCenters)
            {
                for (int i = 0; i <= channelsMats.Count - 2; i++)
                {
                    switch (i)
                    {
                    case 0:
                        colorName   = " red";
                        colorScalar = new Scalar(255, 0, 0, 100);
                        break;

                    case 1:
                        colorName   = " green";
                        colorScalar = new Scalar(0, 255, 0, 100);
                        break;

                    case 2:
                        colorName   = " blue";
                        colorScalar = new Scalar(0, 0, 255, 100);
                        break;

                    default:
                        colorName = "color";
                        break;
                    }

                    Imgproc.ellipse(rgbMat, WeightedCentroid [i], new Size(4, 4), 1, 1.5, 360, colorScalar, 10);
                    Imgproc.putText(rgbMat, colorName + " center " + WeightedCentroid [i], WeightedCentroid [i], 0, 1.3, colorScalar, 2);
                    //	Debug.Log ("center " + i + "is: " + WeightedCentroid[i]);
                }
            }
            if (mergeRgbCenters)
            {
                bool edgeInRect = false;
                bool rgbInRect  = false;

                Point rgbAverage = new Point {
                    x = (int)Math.Round(WeightedCentroid.Average(p => p.x)),
                    y = (int)Math.Round(WeightedCentroid.Average(p => p.y))
                };
                //Debug.Log ("average POINT: " + rgbAverage);
                Imgproc.ellipse(rgbMat, rgbAverage, new Size(4, 4), 1, 1.5, 360, new Scalar(120, 120, 120, 255), 10);
                Imgproc.putText(rgbMat, " merged center " + rgbAverage, rgbAverage, 0, 1.3, new Scalar(120, 120, 120, 255), 2);

                if (calculateLocation)
                {
                    if (showLocationRect)
                    {
                        Imgproc.rectangle(rgbMat, new Point(webCamTexture.width * LocationSizeFactor, webCamTexture.height * LocationSizeFactor),
                                          new Point(webCamTexture.width * (1 - LocationSizeFactor), webCamTexture.height * (1 - LocationSizeFactor)), new Scalar(255, 50, 50, 155), 2, 8, 0);
                    }
                    //case edge center in center rect
                    if (WeightedCentroidEdge [0].x <= webCamTexture.width * LocationSizeFactor && WeightedCentroidEdge [0].x >= webCamTexture.width * (1 - LocationSizeFactor) &&
                        WeightedCentroidEdge [0].y <= webCamTexture.height * LocationSizeFactor && WeightedCentroidEdge [0].y >= webCamTexture.height * (1 - LocationSizeFactor))
                    {
                        edgeInRect = true;
                    }
                    //case RGB center in center rect
                    if (rgbAverage.x <= webCamTexture.width * LocationSizeFactor && rgbAverage.x >= webCamTexture.width * (1 - LocationSizeFactor) &&
                        rgbAverage.y <= webCamTexture.height * LocationSizeFactor && rgbAverage.y >= webCamTexture.height * (1 - LocationSizeFactor))
                    {
                        rgbInRect = true;
                    }
                    //case RGB center & edge is out
                    if (rgbInRect && !edgeInRect)
                    {
                        edgeFactor -= locationWeightFactor;
                        if (edgeFactor <= 0)
                        {
                            edgeFactor = 0;
                        }
                    }
                    //case edge center & RGB is out
                    if (!rgbInRect && edgeInRect)
                    {
                        edgeFactor += locationWeightFactor;
                        if (edgeFactor >= 1)
                        {
                            edgeFactor = 1;
                        }
                    }
                    //average with location factors
                    Point edgeAverage = new Point((((1 - edgeFactor) * rgbAverage.x) + ((edgeFactor) * WeightedCentroidEdge[0].x)),
                                                  (((1 - edgeFactor) * rgbAverage.y) + ((edgeFactor) * WeightedCentroidEdge[0].y)));

                    if (popToCenter)
                    {
                        //show pop rect
                        if (showPopToCenterRect)
                        {
                            Imgproc.rectangle(rgbMat, new Point(webCamTexture.width * popToCenterRectFactor, webCamTexture.height * popToCenterRectFactor),
                                              new Point(webCamTexture.width * (1 - popToCenterRectFactor), webCamTexture.height * (1 - popToCenterRectFactor)), new Scalar(50, 50, 180, 100), 2, 8, 0);
                        }
                        //case point inside rect
                        if (edgeAverage.x <= webCamTexture.width * popToCenterRectFactor && edgeAverage.x >= webCamTexture.width * (1 - popToCenterRectFactor) &&
                            edgeAverage.y <= webCamTexture.height * popToCenterRectFactor && edgeAverage.y >= webCamTexture.height * (1 - popToCenterRectFactor))
                        {
                            edgeAverage.x = (int)Math.Round(webCamTexture.width * 0.5);
                            edgeAverage.y = (int)Math.Round(webCamTexture.height * 0.5);
                        }
                    }
                    Imgproc.ellipse(rgbMat, edgeAverage, new Size(6, 6), 1, 1.5, 360, new Scalar(244, 66, 226, 255), 13);
                    Imgproc.putText(rgbMat, " merged center " + edgeAverage, edgeAverage, 0, 1.3, new Scalar(244, 66, 226, 255), 2);
                }
                //average with no location
                if (mergeEdge && WeightedCentroidEdge.Count >= 0 && !calculateLocation)
                {
                    Point edgeAverage = new Point((((1 - edgeFactor) * rgbAverage.x) + ((edgeFactor) * WeightedCentroidEdge [0].x)),
                                                  (((1 - edgeFactor) * rgbAverage.y) + ((edgeFactor) * WeightedCentroidEdge [0].y)));

                    //average with edge factor
                    Imgproc.ellipse(rgbMat, edgeAverage, new Size(6, 6), 1, 1.5, 360, new Scalar(244, 66, 226, 255), 13);
                    Imgproc.putText(rgbMat, " merged center " + edgeAverage, edgeAverage, 0, 1.3, new Scalar(244, 66, 226, 255), 2);
                }
                //############# revert the edgeFactor adjustments made above
                //case RGB center & edge is out  bring back value
                if (rgbInRect && !edgeInRect)
                {
                    edgeFactor += locationWeightFactor;
                    if (edgeFactor <= 0)
                    {
                        edgeFactor = 0;
                    }
                }
                //case edge center & RGB is out bring back value
                if (!rgbInRect && edgeInRect)
                {
                    edgeFactor -= locationWeightFactor;
                    if (edgeFactor >= 1)
                    {
                        edgeFactor = 1;
                    }
                }
            }

            WeightedCentroid.Clear();
            WeightedCentroidEdge.Clear();
            moments.Clear();
            contours.Clear();
            framesDropCount = 0;
            channelsMats.Clear();
        }
Code Example #19
File: DrawBlock.cs  Project: iml885203/csie-seminar
    // =============================
    // Depth data image processing =
    // =============================

    private void updateDepthTexture()
    {
        if (_blockDepthBackGroundImage == null)
        {
            _blockDepthBackGroundImage = new Mat();
            _blockDepthBackGroundImage.setTo(new Scalar(0, 0, 0));
        }
        if (_blockDepthImage == null)
        {
            _blockDepthImage = new Mat();
        }


        // get the depth data and coordinates
        getDepthData(_minX, _minY, _maxX, _maxY);
        // update the depth distance range
        if (mouseclick == 2 && _ScreenSettingCompletionFlag == false)
        {
            InitDepthDistance();
        }
        // draw the depth image of the selected region
        _thread = new Thread(drawDepthSourceMat);
        _thread.Start();
        _thread.Join();
        _thread.Abort();

        // create a new Mat to hold the cropped depth Mat
        Mat subDepthMat = new Mat();

        if (_SyncFlag)
        {
            subDepthMat = _sourceMatDepth.submat(_revertMinY, _revertMaxY, _minX, _maxX);
        }
        else
        {
            subDepthMat = _blockImageBuffer.submat(_revertMinY, _revertMaxY, _minX, _maxX);
        }
        // flip the image (the Mat is drawn reversed)
        ReversedImage(subDepthMat).copyTo(subDepthMat);

        // dilate/erode to clean up the depth image
        Mat depthMatchImagePorcess = new Mat();

        subDepthMat.copyTo(depthMatchImagePorcess);
        Mat erodeElement  = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(7, 7));
        Mat dilateElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(5, 5));

        Imgproc.dilate(depthMatchImagePorcess, depthMatchImagePorcess, dilateElement);
        Imgproc.erode(depthMatchImagePorcess, depthMatchImagePorcess, erodeElement);
        //Imgproc.erode(depthMatchImagePorcess, depthMatchImagePorcess, erodeElement);

        // set the background depth (hotkey: L)
        setDepthSourceBackGroundMat(depthMatchImagePorcess);
        if (mouseclick == 2 && _ScreenSettingCompletionFlag == false)
        {
            InitDepthBackground(depthMatchImagePorcess);
        }

        // subtract the background depth
        Core.absdiff(depthMatchImagePorcess, _blockDepthBackGroundImage, depthMatchImagePorcess);
        // binarize
        Imgproc.threshold(depthMatchImagePorcess, depthMatchImagePorcess, 50, 255, Imgproc.THRESH_BINARY);
        // smoothing (to experiment with later)
        SmoothesImage(depthMatchImagePorcess).copyTo(depthMatchImagePorcess);
        // smooth out jagged edges
        Imgproc.blur(depthMatchImagePorcess, depthMatchImagePorcess, new OpenCVForUnity.Size(8, 8));
        Imgproc.threshold(depthMatchImagePorcess, depthMatchImagePorcess, 50, 255, Imgproc.THRESH_BINARY);
        // output the depth result
        depthMatchImagePorcess.copyTo(_blockDepthImage);

        // downscaled output (depth)
        Mat outDepthMat = new Mat(180, 320, CvType.CV_8UC1);

        Imgproc.resize(_blockDepthImage, outDepthMat, outDepthMat.size());

        // flip for display
        ReversedImage(outDepthMat).copyTo(outDepthMat);

        // capture output (show the segmented depth result)
        Utils.matToTexture2D(outDepthMat, _blockDepthTexture);
        _blockDepthImg.texture = _blockDepthTexture;

        // output to the game background
        if (_blockDepthBg != null)
        {
            Mat dilateElementNEW = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(10, 10));
            Imgproc.dilate(outDepthMat, outDepthMat, dilateElementNEW);
            Imgproc.dilate(outDepthMat, outDepthMat, dilateElementNEW);
            Imgproc.blur(outDepthMat, outDepthMat, new Size(10, 10));
            //Core.bitwise_not(outDepthMat, outDepthMat);
            Utils.matToTexture2D(outDepthMat, _blockDepthTextureBg);
            _blockDepthBg.texture = _blockDepthTextureBg;
        }

        subDepthMat.release();
        depthMatchImagePorcess.release();
        outDepthMat.release();
    }
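The "blur then re-threshold" pair used twice above is a quick way to round off jagged edges in a binary mask. A standalone sketch of just that step, with the kernel size and threshold as assumed parameters:

    // Hypothetical helper: soften a binary mask's edges, then make it binary again.
    private void SmoothBinaryMask(Mat mask, int ksize, double thresh)
    {
        Imgproc.blur(mask, mask, new Size(ksize, ksize));                    // round off jagged edges
        Imgproc.threshold(mask, mask, thresh, 255, Imgproc.THRESH_BINARY);   // back to a hard 0/255 mask
    }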