// Update is called once per frame
        void Update()
        {
            // Only process when the webcam helper is running and has produced a new frame.
            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();


                // Drop the alpha channel before the color-space work below.
                Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);

                //first find blue objects
                // Threshold in HSV against the configured blue range, clean the binary
                // mask with morphology, then locate/draw the tracked object on rgbMat.
                Imgproc.cvtColor(rgbMat, hsvMat, Imgproc.COLOR_RGB2HSV);
                Core.inRange(hsvMat, blue.getHSVmin(), blue.getHSVmax(), thresholdMat);
                morphOps(thresholdMat);
                trackFilteredObject(blue, thresholdMat, hsvMat, rgbMat);

                //Imgproc.putText (rgbMat, "W:" + rgbMat.width () + " H:" + rgbMat.height () + " SO:" + Screen.orientation, new Point (5, rgbMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                // Push the annotated RGB frame back to the display texture.
                Utils.matToTexture2D(rgbMat, texture, colors);
            }
        }
コード例 #2
0
        // Update is called once per frame.
        // Pulls the next video frame (looping at the end of the clip) and pushes it
        // to the display texture.
        void Update()
        {
            // Loop playback: rewind once the read position reaches the last frame.
            double framePos   = capture.get(Videoio.CAP_PROP_POS_FRAMES);
            double frameTotal = capture.get(Videoio.CAP_PROP_FRAME_COUNT);
            if (framePos >= frameTotal)
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            // NOTE: "PlayerLoop called recursively!" can occur on iOS; WebCamTexture is
            // recommended there.
            if (!capture.grab())
            {
                return;
            }

            capture.retrieve(rgbMat, 0);
            // OpenCV decodes frames as BGR; Unity textures expect RGB.
            Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
            Utils.matToTexture2D(rgbMat, texture, colors);
        }
コード例 #3
0
        /// <summary>
        /// Applies the selected demo effect to the camera frame in place.
        /// </summary>
        /// <param name="frameMatrix">RGBA camera frame, modified in place.</param>
        /// <param name="grayMatrix">Scratch buffer for the grayscale round-trip.</param>
        /// <param name="imageProcessingType">Which effect to apply.</param>
        protected void ProcessImage(Mat frameMatrix, Mat grayMatrix, ImageProcessingType imageProcessingType)
        {
            if (imageProcessingType == ImageProcessingType.DrawLine)
            {
                // Diagonal red line from the top-left to the bottom-right corner.
                Point topLeft     = new Point(0, 0);
                Point bottomRight = new Point(frameMatrix.cols(), frameMatrix.rows());
                Imgproc.line(frameMatrix, topLeft, bottomRight, new Scalar(255, 0, 0, 255), 4);
            }
            else if (imageProcessingType == ImageProcessingType.ConvertToGray)
            {
                // Round-trip through grayscale so the frame keeps its RGBA layout.
                Imgproc.cvtColor(frameMatrix, grayMatrix, Imgproc.COLOR_RGBA2GRAY);
                Imgproc.cvtColor(grayMatrix, frameMatrix, Imgproc.COLOR_GRAY2RGBA);
            }
        }
コード例 #4
0
        // Use this for initialization.
        // Loads the chessboard image, binarizes it with Otsu's method, and shows the
        // result on this GameObject's material.
        void Start()
        {
//			Utils.setDebugMode(false);
            // Load the sample image and wrap it in a single-channel Mat.
            Texture2D imgTexture = Resources.Load("chessboard") as Texture2D;
            Mat imgMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC1);
            Utils.texture2DToMat(imgTexture, imgMat);
            Debug.Log("imgMat dst ToString " + imgMat.ToString());

            // Binarize in place; Otsu picks the threshold automatically (the 0 is ignored).
            Imgproc.threshold(imgMat, imgMat, 0, 255, Imgproc.THRESH_BINARY | Imgproc.THRESH_OTSU);

            // Display the result.
            Texture2D outputTexture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);
            Utils.matToTexture2D(imgMat, outputTexture);
            gameObject.GetComponent <Renderer> ().material.mainTexture = outputTexture;
        }
コード例 #5
0
        // Update is called once per frame.
        // While playback is enabled, reads the next frame (looping at the end of the
        // clip) and copies it to the display texture.
        void Update()
        {
            if (!isPlaying)
            {
                return;
            }

            // Loop playback: jump back to frame 0 at the end of the clip.
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            if (!capture.grab())
            {
                return;
            }

            capture.retrieve(rgbMat, 0);
            // OpenCV decodes BGR; Unity wants RGB.
            Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
            Utils.matToTexture2D(rgbMat, texture, colors);
        }
コード例 #6
0
    /// <summary>
    /// Builds a binary (black/white) mask of skin-colored pixels from _cameraImageMat.
    /// </summary>
    /// <returns>A new single-channel mask Mat; the caller owns it and should Dispose it.</returns>
    private Mat GetBWSkinColor()
    {
        // NOTE(review): the old comment said "BGR to HSV" but the code uses the RGB
        // conversion code — presumably the input really is RGB(A) from Unity; confirm
        // against the capture path.
        // BUGFIX: all intermediate Mats wrap native memory and were never released;
        // dispose them once the final mask has been computed.
        using (var frameHsv = new Mat())
        using (var frameThreshold = new Mat())
        using (var kernel = Mat.ones(10, 10, CvType.CV_8U))
        using (var frameMorphClose = new Mat())
        {
            Imgproc.cvtColor(_cameraImageMat, frameHsv, Imgproc.COLOR_RGB2HSV);

            // Keep only pixels inside the fixed skin-tone HSV range.
            Core.inRange(frameHsv, new Scalar(0, 30, 60), new Scalar(20, 100, 255), frameThreshold);

            // Close small holes, then open to remove speckle noise.
            Imgproc.morphologyEx(frameThreshold, frameMorphClose, Imgproc.MORPH_CLOSE, kernel);
            var frameMorphOpen = new Mat();
            Imgproc.morphologyEx(frameMorphClose, frameMorphOpen, Imgproc.MORPH_OPEN, kernel);
            return frameMorphOpen;
        }
    }
コード例 #7
0
    // Builds a masked copy of `src`: pixels where the (inverted, cleaned-up) mask derived
    // from `img` is dark keep the src color, everything else is blacked out.
    // NOTE(review): alpha is written as 1 (not 255) in both branches — confirm this is
    // intentional for the downstream consumer. Also note `src` (a field) is resized in place.
    public Mat Processing(Mat img)
    {
        Mat mask = new Mat();

        // Invert the input so the region of interest becomes dark in `mask`.
        Core.bitwise_not(img, mask);
        // Halve src in place, then size the mask to match it.
        Imgproc.resize(src, src, new Size(mask.cols() / 2, mask.rows() / 2));
        Imgproc.resize(mask, mask, new Size(src.cols(), src.rows()));

        // Open removes speckle noise, close fills small holes in the mask.
        Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(5, 5));

        Imgproc.morphologyEx(mask, mask, Imgproc.MORPH_OPEN, kernel);
        Imgproc.morphologyEx(mask, mask, Imgproc.MORPH_CLOSE, kernel);


        // Opaque black canvas that receives the masked pixels.
        Mat img_masked = new Mat(src.rows(), src.cols(), CvType.CV_8UC4, new Scalar(0, 0, 0, 255));

        //Debug.Log(mask.size());
        //Debug.Log(mask.size());
        //Debug.Log(src.get(1,1).Length);
        //Debug.Log(img_masked.size());

        // NOTE(review): this overwrites the cleaned-up `mask` with (src OR black canvas);
        // the per-pixel test below then compares the first channel against 10 — confirm
        // the mask is not meant to be preserved here.
        Core.bitwise_or(src, img_masked, mask);
        for (int i = 0; i < img_masked.cols(); i++)
        {
            for (int j = 0; j < img_masked.rows(); j++)
            {
                if (mask.get(j, i)[0] < 10)
                {
                    // Dark mask pixel: copy the source color (alpha written as 1).
                    double[] ch = src.get(j, i);
                    img_masked.put(j, i, ch[0], ch[1], ch[2], 1);
                    //img_masked.put(j, i, 100, 0, 0, 1);
                }
                else
                {
                    // Everything else stays black (alpha 1).
                    img_masked.put(j, i, 0, 0, 0, 1);
                }
            }
        }

        return(img_masked);
    }
コード例 #8
0
        /// <summary>
        /// Converts an image to a 32-bit float log-intensity image: I = log(I + 1).
        /// </summary>
        /// <param name="im">Source image; 1-channel, or 3-channel (converted to grayscale).</param>
        /// <returns>The converted CV_32F Mat, or null for unsupported channel counts.</returns>
        Mat convert_image(Mat im)
        {
            Mat I = null;

            if (im.channels() == 1)
            {
                // Already grayscale; just ensure a float type.
                if (im.type() != CvType.CV_32F)
                {
                    I = new Mat();
                    im.convertTo(I, CvType.CV_32F);
                }
                else
                {
                    I = im;
                }
            }
            else if (im.channels() == 3)
            {
                // BUGFIX: this branch handles a 3-channel image, so use the RGB (not
                // RGBA) conversion code to match the actual channel count.
                Mat img = new Mat();
                Imgproc.cvtColor(im, img, Imgproc.COLOR_RGB2GRAY);
                if (img.type() != CvType.CV_32F)
                {
                    I = new Mat();
                    img.convertTo(I, CvType.CV_32F);
                }
                else
                {
                    I = img;
                }
            }
            else
            {
                // BUGFIX: previously fell through with I == null and crashed with a
                // NullReferenceException on Core.add below; bail out explicitly instead.
                Debug.Log("Unsupported image type!");
                return null;
            }

            // I = log(I + 1), computed in place.
            Core.add(I, new Scalar(1.0), I);
            Core.log(I, I);
            return(I);
        }
コード例 #9
0
        /// <summary>
        /// Called when a marker-detection pass finishes: optionally renders the detected
        /// markers (and their pose axes) into the preview texture, applies the latest AR
        /// transform to the target GameObject, then clears the busy flag.
        /// </summary>
        private void OnDetectionDone()
        {
            if (displayCameraPreview)
            {
                // The preview is drawn on an RGB copy of the downscaled frame.
                Imgproc.cvtColor(downScaleFrameMat, rgbMat4preview, Imgproc.COLOR_RGBA2RGB);

                if (ids.total() > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat4preview, corners, ids, new Scalar(0, 255, 0));

                    if (applyEstimationPose)
                    {
                        for (int i = 0; i < ids.total(); i++)
                        {
                            // Row i of rvecs/tvecs holds the pose of marker i.
                            using (Mat rvec = new Mat(rvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                                using (Mat tvec = new Mat(tvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1))) {
                                    // In this example we are processing with RGB color image, so Axis-color correspondences are X: blue, Y: green, Z: red. (Usually X: red, Y: green, Z: blue)
                                    Aruco.drawAxis(rgbMat4preview, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
                                }
                        }
                    }
                }

                Utils.fastMatToTexture2D(rgbMat4preview, texture);
            }

            if (applyEstimationPose)
            {
                if (hasUpdatedARTransformMatrix)
                {
                    hasUpdatedARTransformMatrix = false;

                    // Apply the cameraToWorld matrix with the Z-axis inverted.
                    ARM = arCamera.cameraToWorldMatrix * invertZM * ARM;

                    ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
                }
            }

            // Allow the next detection pass to start.
            isDetecting = false;
        }
コード例 #10
0
    // Comic-style filter: posterizes the grayscale camera image into three tones and
    // overlays Canny edges, compositing the result over bgMat into dstMat.
    void Update()
    {
        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame() && initialized)
        {
            Mat rgbaMat = webCamTextureToMatHelper.GetMat();
            Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
            // Start each frame from the background pattern.
            bgMat.copyTo(dstMat);

            // A blurred copy feeds the Canny edge pass below.
            Imgproc.GaussianBlur(grayMat, lineMat, new Size(3, 3), 0);
            grayMat.get(0, 0, grayPixels);

            // Posterize: <70 -> black, 70..119 -> mid gray (100), >=120 -> white.
            // maskPixels marks the black/white pixels that will overwrite the background;
            // mid-gray pixels keep the background pattern.
            for (int i = 0; i < grayPixels.Length; i++)
            {
                maskPixels [i] = 0;
                if (grayPixels [i] < 70)
                {
                    grayPixels [i] = 0;
                    maskPixels [i] = 1;
                }
                else if (70 <= grayPixels [i] && grayPixels [i] < 120)
                {
                    grayPixels [i] = 100;
                }
                else
                {
                    grayPixels [i] = 255;
                    maskPixels [i] = 1;
                }
            }

            grayMat.put(0, 0, grayPixels);
            maskMat.put(0, 0, maskPixels);
            // Copy only the masked (black/white) pixels over the background.
            grayMat.copyTo(dstMat, maskMat);

            // Draw detected edges as black lines: the Canny output doubles as the copy
            // mask, and inverting lineMat makes the edge pixels dark.
            Imgproc.Canny(lineMat, lineMat, 20, 120);
            lineMat.copyTo(maskMat);
            Core.bitwise_not(lineMat, lineMat);
            lineMat.copyTo(dstMat, maskMat);
            Utils.matToTexture2D(dstMat, texture);             //, webCamTextureToMatHelper.GetBufferColors());
        }
    }
コード例 #11
0
    /// <summary>
    /// Fits the tracked shape(s) to the given image across all search levels.
    /// </summary>
    /// <param name="im">Input image (grayscale or RGBA).</param>
    /// <param name="p">Tracker parameters (search sizes, fit tolerances).</param>
    /// <returns>False when there is nothing to track or a shape has the wrong point count.</returns>
    public bool track(Mat im, FaceTrackerParams p)
    {
        if (points.Count <= 0)
        {
            return false;
        }

        // Fitting runs on a single-channel image; convert only when necessary.
        Mat gray;
        if (im.channels() == 1)
        {
            gray = im;
        }
        else
        {
            gray = new Mat();
            Imgproc.cvtColor(im, gray, Imgproc.COLOR_RGBA2GRAY);
        }

        // (re-)detection is intentionally disabled here:
        // if (!tracking)
        //     points = detector.detect (gray, p.scaleFactor, p.minNeighbours, p.minSize);

        for (int idx = 0; idx < points.Count; idx++)
        {
            // Every shape must carry exactly the model's point count.
            if (points [idx].Length != smodel.npts())
            {
                return false;
            }

            // Coarse-to-fine: refine the shape once per configured search size.
            for (int level = 0; level < p.ssize.Count; level++)
            {
                points [idx] = fit(gray, points [idx], p.ssize [level], p.robust, p.itol, p.ftol);
            }
        }

        return true;
    }
コード例 #12
0
        // Use this for initialization.
        // Runs normalized cross-correlation template matching and outlines every
        // position whose score survives the 0.8 threshold.
        void Start()
        {
            // Load the search image and the template to look for.
            Texture2D imgTexture  = Resources.Load("lena") as Texture2D;
            Texture2D tempTexture = Resources.Load("template") as Texture2D;
            Mat       imgMat      = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);
            Mat       tempMat     = new Mat(tempTexture.height, tempTexture.width, CvType.CV_8UC4);

            Utils.texture2DToMat(imgTexture, imgMat);
            Utils.texture2DToMat(tempTexture, tempMat);

            // The score map has one entry per possible template position.
            int resultCols = imgMat.cols() - tempMat.cols() + 1;
            int resultRows = imgMat.rows() - tempMat.rows() + 1;
            Mat scoreMap   = new Mat(resultRows, resultCols, CvType.CV_32FC1);

            Imgproc.matchTemplate(imgMat, tempMat, scoreMap, Imgproc.TM_CCOEFF_NORMED);

            // Zero out scores below 0.8 so only strong matches survive.
            Imgproc.threshold(scoreMap, scoreMap, 0.8, 1.0, Imgproc.THRESH_TOZERO);

            // Draw a red box (and log the score) for every surviving match position.
            for (int row = 0; row < scoreMap.rows(); row++)
            {
                for (int col = 0; col < scoreMap.cols(); col++)
                {
                    double score = scoreMap.get(row, col) [0];
                    if (score > 0)
                    {
                        Imgproc.rectangle(imgMat, new Point(col, row), new Point(col + tempMat.cols(), row + tempMat.rows()), new Scalar(255, 0, 0, 255), 2);
                        Debug.Log("value" + score);
                    }
                }
            }

            Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(imgMat, texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
        }
コード例 #13
0
 /// <summary>
 /// Draws the maze map: every walkable cell (or every cell when _isFullMap is set)
 /// plus the outer boundary rectangle.
 /// </summary>
 public void DrawMap()
 {
     // Visit every block of the map grid.
     for (int i = 0; i < ScreenHeightBlock; i++)
     {
         for (int j = 0; j < ScreenWidthBlock; j++)
         {
             // REFACTOR: both branches drew the exact same block, so the duplicated
             // if/else collapses to a single OR. isExistCanMoveArea is still evaluated
             // first, preserving the original call order.
             if (_mapData.isExistCanMoveArea(new Point(j, i)) || _isFullMap)
             {
                 DrawMazeBlock(j, i, _mapData.getWall(j, i));
             }
         }
     }
     // Draw the map boundary.
     Imgproc.rectangle(_mapMat, new Point(0, 0), new Point(_mapMat.width() - 1, _mapMat.height() - 1), _mapWellColor, _mapWellThickness);
 }
コード例 #14
0
    /// <summary>
    /// Applies an unsharp-mask sharpen to the preview image and swaps the sharpened
    /// texture back into the RawImage.
    /// </summary>
    public void Sharpen()
    {
        // Snapshot the current preview into a writable texture.
        warpedTexture = new Texture2D(previewRawImage.mainTexture.width, previewRawImage.mainTexture.height, TextureFormat.RGB24, false);
        Graphics.CopyTexture(previewRawImage.texture, warpedTexture);

        Mat sourceMat = new Mat(warpedTexture.height, warpedTexture.width, CvType.CV_8UC3);
        Utils.texture2DToMat(warpedTexture, sourceMat);

        Mat blurredMat = new Mat(warpedTexture.height, warpedTexture.width, CvType.CV_8UC3);
        Imgproc.GaussianBlur(sourceMat, blurredMat, new Size(0, 0), 3);

        // Unsharp mask: result = 1.5*source - 0.5*blurred.
        Core.addWeighted(sourceMat, 1.5, blurredMat, -.5, 0, blurredMat);

        Utils.matToTexture2D(blurredMat, warpedTexture);
        sourceMat.Dispose();
        blurredMat.Dispose();
        previewRawImage.texture = warpedTexture;
        warpedTexture           = null;
        System.GC.Collect();
    }
コード例 #15
0
        /// <summary>
        /// Raises the save button event.
        /// Saves the current pattern image to persistent storage and loads the AR scene.
        /// </summary>
        public void OnSaveButton()
        {
            if (patternRawImage.texture != null)
            {
                Texture2D patternTexture = (Texture2D)patternRawImage.texture;

                // BUGFIX: patternMat wraps native memory and was never released;
                // dispose it once the file has been written.
                using (Mat patternMat = new Mat(patternRect.size(), CvType.CV_8UC3))
                {
                    Utils.texture2DToMat(patternTexture, patternMat);
                    // imwrite expects BGR channel order.
                    Imgproc.cvtColor(patternMat, patternMat, Imgproc.COLOR_RGB2BGR);

                    string savePath = Application.persistentDataPath;
                    Debug.Log("savePath " + savePath);

                    Imgcodecs.imwrite(savePath + "/patternImg.jpg", patternMat);
                }

                #if UNITY_5_3 || UNITY_5_3_OR_NEWER
                SceneManager.LoadScene("WebCamTextureMarkerLessARExample");
                #else
                Application.LoadLevel("WebCamTextureMarkerLessARExample");
                #endif
            }
        }
コード例 #16
0
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                // RGB copy of the current frame (alpha dropped).
                Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);


                // Detect feature keypoints on the RGBA frame, then render them onto
                // rgbaMat using rgbMat as the clean source image.
                detector.detect(rgbaMat, keypoints);
//                Debug.Log ("keypoints.ToString() " + keypoints.ToString());
                Features2d.drawKeypoints(rgbMat, keypoints, rgbaMat, Scalar.all(-1), Features2d.NOT_DRAW_SINGLE_POINTS);



                // Outline the pattern capture area in red.
                Imgproc.rectangle(rgbaMat, patternRect.tl(), patternRect.br(), new Scalar(255, 0, 0, 255), 5);


                Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
コード例 #17
0
    // Binarizes the current camRenderTexture (every pixel brighter than 1 becomes white)
    // and optionally shows the result on the visualization surface.
    // NOTE(review): the srcTexture parameter is never used — the image is always taken
    // from camRenderTexture. Confirm whether callers expect srcTexture to be honored.
    public void CalculateThreshold(Texture2D srcTexture) //
    {
        Texture2D imgTexture = ToTexture2D(camRenderTexture);

        Mat imgMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC1);

        Utils.texture2DToMat(imgTexture, imgMat);
        //Debug.Log("imgMat.ToString() " + imgMat.ToString());
        Imgproc.threshold(imgMat, imgMat, 1, 255, Imgproc.THRESH_BINARY);

        // draw
        Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);

        Utils.matToTexture2D(imgMat, texture);

        if (AbleVisualSurface)
        {
            VisualisationSurface.SetActive(true);
            VisualisationSurface.GetComponent <Renderer>().material.mainTexture = texture;
        }
    }
コード例 #18
0
    /// <summary>
    /// Compares every contour in sContours against every contour in dContours and
    /// returns the fraction of pairs whose shape distance falls below `threshold`.
    /// </summary>
    /// <param name="sContours">Source contours.</param>
    /// <param name="dContours">Destination contours.</param>
    /// <returns>Matched pairs / total pairs; 0 when either list is empty.</returns>
    double compareContours(List <MatOfPoint> sContours, List <MatOfPoint> dContours)
    {
        int totalPairs = sContours.Count * dContours.Count;
        if (totalPairs == 0)
        {
            // The original left result at 0 when a list was empty; keep that behavior
            // (and avoid a 0/0 division by the hoisted ratio below).
            return 0;
        }

        int count = 0;
        for (int i = 0; i < sContours.Count; i++)
        {
            for (int j = 0; j < dContours.Count; j++)
            {
                double returnVal = Imgproc.matchShapes(sContours [i], dContours [j], Imgproc.CV_CONTOURS_MATCH_I1, 0);
                if (returnVal < threshold)
                {
                    count++;
                }
            }
        }

        // PERF: the ratio was recomputed on every inner iteration; computing it once at
        // the end yields the same final value.
        return (double)count / totalPairs;
    }
コード例 #19
0
    // Walks from pt_start toward pt_end (decreasing y), one image row per step with x
    // interpolated linearly, until a non-zero pixel ("wall") is hit in cameraMat.
    // Returns the number of steps taken and draws a small circle at the stopping point.
    // NOTE(review): assumes pt_start.y >= pt_end.y and that the interpolated x stays
    // inside the image bounds — confirm against the callers.
    float GetDistanceToWall(Point pt_start, Point pt_end)
    {
        float distance = 0;
        int   check_x  = 0;
        int   check_y  = 0;

        for (int i = (int)pt_start.y; i >= (int)pt_end.y; i--)
        {
            // Linear interpolation of x for row i (the `1` rounds to one decimal digit
            // before the int cast).
            check_x = (int)Math.Round(pt_start.x + ((pt_end.x - pt_start.x) / (pt_start.y - pt_end.y) * (pt_start.y - i)), 1);
            check_y = i;
            double[] buff = cameraMat.get(check_y, check_x);
            if (buff[0] != 0)
            {
                // Hit a wall pixel; stop counting.
                break;
            }
            distance++;
        }
        // Mark the stopping point for debugging/visualization.
        Imgproc.circle(cameraMat, new Point(check_x, check_y), 2, new Scalar(100), 1);
        //Debug.Log((check_x, check_y, distance));
        return(distance);
    }
コード例 #20
0
File: SavePNG.cs  Project: srini196/Photo3D
 // Prepare the watermark mats used later when saving images.
 void Start()
 {
     // Convert the RGB watermark texture to a Mat.
     // BUGFIX: log messages below had "watermake" typos and the second one did not
     // mention it was converting the *depth* watermark.
     Debug.Log("converting watermark texture to mat");
     watermarkMat = Util.toMat(watermark, CvType.CV_8UC4);
     //Imgproc.cvtColor(watermarkMat, watermarkMat, Imgproc.COLOR_BGR2RGBA);
     if (watermarkDepth)
     {
         // A dedicated depth watermark texture was supplied; use it directly.
         Debug.Log("converting watermark depth texture to mat");
         watermarkDepthMat = Util.toMat(watermarkDepth, CvType.CV_8UC4);
         //Imgproc.cvtColor(watermarkMat, watermarkMat, Imgproc.COLOR_BGR2RGBA);
     }
     else
     {
         // No depth watermark: derive one by thresholding the RGB watermark's luminance.
         Debug.Log("Using the rgb to compute a depth watermark");
         // BUGFIX: the temporary gray mat wraps native memory and was never released.
         using (Mat waterGray = new Mat())
         {
             Imgproc.cvtColor(watermarkMat, waterGray, Imgproc.COLOR_RGBA2GRAY);
             Imgproc.threshold(waterGray, watermarkDepthMat, 1, 255, Imgproc.THRESH_BINARY);
         }
     }
     /////////////
 }
コード例 #21
0
        /// <summary>
        /// Computes a homography from the given face quad to a fixed-size upright
        /// rectangle and warps the cached camera image into rectMat_array[i].
        /// </summary>
        /// <param name="face_point_array">The four detected corner points of the face.</param>
        /// <param name="i">Slot in rectMat_array that receives the rectified image.</param>
        void Rectify(ref Point[] face_point_array, int i)
        {
            rectMat_array[i] = new Mat(360, 640, CvType.CV_8UC1);

            // Target rectangle corners: bottom-left, bottom-right, top-left, top-right.
            Point[] targetCorners =
            {
                new Point(0.0, HOMOGRAPHY_HEIGHT),
                new Point(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT),
                new Point(0.0, 0.0),
                new Point(HOMOGRAPHY_WIDTH, 0.0)
            };

            MatOfPoint2f sourceQuad = new MatOfPoint2f(face_point_array);
            MatOfPoint2f targetQuad = new MatOfPoint2f(targetCorners);

            // Homography mapping the detected quad onto the upright rectangle.
            Mat homography = Calib3d.findHomography(sourceQuad, targetQuad);

            Imgproc.warpPerspective(cached_initMat, rectMat_array[i], homography, new Size(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT));
        }
コード例 #22
0
        /// <summary>
        /// Updates preview object with new captured image: downscales the frame, draws
        /// the detected screen points, then extracts and displays the rectified faces.
        /// </summary>
        /// <param name="texture">The new image that got captured.</param>
        public void OnImageCaptured(Texture2D texture)
        {
            // Convert Texture to Mat and store as cached_initMat.
            // NOTE(review): sizes are hard-coded to a 1920x1080 capture and a 640x360
            // working resolution — confirm the capture always matches.
            cached_bigMat  = new Mat(1080, 1920, CvType.CV_8UC1);
            cached_initMat = new Mat(360, 640, CvType.CV_8UC1);

            Utils.texture2DToMat(texture, cached_bigMat, false, 0);
            Imgproc.resize(cached_bigMat, cached_initMat, new Size(640, 360), 1.0 / SCALE_FACTOR, 1.0 / SCALE_FACTOR, 1);

            out_texture = new Texture2D(640, 360, TextureFormat.RGBA32, false);

            // Finds existing screen points and overlays them on the working image.
            SetC2ScreenPoints();
            DrawC2ScreenPoints(ref cached_initMat);

            // Rectify and display the faces found at the detected points.
            GetFaces(ref c2_point_array);
            ShowFaces();

            // outMat = cached_initMat;
            // ShowMat(ref outMat);
        }
コード例 #23
0
        /// <summary>
        /// Applies a vertical-edge Sobel kernel to a grayscale copy of the source image
        /// and shows the result on this object's material.
        /// </summary>
        private void Start()
        {
            var src = Util.LoadTexture("imori_256x256");
            var dst = new Mat(src.rows(), src.cols(), CvType.CV_8UC4);

            // Vertical-edge Sobel kernel.
            // CLEANUP: the horizontal kernel (k_h) was declared but never used, so it
            // has been removed.
            var k_v = new[]
            {
                1f, 2f, 1f,
                0f, 0f, 0f,
                -1f, -2f, -1f
            };

            Imgproc.cvtColor(src, dst, Imgproc.COLOR_RGBA2GRAY);
            Imgproc.filter2D(dst, dst, -1, new MatOfFloat(k_v));
            GetComponent <Renderer>().material.mainTexture = Util.MatToTexture2D(dst);
        }
コード例 #24
0
        /// <summary>
        /// Calculate horizontal offset of the vertical line (center) in the binary image
        /// </summary>
        /// <param name="src"> binary image </param>
        /// <returns> horizontal offset of the vertical line; falls back to the image
        /// center when the min/max locations are in an unexpected order </returns>
        private int CenterOfLine(Mat src)
        {
            // sobel in X-direction
            //Image<Gray, float> sobel = src.Clone().Erode(3).Sobel(1, 0, 3);
            // NOTE(review): despite the variable name, only an erosion is applied here —
            // the Sobel call below is commented out. Confirm that is intentional.
            Mat sobel = new Mat(src.Size(), src.Type());

            Imgproc.Erode(src, sobel, Imgproc.GetStructuringElement(Imgproc.MorphRect, new Size(3, 3)));
            //Imgproc.Sobel(src, sobel, src.Type(), 0, 3);

            // min max loc
            double min = 0, max = 0;

            Core.Core.MinMaxLocResult result = Core.Core.MinMaxLoc(sobel);// MinMaxLoc(sobel, ref min, ref max, ref minLoc, ref maxLoc);

            // invalid state
            if (result.MinLoc.X <= result.MaxLoc.X)
            {
                return(src.Width() / 2);
            }
            // Midpoint between the max location (left) and min location (right).
            return(((int)(result.MinLoc.X) - (int)(result.MaxLoc.X)) / 2 + (int)(result.MaxLoc.X));
        }
コード例 #25
0
        /// <summary>
        /// Draws a single detection: a bounding box plus a "class: confidence" label.
        /// </summary>
        /// <param name="classId">Class identifier.</param>
        /// <param name="conf">Confidence score.</param>
        /// <param name="left">Left.</param>
        /// <param name="top">Top.</param>
        /// <param name="right">Right.</param>
        /// <param name="bottom">Bottom.</param>
        /// <param name="frame">Frame to draw on.</param>
        protected virtual void drawPred(int classId, float conf, double left, double top, double right, double bottom, Mat frame)
        {
            // Green bounding box around the detection.
            Imgproc.rectangle(frame, new Point(left, top), new Point(right, bottom), new Scalar(0, 255, 0, 255), 2);

            // Prefix the confidence with the class name when one is available.
            string label = conf.ToString();
            bool hasNames = classNames != null && classNames.Count != 0;
            if (hasNames && classId < (int)classNames.Count)
            {
                label = classNames[classId] + ": " + label;
            }

            // White filled background sized to the label, then the label text in black.
            int[] baseLine = new int[1];
            Size labelSize = Imgproc.getTextSize(label, Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, 1, baseLine);

            top = Mathf.Max((float)top, (float)labelSize.height);
            Imgproc.rectangle(frame, new Point(left, top - labelSize.height),
                new Point(left + labelSize.width, top + baseLine[0]), Scalar.all(255), Core.FILLED);
            Imgproc.putText(frame, label, new Point(left, top), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(0, 0, 0, 255));
        }
コード例 #26
0
    // Update is called once per frame.
    // Grabs the current RGBA camera image from Vuforia, converts it to grayscale, and
    // displays it as the full background.
    void Update()
    {
        MatDisplay.SetCameraFoV(41.5f);

        Image cameraImageRaw = CameraDevice.Instance.GetCameraImage(Image.PIXEL_FORMAT.RGBA8888);

        if (cameraImageRaw != null)
        {
            if (cameraImageRaw != null && cameraImageMat == null)
            {
                // Lazily allocate once the camera resolution is known (rows first, then columns).
                cameraImageMat = new Mat(cameraImageRaw.Height, cameraImageRaw.Width, CvType.CV_8UC4);
                // BUGFIX: the grayscale buffer is single-channel; it was allocated as
                // CV_8UC4, forcing cvtColor to reallocate it on the first frame.
                grayScale      = new Mat(cameraImageRaw.Height, cameraImageRaw.Width, CvType.CV_8UC1);
            }

            byte[] pixels = cameraImageRaw.Pixels;
            cameraImageMat.put(0, 0, pixels);
            // BUGFIX: the source is RGBA8888 (4 channels), so use the RGBA conversion code
            // instead of COLOR_RGB2GRAY.
            Imgproc.cvtColor(cameraImageMat, grayScale, Imgproc.COLOR_RGBA2GRAY);
            MatDisplay.DisplayMat(grayScale, MatDisplaySettings.FULL_BACKGROUND);
        }
    }
コード例 #27
0
    /// <summary>
    /// Per-frame callback: copies the raw frame, updates the camera/projection
    /// matrices, then runs HSV color tracking for every configured tracker.
    /// </summary>
    /// <param name="sample">The captured video frame (disposed here).</param>
    private void OnFrameSampleAcquired(VideoCaptureSample sample)
    {
        // Grow the reusable byte buffer only when the sample is larger than before.
        if (_latestImageBytes == null || _latestImageBytes.Length < sample.dataLength)
        {
            _latestImageBytes = new byte[sample.dataLength];
        }
        sample.CopyRawImageDataIntoBuffer(_latestImageBytes);

        float[] cameraToWorldMatrixAsFloat;
        float[] projectionMatrixAsFloat;
        if (sample.TryGetCameraToWorldMatrix(out cameraToWorldMatrixAsFloat) == false || sample.TryGetProjectionMatrix(out projectionMatrixAsFloat) == false)
        {
            Debug.Log("Failed to get camera to world or projection matrix");
            return;
        }

        _cameraToWorldMatrix = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(cameraToWorldMatrixAsFloat);
        _projectionMatrix    = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(projectionMatrixAsFloat);

        sample.Dispose();

        // BUGFIX: these temporary Mats wrap native memory and were never released.
        using (Mat frameBGRA = new Mat(_resolution.height, _resolution.width, CvType.CV_8UC4))
        using (Mat frameBGR = new Mat(_resolution.height, _resolution.width, CvType.CV_8UC3))
        using (Mat HSV = new Mat())
        using (Mat threshold = new Mat())
        {
            frameBGRA.put(0, 0, _latestImageBytes);
            Imgproc.cvtColor(frameBGRA, frameBGR, Imgproc.COLOR_BGRA2BGR);

            // PERF: the BGR->HSV conversion does not depend on the tracker, so do it
            // once per frame instead of once per tracker.
            Imgproc.cvtColor(frameBGR, HSV, Imgproc.COLOR_BGR2HSV);

            // Threshold and track each configured object in HSV space.
            foreach (ObjectTracker ot in _trackers)
            {
                Core.inRange(HSV, new Scalar(ot.minH, ot.minSaturation, ot.minLight), new Scalar(ot.maxH, 255, 255), threshold);
                morphOps(threshold);
                trackFilteredObject(ot, threshold);
            }
        }
    }
コード例 #28
0
    // Start is called before the first frame update.
    // Loads the "maker" texture, applies a perspective warp that moves the top-right
    // corner down by 100px, and displays the original (red corner dots) and the warped
    // result (blue corner dots) on two quads.
    void Start()
    {
        Texture2D sourceTexture = Resources.Load("maker") as Texture2D;
        Mat       inputMat      = new Mat(sourceTexture.height, sourceTexture.width, CvType.CV_8UC4);

        Utils.texture2DToMat(sourceTexture, inputMat);
        UnityEngine.Debug.Log("inputMat.ToString() " + inputMat.ToString());

        // Four corner correspondences, packed as (x, y) pairs into 4x1 two-channel mats.
        Mat src_mat = new Mat(4, 1, CvType.CV_32FC2);
        Mat dst_mat = new Mat(4, 1, CvType.CV_32FC2);

        Mat outputMat = inputMat.clone();

        // Corner order: top-left, top-right, bottom-left, bottom-right.
        // Only the top-right corner moves (y: 0 -> 100) in the destination.
        src_mat.put(0, 0, 0.0, 0.0, sourceTexture.width, 0.0, 0.0, sourceTexture.height, sourceTexture.width, sourceTexture.height);
        dst_mat.put(0, 0, 0.0, 0.0, sourceTexture.width, 100.0, 0.0, sourceTexture.height, sourceTexture.width, sourceTexture.height);

        Mat perspectiveTransform = Imgproc.getPerspectiveTransform(src_mat, dst_mat);

        Imgproc.warpPerspective(inputMat, outputMat, perspectiveTransform, new Size(sourceTexture.width, sourceTexture.height));

        Texture2D outputTexture = new Texture2D(outputMat.cols(), outputMat.rows(), TextureFormat.RGBA32, false);
        Texture2D inputTexture  = new Texture2D(inputMat.cols(), inputMat.rows(), TextureFormat.RGBA32, false);

        #region CIRCLE POINT
        // Red dots mark the source corners; blue dots mark the corresponding warped corners.
        Imgproc.circle(inputMat, new Point(0, 0), 4, new Scalar(255, 0, 0), 8);
        Imgproc.circle(inputMat, new Point(sourceTexture.width, 0), 4, new Scalar(255, 0, 0), 8);
        Imgproc.circle(inputMat, new Point(0, sourceTexture.height), 4, new Scalar(255, 0, 0), 8);
        Imgproc.circle(inputMat, new Point(sourceTexture.width, sourceTexture.height), 4, new Scalar(255, 0, 0), 8);

        Imgproc.circle(outputMat, new Point(0, 0), 4, new Scalar(0, 0, 255), 8);
        Imgproc.circle(outputMat, new Point(sourceTexture.width, 100), 4, new Scalar(0, 0, 255), 8);
        Imgproc.circle(outputMat, new Point(0, sourceTexture.height), 4, new Scalar(0, 0, 255), 8);
        Imgproc.circle(outputMat, new Point(sourceTexture.width, sourceTexture.height), 4, new Scalar(0, 0, 255), 8);
        #endregion

        Utils.matToTexture2D(outputMat, outputTexture);
        Utils.matToTexture2D(inputMat, inputTexture);
        perQuad.GetComponent <Renderer>().material.mainTexture = outputTexture;
        oriQuad.GetComponent <Renderer>().material.mainTexture = inputTexture;
    }
コード例 #29
0
        /// <summary>
        /// Finds the foreground mask mat: thresholds the symmetric difference between
        /// fgMat and bgMat into the binary fgMaskMat field (255 = foreground).
        /// </summary>
        /// <param name="fgMat">Fg mat.</param>
        /// <param name="bgMat">Background mat.</param>
        /// <param name="thresh">Thresh.</param>
        private void findFgMaskMat(Mat fgMat, Mat bgMat, float thresh = 13.0f)
        {
            Mat diff1 = new Mat();

            Core.absdiff(fgMat, bgMat, diff1);
            Mat diff2 = new Mat();

            Core.absdiff(bgMat, fgMat, diff2);
            // NOTE(review): absdiff is symmetric, so diff is simply 2x the absolute
            // difference — confirm the doubling is intended.
            Mat diff = diff1 + diff2;

            // Zero out differences at or below `thresh` (maxval is unused by THRESH_TOZERO).
            Imgproc.threshold(diff, diff, thresh, 0, Imgproc.THRESH_TOZERO);

            Imgproc.cvtColor(diff, fgMaskMat, Imgproc.COLOR_RGBA2GRAY);

            // Suppress weak residual gray values, then binarize everything non-zero to 255.
            Imgproc.threshold(fgMaskMat, fgMaskMat, 10, 0, Imgproc.THRESH_TOZERO);

            Imgproc.threshold(fgMaskMat, fgMaskMat, 0, 255, Imgproc.THRESH_BINARY);

            // Release the native temporaries.
            diff1.Dispose();
            diff2.Dispose();
            diff.Dispose();
        }
        /// <summary>
        /// Runs cascade face detection on the "lena" sample image and displays the
        /// result with red boxes around every detection.
        /// </summary>
        private void Run()
        {
            // Load the sample image into an RGBA Mat.
            Texture2D imgTexture = Resources.Load("lena") as Texture2D;
            Mat imgMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);
            Utils.texture2DToMat(imgTexture, imgMat);
            Debug.Log("imgMat.ToString() " + imgMat.ToString());

            // Detection runs on an equalized grayscale copy.
            Mat grayMat = new Mat();
            Imgproc.cvtColor(imgMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
            Imgproc.equalizeHist(grayMat, grayMat);

            MatOfRect faces = new MatOfRect();
            if (cascade != null)
            {
                cascade.detectMultiScale(grayMat, faces, 1.1, 2, 2,
                                         new Size(20, 20), new Size());
            }

            // Outline every detection in red on the color image.
            foreach (OpenCVForUnity.CoreModule.Rect face in faces.toArray())
            {
                Debug.Log("detect faces " + face);
                Imgproc.rectangle(imgMat, new Point(face.x, face.y), new Point(face.x + face.width, face.y + face.height), new Scalar(255, 0, 0, 255), 2);
            }

            Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);
            Utils.matToTexture2D(imgMat, texture);
            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
        }