Example #1
        public static Mat perspectiveAlign(Mat src, Mat dst)
        {
            //Parameters for perspective warping
            //MatOfPoint2f srcQuad = new MatOfPoint2f(new Point(-80, 150), new Point(570, 150), new Point(55, 440), new Point(425, 440));
            //MatOfPoint2f dstQuad = new MatOfPoint2f(new Point(0, 0), new Point(480, 0), new Point(0, 370), new Point(480, 370));

            //  Texture2D tex1 = new Texture2D(src.width(), src.height(), TextureFormat.RGB24, false);
            //  Utils.matToTexture2D(src, tex1);
            //  byte[] bytes1 = tex1.EncodeToJPG();
            //  System.IO.File.WriteAllBytes("D:/Patchs/" + yy + ".jpg", bytes1);
            //  yy++;

            // FOR THE NEW BASE
            // MatOfPoint2f srcQuad = new MatOfPoint2f(new Point(-20, 185), new Point(460, 185), new Point(90, 500), new Point(360, 500));
            // MatOfPoint2f dstQuad = new MatOfPoint2f(new Point(0, 0), new Point(480, 0), new Point(0, 640), new Point(480, 640));

            MatOfPoint2f srcQuad = new MatOfPoint2f(new Point(-60, 80), new Point(540, 80), new Point(45, 370), new Point(435, 370));
            MatOfPoint2f dstQuad = new MatOfPoint2f(new Point(0, 0), new Point(480, 0), new Point(0, 380), new Point(480, 380));

            Mat M = Imgproc.getPerspectiveTransform(srcQuad, dstQuad);

            Imgproc.warpPerspective(src, dst, M, new Size(src.width(), src.height()));
            Imgproc.resize(dst, dst, new Size(src.width(), 550));
            return(dst);
        }
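
        // --- Added usage sketch (not in the original example) ---
        // A hypothetical caller for perspectiveAlign above, assuming the same
        // OpenCV for Unity API; 'cameraMat' stands in for an incoming camera frame.
        public static Mat perspectiveAlignExample(Mat cameraMat)
        {
            Mat warped = new Mat();
            perspectiveAlign(cameraMat, warped); // writes the warped, 550-px-tall view into 'warped'
            return warped;
        }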
        public void ProcessFinger(Mat rgbaImage)
        {
            Imgproc.pyrDown(rgbaImage, mPyrDownMat);
            Imgproc.pyrDown(mPyrDownMat, mPyrDownMat);

            Imgproc.cvtColor(mPyrDownMat, mHsvMat, Imgproc.COLOR_RGB2HSV_FULL);
            Imgproc.cvtColor(mPyrDownMat, mRGBAMat, Imgproc.COLOR_RGB2RGBA);
            Imgproc.cvtColor(mPyrDownMat, mYCrCbMat, Imgproc.COLOR_RGB2YCrCb);

            Core.inRange(mHsvMat, fLowerBoundHSV, fUpperBoundHSV, fMaskHSV);

            fMask = fMaskHSV;

            Imgproc.dilate(fMask, fDilatedMask, new Mat());

            List <MatOfPoint> contoursFinger = new List <MatOfPoint>();

            Imgproc.findContours(fDilatedMask, contoursFinger, fHierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

            if (contoursFinger.Count == 0)
            {
                FingerContour = null;
                return;
            }

            // Find max contour area
            double     maxArea        = 0;
            MatOfPoint biggestContour = null;

            foreach (MatOfPoint each in contoursFinger)
            {
                MatOfPoint wrapper = each;
                double     area    = Imgproc.contourArea(wrapper);
                if (area > maxArea)
                {
                    maxArea        = area;
                    biggestContour = each;
                }
            }
            if (maxArea < 130)
            {
                FingerContour = null;
                return;
            }

            //Debug.Log("Finger contour area" + maxArea.ToString());

            MatOfPoint2f contours_res2f = new MatOfPoint2f();

            MatOfPoint2f biggestContour2f = new MatOfPoint2f(biggestContour.toArray());

            Imgproc.approxPolyDP(biggestContour2f, contours_res2f, 3, true);
            FingerContour = new MatOfPoint(contours_res2f.toArray());
            contours_res2f.Dispose();
            biggestContour2f.Dispose();
            if (Imgproc.contourArea(FingerContour) > mMinContourArea * maxArea)
            {
                Core.multiply(FingerContour, new Scalar(4, 4), FingerContour);
            }
        }
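The Scalar(4, 4) in the final multiply undoes the two pyrDown calls at the top of ProcessFinger: each pyrDown halves both image dimensions, so contour coordinates found at quarter resolution must be scaled by 2^2 = 4 to land back on the full-resolution frame. A minimal sketch of that bookkeeping (the generalized factor is my own illustration; same OpenCV for Unity API):

        // Each pyrDown halves width and height, so after N levels the factor
        // back to full resolution is 2^N; ProcessFinger uses N = 2, hence (4, 4).
        int    pyrDownLevels = 2;
        double scale         = System.Math.Pow(2, pyrDownLevels);
        Core.multiply(FingerContour, new Scalar(scale, scale), FingerContour);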
Example #3
        public static bool isHeart(List <Point> shape)
        {
            //Check number of vertices
            if (shape.Count < 20)
            {
                return(false);
            }

            MatOfPoint shape_area = new MatOfPoint();

            shape_area.fromList(shape);

            MatOfPoint2f shape_area2f = new MatOfPoint2f(shape_area.toArray());

            //   if (Imgproc.contourArea(shape_area) > 6000)
            //       return false;

            double area  = Imgproc.contourArea(shape_area);
            double perim = Imgproc.arcLength(shape_area2f, true);
            double ratio = area / perim;

            if (ratio < 18 || ratio > 23)
            {
                return(false);
            }

            for (int i = 1; i < shape.Count; i++)
            {
                if (distanceTwoPoints(shape[i - 1], shape[i]) > 20)
                {
                    return(true);
                }
            }
            return(false);
        }
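A quick sanity check on the area-to-perimeter gate (my own arithmetic, not from the source): for an ideal circle the ratio works out to r / 2, so the accepted band of 18 to 23 corresponds to blobs on the scale of a 36 to 46 px radius.

        // Back-of-envelope check: for a circle of radius r,
        // area / perimeter = (PI * r * r) / (2 * PI * r) = r / 2,
        // so ratio 18..23  <=>  r roughly 36..46 px.
        static double RatioForCircle(double r)
        {
            return (System.Math.PI * r * r) / (2 * System.Math.PI * r); // == r / 2
        }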
Example #4
    // Start is called before the first frame update
    IEnumerator Start() // Start() can run as a coroutine, letting us wait for the first camera frame
    {
        var devices = WebCamTexture.devices;

        webCamDevice  = devices[0];
        webCamTexture = new WebCamTexture(webCamDevice.name);
        webCamTexture.Play();

        // Wait for the camera to deliver its first frame; didUpdateThisFrame
        // only changes between Unity frames, so we must yield while waiting
        // (a tight while (true) here would spin forever).
        while (!webCamTexture.didUpdateThisFrame)
        {
            yield return null;
        }
        OnInited();

        List <Point> points = new List <Point>();

        points.Add(new Point(0, 260));
        points.Add(new Point(880, 260));
        points.Add(new Point(880, 0));
        points.Add(new Point(0, 0));
        MatOfPoint2f pts2 = new MatOfPoint2f();

        pts2.fromList(points);
        double[] p1_1 = pts2.get(0, 0);
        print(string.Join(" ", p1_1));
        double[] p3_1 = pts2.get(2, 0);
        print(string.Join(" ", p3_1));
        double[] p2_1 = pts2.get(1, 0);
        print(string.Join(" ", p2_1));
    }
        // Use this for initialization
        void Start()
        {
            displayCameraPreviewToggle.isOn = displayCameraPreview;
            useSeparateDetectionToggle.isOn = useSeparateDetection;
            displayAxesToggle.isOn          = displayAxes;
            displayHeadToggle.isOn          = displayHead;
            displayEffectsToggle.isOn       = displayEffects;

            imageOptimizationHelper  = gameObject.GetComponent <ImageOptimizationHelper> ();
            webCamTextureToMatHelper = gameObject.GetComponent <HololensCameraStreamToMatHelper> ();
            #if NETFX_CORE
            webCamTextureToMatHelper.frameMatAcquired += OnFrameMatAcquired;
            #endif
            webCamTextureToMatHelper.Initialize();

            rectangleTracker = new RectangleTracker();
//            faceLandmarkDetector = new FaceLandmarkDetector (DlibFaceLandmarkDetector.Utils.getFilePath ("sp_human_face_68.dat"));
            faceLandmarkDetector = new FaceLandmarkDetector(DlibFaceLandmarkDetector.Utils.getFilePath("sp_human_face_68_for_mobile.dat"));

            // The 3D coordinates (in mm) of the detection object in real-world space, corresponding to the pixel coordinates.
            objectPoints = new MatOfPoint3f(
                new Point3(-34, 90, 83),  //l eye (Interpupillary breadth)
                new Point3(34, 90, 83),   //r eye (Interpupillary breadth)
                new Point3(0.0, 50, 120), //nose (Nose top)
                new Point3(-26, 15, 83),  //l mouth (Mouth breadth)
                new Point3(26, 15, 83),   //r mouth (Mouth breadth)
                new Point3(-79, 90, 0.0), //l ear (Bitragion breadth)
                new Point3(79, 90, 0.0)   //r ear (Bitragion breadth)
                );

            imagePoints = new MatOfPoint2f();
            rotMat      = new Mat(3, 3, CvType.CV_64FC1);
        }
        // Use this for initialization
        void Start()
        {
            //set 3d face object points.
            objectPoints = new MatOfPoint3f(new Point3(-31, 72, 86),            //l eye
                                            new Point3(31, 72, 86),             //r eye
                                            new Point3(0, 40, 114),             //nose
                                            new Point3(-20, 15, 90),            //l mouth
                                            new Point3(20, 15, 90)              //r mouth
//                                          ,
//                                          new Point3 (-70, 60, -9),           //l ear
//                                          new Point3 (70, 60, -9)             //r ear
                                            );
            imagePoints = new MatOfPoint2f();
            rvec        = new Mat();
            tvec        = new Mat();
            rotM        = new Mat(3, 3, CvType.CV_64FC1);

            //initialize FaceTracker
            faceTracker = new FaceTracker(Utils.getFilePath("tracker_model.json"));
            //initialize FaceTrackerParams
            faceTrackerParams = new FaceTrackerParams();

            webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper> ();
            webCamTextureToMatHelper.Init();

            autoResetModeToggle.isOn = autoResetMode;
        }
Example #7
    public bool GetPosition(Mat frame, bool isKeyboardFound)
    {
        Mat frameProc = new Mat(); //frame.rows(), frame.cols(), CvType.CV_16UC3
        Mat frameMask = new Mat();
        Mat hierarchy = new Mat();

        Imgproc.cvtColor(frame, frameProc, Imgproc.COLOR_BGR2HSV);
        Scalar lowerB = new Scalar(HueLower, SatLower, ValLower);
        Scalar upperB = new Scalar(HueUpper, SatUpper, ValUpper);

        Core.inRange(frameProc, lowerB, upperB, frameMask);
        Core.bitwise_and(frame, frame, frameProc, frameMask);
        //Imgproc.bilateralFilter(frameProc, frameProc, 9, 50, 100);
        Imgproc.morphologyEx(frameProc, frameProc, Imgproc.MORPH_OPEN, Mat.ones(5, 5, CvType.CV_8U));
        Imgproc.dilate(frameProc, frameProc, Mat.ones(5, 5, CvType.CV_8U));          //Mat.ones(5, 5, CvType.CV_8U), anchor: new Point(-1, -1), iteration:2
        Imgproc.cvtColor(frameProc, frameProc, Imgproc.COLOR_BGR2GRAY);

        List <MatOfPoint> contoursList = new List <MatOfPoint>();

        Imgproc.findContours(frameProc, contoursList, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);

        int count = 0;

        foreach (MatOfPoint contour in contoursList)
        {
            MatOfPoint2f approx   = new MatOfPoint2f();
            MatOfPoint2f contourf = new MatOfPoint2f(contour.toArray());
            Imgproc.approxPolyDP(contourf, approx, 0.01 * Imgproc.arcLength(contourf, true), true);
            //print(approx.dump());
            if (approx.rows() == 4 && Imgproc.contourArea(contour) >= min_area)
            {
                count++;
                if (count >= 2)
                {
                    continue;
                }
                else
                {
                    OpenCVForUnity.CoreModule.Rect track_win = Imgproc.boundingRect(approx);
                    TrackWindow = new int[] { track_win.x, track_win.y, track_win.width, track_win.height };
                    // Skip candidates whose bounding box lies within 5 px of the frame border
                    // (x extent checked against the width, y extent against the height).
                    if (frame.width() - 5 < TrackWindow[0] + TrackWindow[2] &&
                        TrackWindow[0] + TrackWindow[2] <= frame.width() ||
                        0 <= TrackWindow[0] && TrackWindow[0] < 5 ||
                        frame.height() - 5 < TrackWindow[1] + TrackWindow[3] &&
                        TrackWindow[1] + TrackWindow[3] <= frame.height() ||
                        0 <= TrackWindow[1] && TrackWindow[1] < 5)
                    {
                        continue;
                    }
                    else
                    {
                        Approx  = approx;
                        Contour = contour;
                        return(true); // 'isKeyboardFound' is passed by value, so assigning to it had no effect
                    }
                }
            }
        }
        return(false);
    }
Example #8
    private Scalar colorRed = new Scalar(255, 0, 0, 125); // Red color

    void Start()
    {
        for (int i = 0; i < WebCamTexture.devices.Length; i++)
        {
            Debug.Log(WebCamTexture.devices[i].name);
        }

        mCamera       = new WebCamTexture();
        matOpFlowThis = new Mat();
        matOpFlowPrev = new Mat();
        MOPcorners    = new MatOfPoint();
        mMOP2fptsThis = new MatOfPoint2f();
        mMOP2fptsPrev = new MatOfPoint2f();
        mMOP2fptsSafe = new MatOfPoint2f();
        mMOBStatus    = new MatOfByte();
        mMOFerr       = new MatOfFloat();

        mCamera.Play();

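        // NOTE: WebCamTexture reports a 16x16 size until the first frame arrives
        // (see the didUpdateThisFrame waits in the examples below), so
        // mCamera.width/height here may still be placeholder values.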
        rgbaMat = new Mat(mCamera.height, mCamera.width, CvType.CV_8UC4);
        texture = new Texture2D(mCamera.width, mCamera.height, TextureFormat.RGBA32, false);
        colors  = new Color32[mCamera.width * mCamera.height];

        GetComponent <Renderer>().material.mainTexture = texture;
    }
Example #9
    public virtual void Draw(Mat src, Mat dst)
    {
        var points      = new MatOfPoint2f();
        var patternSize = new Size((int)SizeX, (int)SizeY);

        var found = false;

        switch (boardType)
        {
        case BoardType.ChessBoard:
            found = Calib3d.findChessboardCorners(src, patternSize, points, Calib3d.CALIB_CB_ADAPTIVE_THRESH | Calib3d.CALIB_CB_FAST_CHECK | Calib3d.CALIB_CB_NORMALIZE_IMAGE);

            break;

        case BoardType.CirclesGrid:
            found = Calib3d.findCirclesGrid(src, patternSize, points, Calib3d.CALIB_CB_SYMMETRIC_GRID);
            break;

        case BoardType.AsymmetricCirclesGrid:
            found = Calib3d.findCirclesGrid(src, patternSize, points, Calib3d.CALIB_CB_ASYMMETRIC_GRID);
            break;
        }

        if (found)
        {
            if (boardType == BoardType.ChessBoard)
            {
                Imgproc.cornerSubPix(src, points, new Size(5, 5), new Size(-1, -1), new TermCriteria(TermCriteria.EPS + TermCriteria.COUNT, 30, 0.1));
            }

            Calib3d.drawChessboardCorners(dst, patternSize, points, found);
        }
    }
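The corners collected by Draw are the raw material for calibration. A hedged sketch of that next step (not part of this example; objectPointsList/imagePointsList are hypothetical per-view accumulators of MatOfPoint3f/MatOfPoint2f, imageSize is the capture resolution, and Calib3d.calibrateCamera is the standard OpenCV for Unity API):

        Mat cameraMatrix = new Mat();
        Mat distCoeffs   = new Mat();
        List<Mat> rvecs  = new List<Mat>();
        List<Mat> tvecs  = new List<Mat>();
        double rms = Calib3d.calibrateCamera(objectPointsList, imagePointsList, imageSize,
                                             cameraMatrix, distCoeffs, rvecs, tvecs);
        Debug.Log("reprojection RMS: " + rms); // around 1 px or below is commonly treated as acceptable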
Example #10
        public void FishEyeCalibrate()
        {
            var patternSize = new Size(10, 7);

            using (var image = Image("calibration/00.jpg"))
                using (var corners = new MatOfPoint2f())
                {
                    Cv2.FindChessboardCorners(image, patternSize, corners);

                    var objectPointsArray = Create3DChessboardCorners(patternSize, 1.0f).ToArray();
                    var imagePointsArray  = corners.ToArray();

                    using (var objectPoints = MatOfPoint3f.FromArray(objectPointsArray))
                        using (var imagePoints = MatOfPoint2f.FromArray(imagePointsArray))
                            using (var cameraMatrix = new MatOfDouble(Mat.Eye(3, 3, MatType.CV_64FC1)))
                                using (var distCoeffs = new MatOfDouble())
                                {
                                    var rms = Cv2.FishEye.Calibrate(new[] { objectPoints }, new[] { imagePoints }, image.Size(), cameraMatrix,
                                                                    distCoeffs, out var rotationVectors, out var translationVectors,
                                                                    FishEyeCalibrationFlags.None);

                                    var distCoeffValues = distCoeffs.ToArray();
                                    Assert.Equal(55.15, rms, 2);
                                    Assert.Contains(distCoeffValues, d => Math.Abs(d) > 1e-20);
                                    Assert.NotEmpty(rotationVectors);
                                    Assert.NotEmpty(translationVectors);
                                }
                }
        }
Example #11
    private void FindHomographyPoints(out MatOfPoint2f matDst, out MatOfPoint2f matObj)
    {
        var corner1ScreenPoint = Cam.WorldToScreenPoint(Corner1.position);
        var corner2ScreenPoint = Cam.WorldToScreenPoint(Corner2.position);
        var corner3ScreenPoint = Cam.WorldToScreenPoint(Corner3.position);
        var corner4ScreenPoint = Cam.WorldToScreenPoint(Corner4.position);

        corner1ScreenPoint.y = Cam.pixelHeight - corner1ScreenPoint.y;
        corner2ScreenPoint.y = Cam.pixelHeight - corner2ScreenPoint.y;
        corner3ScreenPoint.y = Cam.pixelHeight - corner3ScreenPoint.y;
        corner4ScreenPoint.y = Cam.pixelHeight - corner4ScreenPoint.y;

        var srcPoints = new List <Point>
        {
            new Point(corner2ScreenPoint.x, corner2ScreenPoint.y),
            new Point(corner1ScreenPoint.x, corner1ScreenPoint.y),
            new Point(corner4ScreenPoint.x, corner4ScreenPoint.y),
            new Point(corner3ScreenPoint.x, corner3ScreenPoint.y),
        };
        var dstPoints = new List <Point>
        {
            new Point(0, _drawingPlaceMat.height()),
            new Point(_drawingPlaceMat.width(), _drawingPlaceMat.height()),
            new Point(_drawingPlaceMat.width(), 0),
            new Point(0, 0),
        };

        matObj = new MatOfPoint2f(srcPoints.ToArray());
        matDst = new MatOfPoint2f(dstPoints.ToArray());
    }
Example #12
        private void Run()
        {
            //set 3d face object points.
            objectPoints = new MatOfPoint3f(new Point3(-31, 72, 86), //l eye
                                            new Point3(31, 72, 86),  //r eye
                                            new Point3(0, 40, 114),  //nose
                                            new Point3(-20, 15, 90), //l mouth
                                            new Point3(20, 15, 90)   //r mouth
//                                          ,
//                                          new Point3 (-70, 60, -9), //l ear
//                                          new Point3 (70, 60, -9)   //r ear
                                            );
            imagePoints = new MatOfPoint2f();
            rvec        = new Mat();
            tvec        = new Mat();
            rotM        = new Mat(3, 3, CvType.CV_64FC1);

            //initialize FaceTracker
            faceTracker = new FaceTracker(tracker_model_json_filepath);
            //initialize FaceTrackerParams
            faceTrackerParams = new FaceTrackerParams();

            cascade = new CascadeClassifier();
            cascade.load(haarcascade_frontalface_alt_xml_filepath);
//            if (cascade.empty())
//            {
//                Debug.LogError("cascade file is not loaded. Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
//            }



            webCamTextureToMatHelper.Initialize();
        }
Example #13
        // Use this for initialization
        void Start()
        {
            // AppControl.control.Load();

            roiPointList = new List <Point> ();
            termination  = new TermCriteria(TermCriteria.EPS | TermCriteria.COUNT, 10, 1);

            webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper> ();
            webCamTextureToMatHelper.Init(OnWebCamTextureToMatHelperInited, OnWebCamTextureToMatHelperDisposed);

            // Calibration variables
            patternsize         = new Size(9, 6);     // width = 9, height = 6 (inner corners per chessboard row and column)
            squaresize          = 22f;
            pointbuf            = new MatOfPoint2f(); //filled with the corners detected by findChessboardCorners
            objectpointbuf      = new MatOfPoint3f(); //filled with the corresponding 3D reference corners of the board
            imagePoints         = new List <Mat>();
            objectPoints        = new List <Mat>();
            calibrationComplete = false;

            imageSize    = new Size(480, 640);
            cameraMatrix = new Mat();
            distCoeffs   = new Mat();
            rvecs        = new List <Mat>();
            tvecs        = new List <Mat>();

            // Set the "lastInterval" to the system time
            lastInterval = Time.realtimeSinceStartup;
        }
Example #14
        public void BuildPatternFromImage(Mat image, Pattern pattern)
        {
            // Store original image in pattern structure
            pattern.size  = new Size(image.Cols, image.Rows);
            pattern.frame = image.Clone();
            GetGray(image, pattern.grayImg);

            // Build 2d and 3d contours (the 3d contour lies in the XY plane since the pattern is planar)
            List <Point2f> points2dList = new List <Point2f>(4);
            List <Point3f> points3dList = new List <Point3f>(4);

            // Image dimensions
            float w = image.Cols;
            float h = image.Rows;

            // Image-space corners:
            points2dList.Add(new Point2f(0, 0));
            points2dList.Add(new Point2f(w, 0));
            points2dList.Add(new Point2f(w, h));
            points2dList.Add(new Point2f(0, h));

            pattern.points2d = MatOfPoint2f.FromArray(points2dList);

            // Normalized dimensions (a unit square centered on the origin):
            points3dList.Add(new Point3f(-0.5f, -0.5f, 0));
            points3dList.Add(new Point3f(+0.5f, -0.5f, 0));
            points3dList.Add(new Point3f(+0.5f, +0.5f, 0));
            points3dList.Add(new Point3f(-0.5f, +0.5f, 0));

            pattern.points3d = MatOfPoint3f.FromArray(points3dList);

            ExtractFeatures(pattern.grayImg, ref pattern.keypoints, pattern.descriptors);
            Train(pattern);
        }
    private void GetCubies(List <MatOfPoint> contours, Mat imgMat, int index, List <Cubies> cubies)
    {
        MatOfPoint2f matOfPoint2f = new MatOfPoint2f();
        MatOfPoint2f approxCurve  = new MatOfPoint2f();
        MatOfPoint   approx       = new MatOfPoint();

        foreach (var contour in contours)
        {
            matOfPoint2f.fromList(contour.toList());
            Imgproc.approxPolyDP(matOfPoint2f, approxCurve, 0.1 * Imgproc.arcLength(matOfPoint2f, true), true);

            try
            {
                approxCurve.convertTo(approx, CvType.CV_32S);
                OpenCVForUnity.Rect rect = Imgproc.boundingRect(approx);

                if (approx.total() == 4)
                {
                    cubies.Add(new Cubies(rect.x, rect.y, colorsList[index]));
                    Imgproc.rectangle(imgMat, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(255, 40, 150), 2);
                }
            }
            catch (ArgumentOutOfRangeException) { }
        }

        print("Number of cubies: " + cubies.Count);
    }
Example #16
        public static void TransformTv()
        {
            Mat img    = Cv2.ImRead(@"C:\prog\consoleHomography\ConsoleApp1\ConsoleApp1\images\input\in.jpg");
            Mat imgOut = new Mat();

            Point2f[] source =
            {
                new Point2f(62f,   59f),
                new Point2f(420f, 112f),
                new Point2f(425f, 300f),
                new Point2f(50f, 265f)
            };
            MatOfPoint2f src = MatOfPoint2f.FromArray(source);

            Point2f[] destination =
            {
                new Point2f(0f,                0f),
                new Point2f(img.Width,         0f),
                new Point2f(img.Width, img.Height),
                new Point2f(0f,        img.Height)
            };
            MatOfPoint2f dest = MatOfPoint2f.FromArray(destination);

            Mat m = Cv2.FindHomography(src, dest);

            Cv2.WarpPerspective(img, imgOut, m, img.Size());

            string imgName = $"{DateTime.Now:dd_MM_yyyy_HH_mm_ss}_out.jpg";

            Cv2.ImWrite(@"C:\prog\consoleHomography\ConsoleApp1\ConsoleApp1\images\output\" + imgName, imgOut);
        }
Example #17
    void HomographyTransform(int i)
    {
        // Init homography result Mat
        homoMat_array[i] = new Mat(480, 640, CvType.CV_8UC1);

        // Init regular point array
        reg_point_array[0] = new Point(0.0, HOMOGRAPHY_HEIGHT);
        reg_point_array[1] = new Point(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT);
        reg_point_array[2] = new Point(0.0, 0.0);
        reg_point_array[3] = new Point(HOMOGRAPHY_WIDTH, 0.0);

        // Extract the face points corresponding to reg_points
        Point[] out_point_array = new Point[4];
        for (int j = 0; j < 4; j++)       // j :: face point count
        {
            int src_i = face_index[i, j];
            out_point_array[j] = proj_point_array[src_i];
        }

        MatOfPoint2f regPoints = new MatOfPoint2f(reg_point_array);
        MatOfPoint2f outPoints = new MatOfPoint2f(out_point_array);

        Mat Homo_Mat = Calib3d.findHomography(regPoints, outPoints);

        Imgproc.warpPerspective(rectMat_array[i], homoMat_array[i], Homo_Mat, new Size(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT));
    }
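Since exactly four correspondences are passed here, findHomography has no outliers to reject and reduces to the direct four-point solve; the following one-liner (my note, same OpenCV for Unity API) would produce the same matrix up to scale:

        Mat Homo_Mat = Imgproc.getPerspectiveTransform(regPoints, outPoints);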
Example #18
        private void Run()
        {
            //set 3d face object points.
            objectPoints = new MatOfPoint3f(new Point3(-31, 72, 86), //l eye
                                            new Point3(31, 72, 86),  //r eye
                                            new Point3(0, 40, 114),  //nose
                                            new Point3(-20, 15, 90), //l mouth
                                            new Point3(20, 15, 90),  //r mouth
                                            new Point3(-70, 60, -9), //l ear
                                            new Point3(70, 60, -9)   //r ear
                                            );
            imagePoints = new MatOfPoint2f();
            rvec        = new Mat();
            tvec        = new Mat();
            rotM        = new Mat(3, 3, CvType.CV_64FC1);

            //initialize FaceTracker
            faceTracker = new FaceTracker(tracker_model_json_filepath);
            //initialize FaceTrackerParams
            faceTrackerParams = new FaceTrackerParams();

            cascade = new CascadeClassifier();
            cascade.load(haarcascade_frontalface_alt_xml_filepath);
            //if (cascade.empty())
            //{
            //  Debug.LogError("cascade file is not loaded. Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            //}


#if UNITY_ANDROID && !UNITY_EDITOR
            // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
            webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
            webCamTextureToMatHelper.Initialize();
        }
Example #19
        // vector_vector_Point2f
        public static void Mat_to_vector_vector_Point2f(Mat m, List <MatOfPoint2f> pts)
        {
            if (m != null)
            {
                m.ThrowIfDisposed();
            }

            if (pts == null)
            {
                throw new CvException("Output List can't be null");
            }

            if (m == null)
            {
                throw new CvException("Input Mat can't be null");
            }

            List <Mat> mats = new List <Mat>(m.rows());

            Mat_to_vector_Mat(m, mats);
            foreach (Mat mi in mats)
            {
                MatOfPoint2f pt = new MatOfPoint2f(mi);
                pts.Add(pt);
                mi.release();
            }
            mats.Clear();
        }
        private double computeReprojectionErrors(List <Mat> objectPoints,
                                                 List <Mat> rvecs, List <Mat> tvecs, Mat perViewErrors)
        {
            MatOfPoint2f cornersProjected = new MatOfPoint2f();
            double       totalError       = 0;
            double       error;

            float[] viewErrors = new float[objectPoints.Count];

            MatOfDouble distortionCoefficients = new MatOfDouble(mDistortionCoefficients);
            int         totalPoints            = 0;

            for (int i = 0; i < objectPoints.Count; i++)
            {
                MatOfPoint3f points = new MatOfPoint3f(objectPoints[i]);
                Calib3d.Calib3d.ProjectPoints(points, rvecs[i], tvecs[i],
                                              mCameraMatrix, distortionCoefficients, cornersProjected);
                error = Core.Core.Norm(mCornersBuffer[i], cornersProjected, Core.Core.NormL2);

                int n = objectPoints[i].Rows();
                viewErrors[i] = (float)Math.Sqrt(error * error / n);
                totalError   += error * error;
                totalPoints  += n;
            }
            perViewErrors.Create(objectPoints.Count, 1, CvType.Cv32fc1);
            perViewErrors.Put(0, 0, viewErrors);

            return(Math.Sqrt(totalError / totalPoints));
        }
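For reference, the quantity computed above is a pooled RMS: per view it is sqrt(error^2 / n), and the return value pools all views as sqrt(sum of error^2 / total points). A tiny numeric sketch (invented numbers):

            // Two views: squared errors 8.0 over 4 points and 18.0 over 9 points
            // pool to sqrt((8.0 + 18.0) / (4 + 9)) = sqrt(2.0), roughly 1.414.
            double pooledRms = Math.Sqrt((8.0 + 18.0) / (4 + 9));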
Example #21
        // Use this for initialization
        void Start()
        {
            isUsingSeparateDetectionToggle.isOn = isUsingSeparateDetection;

            isShowingAxesToggle.isOn    = isShowingAxes;
            isShowingHeadToggle.isOn    = isShowingHead;
            isShowingEffectsToggle.isOn = isShowingEffects;

            webCamTextureToMatHelper = gameObject.GetComponent <OptimizationWebCamTextureToMatHelper> ();
            webCamTextureToMatHelper.Init();

            rectangleTracker     = new RectangleTracker();
            faceLandmarkDetector = new FaceLandmarkDetector(DlibFaceLandmarkDetector.Utils.getFilePath("shape_predictor_68_face_landmarks.dat"));

            // The 3D coordinates (in mm) of the detection object in real-world space, corresponding to the pixel coordinates.
            objectPoints = new MatOfPoint3f(
                new Point3(-31, 72, 86), //l eye (Interpupillary breadth)
                new Point3(31, 72, 86),  //r eye (Interpupillary breadth)
                new Point3(0, 40, 114),  //nose (Nose top)
                new Point3(-20, 15, 90), //l mouth (Mouth breadth)
                new Point3(20, 15, 90),  //r mouth (Mouth breadth)
                new Point3(-69, 76, -2), //l ear (Bitragion breadth)
                new Point3(69, 76, -2)   //r ear (Bitragion breadth)
                );

            imagePoints = new MatOfPoint2f();
            rvec        = new Mat();
            tvec        = new Mat();
            rotMat      = new Mat(3, 3, CvType.CV_64FC1);
        }
Example #22
        // Taken from http://docs.opencv.org/3.0-beta/doc/tutorials/calib3d/camera_calibration/camera_calibration.html
        float computeReprojectionErrors(List <Mat> objectPoints, List <Mat> imagePoints, List <Mat> rvecs, List <Mat> tvecs, Mat cameraMatrix, Mat distCoeffs, List <float> perViewErrors)
        {
            MatOfPoint2f imagePoints2 = new MatOfPoint2f();
            int          i, totalPoints = 0;
            float        totalErr = 0, err;

            int numItems = objectPoints.Count;

            for (i = 0; i < numItems; ++i)
            {
                MatOfPoint3f objectPointsi = new MatOfPoint3f(objectPoints[i]);
                MatOfPoint2f imagePointsi  = new MatOfPoint2f(imagePoints[i]);

                MatOfDouble distCoeffsv2 = new MatOfDouble(distCoeffs);
                Calib3d.projectPoints(objectPointsi, rvecs[i], tvecs[i], cameraMatrix, distCoeffsv2, imagePoints2);

                err = norml2(imagePointsi, imagePoints2);              // difference

                Size temp = objectPoints[i].size();
                int  n    = (int)temp.height;
                perViewErrors.Add(Mathf.Sqrt(err * err / (n)));                   // save for this view
                totalErr    += err * err;                                         // sum it up
                totalPoints += (int)n;
            }

            return(Mathf.Sqrt(totalErr / totalPoints));              // calculate the arithmetical mean
        }
    void Rectify(ref Point[] face_point_array, int i)
    {
        Debug.Log("R: Starting");
        homoMat_array[i] = new Mat(480, 640, CvType.CV_8UC1);

        reg_point_array[0] = new Point(0.0, HOMOGRAPHY_HEIGHT);
        reg_point_array[1] = new Point(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT);
        reg_point_array[2] = new Point(0.0, 0.0);
        reg_point_array[3] = new Point(HOMOGRAPHY_WIDTH, 0.0);

        Debug.Log("R: reg_point_array populated");

        MatOfPoint2f srcPoints = new MatOfPoint2f(face_point_array);
        MatOfPoint2f regPoints = new MatOfPoint2f(reg_point_array);

        // Debug.Log("R: src and reg points instantiated");

        Debug.LogFormat("Rectify Face Points; {0} \n {1} \n {2} \n {3}",
                        face_point_array[0], face_point_array[1], face_point_array[2], face_point_array[3]);

        // Creating the H Matrix
        Mat Homo_Mat = Calib3d.findHomography(srcPoints, regPoints);

        Debug.Log("R: H Matrix Instantiated");

        Imgproc.warpPerspective(cached_initMat, homoMat_array[i], Homo_Mat, new Size(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT));

        Debug.Log("R: image rectified");
    }
Example #24
        // Use this for initialization
        public override void Setup()
        {
            //set 3d face object points.
            objectPoints68 = new MatOfPoint3f(
                new Point3(-34, 90, 83),  //l eye (Interpupillary breadth)
                new Point3(34, 90, 83),   //r eye (Interpupillary breadth)
                new Point3(0.0, 50, 120), //nose (Nose top)
                new Point3(-26, 15, 83),  //l mouth (Mouth breadth)
                new Point3(26, 15, 83),   //r mouth (Mouth breadth)
                new Point3(-79, 90, 0.0), //l ear (Bitragion breadth)
                new Point3(79, 90, 0.0)   //r ear (Bitragion breadth)
                );
            objectPoints5 = new MatOfPoint3f(
                new Point3(-23, 90, 83), //l eye (Inner corner of the eye)
                new Point3(23, 90, 83),  //r eye (Inner corner of the eye)
                new Point3(-50, 90, 80), //l eye (Tail of the eye)
                new Point3(50, 90, 80),  //r eye (Tail of the eye)
                new Point3(0.0, 50, 120) //nose (Nose top)
                );
            imagePoints = new MatOfPoint2f();


            float width  = 640;
            float height = 480;


            //set cameraparam
            int    max_d = (int)Mathf.Max(width, height);
            double fx    = max_d;
            double fy    = max_d;
            double cx    = width / 2.0f;
            double cy    = height / 2.0f;

            camMatrix = new Mat(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, fx);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, cx);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, fy);
            camMatrix.put(1, 2, cy);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            Debug.Log("camMatrix " + camMatrix.dump());


            distCoeffs = new MatOfDouble(0, 0, 0, 0);
            Debug.Log("distCoeffs " + distCoeffs.dump());


            invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
            Debug.Log("invertYM " + invertYM.ToString());

            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
            Debug.Log("invertZM " + invertZM.ToString());


            didUpdateHeadRotation = false;
        }
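Setup() builds the intrinsics (fx = fy = max(width, height), principal point at the image center, zero distortion; a common guess for an uncalibrated camera) but stops short of the pose solve. A hedged sketch of that continuation, using the same OpenCV for Unity API and the fields initialized above:

            // Hypothetical continuation: once imagePoints holds the 7 detected
            // landmarks (same order as objectPoints68), recover the head pose.
            Mat rvec = new Mat();
            Mat tvec = new Mat();
            Calib3d.solvePnP(objectPoints68, imagePoints, camMatrix, distCoeffs, rvec, tvec);
            // rvec/tvec map the model coordinates (mm) into the camera frame.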
Example #25
    void Start()
    {
        srcMat = Imgcodecs.imread(Application.dataPath + "/Resources/aragaki.jpg", 1);
        Imgproc.cvtColor(srcMat, srcMat, Imgproc.COLOR_BGR2RGB);

        dstMat = srcMat.clone();

        ///* Affine transform
        Point        srcPoint0 = new Point(0, 0);
        Point        srcPoint1 = new Point(srcMat.width() - 1, 0);
        Point        srcPoint2 = new Point(0, srcMat.height() - 1);
        MatOfPoint2f srcTri    = new MatOfPoint2f(new Point[3] {
            srcPoint0, srcPoint1, srcPoint2
        });
        Point        dstPoint0 = new Point(0, srcMat.height() * 0.33d);
        Point        dstPoint1 = new Point(srcMat.width() * 0.85d, srcMat.height() * 0.25d);
        Point        dstPoint2 = new Point(srcMat.width() * 0.15d, srcMat.height() * 0.7d);
        MatOfPoint2f dstTri    = new MatOfPoint2f(new Point[3] {
            dstPoint0, dstPoint1, dstPoint2
        });

        warpMat = Imgproc.getAffineTransform(srcTri, dstTri);
        Imgproc.warpAffine(srcMat, dstMat, warpMat, new Size(dstMat.width(), dstMat.height()));
        //*/

        /* Rotation transform
         * //copy the whole canvas
         * dstMat.copyTo(srcMat);
         * Point center = new Point(srcMat.width() / 2, srcMat.height() / 2);
         * double angle = -50.0d;
         * double scale = -0.6d;
         * rotMat = Imgproc.getRotationMatrix2D(center, angle, scale);
         * Imgproc.warpAffine(srcMat, dstMat, rotMat, new Size(dstMat.width(), dstMat.height()));
         */

        /*
         * //Perspective transform
         * Point srcPoint0 = new Point(0, 0);
         * Point srcPoint1 = new Point(srcMat.width() - 1, 0);
         * Point srcPoint2 = new Point(0, srcMat.height() - 1);
         * Point srcPoint3 = new Point(srcMat.width() - 1, srcMat.height() - 1);
         * MatOfPoint2f srcTri = new MatOfPoint2f(new Point[4] { srcPoint0, srcPoint1, srcPoint2, srcPoint3 });
         * Point dstPoint0 = new Point(srcMat.width() * 0.05d, srcMat.height() * 0.33d);
         * Point dstPoint1 = new Point(srcMat.width() * 0.9d, srcMat.height() * 0.25d);
         * Point dstPoint2 = new Point(srcMat.width() * 0.2d, srcMat.height() * 0.7d);
         * Point dstPoint3 = new Point(srcMat.width() * 0.8d, srcMat.height() * 0.9d);
         * MatOfPoint2f dstTri = new MatOfPoint2f(new Point[4] { dstPoint0, dstPoint1, dstPoint2, dstPoint3 });
         * warpMat = Imgproc.getPerspectiveTransform(srcTri, dstTri);
         * Imgproc.warpPerspective(srcMat, dstMat, warpMat, new Size(dstMat.width(), dstMat.height()));
         */

        Texture2D t2d = new Texture2D(dstMat.width(), dstMat.height());

        Utils.matToTexture2D(dstMat, t2d);
        Sprite sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

        dstImage.sprite         = sp;
        dstImage.preserveAspect = true;
    }
Example #26
        /// <summary>
        /// Add a pair of real space + image space points.
        /// Beware that calibration can fail if the pattern is not rotated to face forward, so that z is zero.
        /// Also ensure that the point order in the two point sets matches.
        /// </summary>
        /// <param name="patternRealModelSample">Must be measured in millimeters</param>
        /// <param name="patternImageSample"></param>
        public void AddSample(MatOfPoint3f patternRealModelSample, MatOfPoint2f patternImageSample)
        {
            //Debug.Log( "patternRealModelSample\n" + patternRealModelSample.dump() );
            //Debug.Log( "patternImageSample\n" + patternImageSample.dump() );

            _patternRealSamples.Add(patternRealModelSample.clone());
            _patternImageSamples.Add(patternImageSample.clone());
        }
Example #27
 public static void PutPoint2f(MatOfPoint2f target, int index, float x, float y)
 {
     if (index >= target.size().height)
     {
         throw new Exception("Your mat of point is not big enough. Use alloc(capacity) before setting elements.");
     }
     target.put(index, 0, x, y);
 }
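Pairing this helper with the alloc call shown in Example #28 below, a minimal usage sketch (values invented):

     var quad = new MatOfPoint2f();
     quad.alloc(4);                  // reserve 4 rows of CV_32FC2 first
     PutPoint2f(quad, 0, 0f, 0f);
     PutPoint2f(quad, 1, 100f, 0f);
     PutPoint2f(quad, 2, 100f, 50f);
     PutPoint2f(quad, 3, 0f, 50f);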
Example #28
 // Start is called before the first frame update
 void Start()
 {
     //Anders' magic numbers
     _drawingPlaceMat = new Mat(100, 150, CvType.CV_8UC4);
     _drawingPlaceMat.setTo(new Scalar(255, 255, 255));
     _imagePoints = new MatOfPoint2f();
     _imagePoints.alloc(4);
 }
Example #29
        void applyAffineTransform(Mat warpImage, Mat src, MatOfPoint2f srcTri, MatOfPoint2f dstTri)
        {
            // Given a pair of triangles, find the affine transform.
            Mat warpMat = Imgproc.getAffineTransform(srcTri, dstTri);

            // Apply the Affine Transform just found to the src image
            Imgproc.warpAffine(src, warpImage, warpMat, warpImage.size(), Imgproc.INTER_LINEAR, Core.BORDER_REFLECT_101, new Scalar(255, 0, 0, 255));
        }
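A hedged usage sketch (triangle coordinates invented; 'src' stands for any input image Mat, and the OpenCV for Unity signatures match those used above):

            MatOfPoint2f srcTri = new MatOfPoint2f(new Point(0, 0), new Point(50, 0), new Point(0, 50));
            MatOfPoint2f dstTri = new MatOfPoint2f(new Point(10, 5), new Point(60, 10), new Point(5, 55));
            Mat warpImage = Mat.zeros(100, 100, src.type()); // pre-sized output; warpAffine fills it
            applyAffineTransform(warpImage, src, srcTri, dstTri);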
Example #30
    public Mat create(Mat cameraMat, List <Region> regions)
    {
        if (regions.Count == 0)
        {
            return(null);
        }

        var regionGroups = new Dictionary <string, List <Region> >();

        for (var i = 0; i < regions.Count; i++)
        {
            var exist = isConcatenateGroup(regions[i], regionGroups);
            if (!exist)
            {
                regionGroups[regions[i].id] = new List <Region> {
                    regions[i]
                };
            }
        }

        Mat resultTexture = Mat.zeros(regions[0].parentSize, cameraMat.type());

        foreach (KeyValuePair <string, List <Region> > pair in regionGroups)
        {
            // Case where the group contains only one region
            if (pair.Value.Count == 1)
            {
                var tex = createForOne(cameraMat, pair.Value[0]);
                Core.add(resultTexture, tex, resultTexture);
                continue;
            }

            // Gather the contour points of every region in the group into a single list
            List <Point> points = new List <Point>();
            foreach (var region in pair.Value)
            {
                points.AddRange(region.candidate.contour2f.toList());
            }

            // Build the contour of the group
            MatOfPoint2f contours = new MatOfPoint2f();
            contours.fromList(points);

            // Get the RotatedRect of the group's contour
            var ellipseRotatedRect = Imgproc.fitEllipse(contours);

            // Perspective-transform the pre-prepared texture image onto the food region's RotatedRect
            Mat originalTexture = selectOriginalTextureImage();

            Mat texture = Mat.zeros(regions[0].parentSize, cameraMat.type());
            ARUtil.affineTransform(originalTexture, texture, ellipseRotatedRect);

            Core.add(resultTexture, texture, resultTexture);
        }

        return(coverBlackArea(resultTexture));
    }
Example #31
        /// <summary>
        /// projects points from the model coordinate space to the image coordinates. 
        /// Also computes derivatives of the image coordinates w.r.t the intrinsic 
        /// and extrinsic camera parameters
        /// </summary>
        /// <param name="objectPoints">Array of object points, 3xN/Nx3 1-channel or 
        /// 1xN/Nx1 3-channel, where N is the number of points in the view.</param>
        /// <param name="rvec">Rotation vector (3x1).</param>
        /// <param name="tvec">Translation vector (3x1).</param>
        /// <param name="cameraMatrix">Camera matrix (3x3)</param>
        /// <param name="distCoeffs">Input vector of distortion coefficients 
        /// (k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6]]) of 4, 5, or 8 elements. 
        /// If the vector is null, the zero distortion coefficients are assumed.</param>
        /// <param name="imagePoints">Output array of image points, 2xN/Nx2 1-channel 
        /// or 1xN/Nx1 2-channel</param>
        /// <param name="jacobian">Optional output 2Nx(10 + numDistCoeffs) jacobian matrix 
        /// of derivatives of image points with respect to components of the rotation vector, 
        /// translation vector, focal lengths, coordinates of the principal point and 
        /// the distortion coefficients. In the old interface different components of 
        /// the jacobian are returned via different output parameters.</param>
        /// <param name="aspectRatio">Optional “fixed aspect ratio” parameter. 
        /// If the parameter is not 0, the function assumes that the aspect ratio (fx/fy) 
        /// is fixed and correspondingly adjusts the jacobian matrix.</param>
        public static void ProjectPoints(IEnumerable<Point3f> objectPoints,
                                         double[] rvec, double[] tvec,
                                         double[,] cameraMatrix, double[] distCoeffs,
                                         out Point2f[] imagePoints,
                                         out double[,] jacobian,
                                         double aspectRatio = 0)
        {
            if (objectPoints == null)
                throw new ArgumentNullException(nameof(objectPoints));
            if (rvec == null)
                throw new ArgumentNullException(nameof(rvec));
            if (rvec.Length != 3)
                throw new ArgumentException("rvec.Length != 3");
            if (tvec == null)
                throw new ArgumentNullException(nameof(tvec));
            if (tvec.Length != 3)
                throw new ArgumentException("tvec.Length != 3");
            if (cameraMatrix == null)
                throw new ArgumentNullException(nameof(cameraMatrix));
            if (cameraMatrix.GetLength(0) != 3 || cameraMatrix.GetLength(1) != 3)
                throw new ArgumentException("cameraMatrix must be double[3,3]");

            Point3f[] objectPointsArray = EnumerableEx.ToArray(objectPoints);
            using (var objectPointsM = new Mat(objectPointsArray.Length, 1, MatType.CV_32FC3, objectPointsArray))
            using (var rvecM = new Mat(3, 1, MatType.CV_64FC1, rvec))
            using (var tvecM = new Mat(3, 1, MatType.CV_64FC1, tvec))
            using (var cameraMatrixM = new Mat(3, 3, MatType.CV_64FC1, cameraMatrix))
            using (var imagePointsM = new MatOfPoint2f())
            {
                var distCoeffsM = new Mat();
                if (distCoeffs != null)
                    distCoeffsM = new Mat(distCoeffs.Length, 1, MatType.CV_64FC1, distCoeffs);
                var jacobianM = new MatOfDouble();

                NativeMethods.calib3d_projectPoints_Mat(objectPointsM.CvPtr,
                    rvecM.CvPtr, tvecM.CvPtr, cameraMatrixM.CvPtr, distCoeffsM.CvPtr,
                    imagePointsM.CvPtr, jacobianM.CvPtr, aspectRatio);

                imagePoints = imagePointsM.ToArray();
                jacobian = jacobianM.ToRectangularArray();
            }
        }
Example #32
 /// <summary>
 /// Computes convex hull for a set of 2D points.
 /// </summary>
 /// <param name="points">The input 2D point set, represented by CV_32SC2 or CV_32FC2 matrix</param>
 /// <param name="clockwise">If true, the output convex hull will be oriented clockwise, 
 /// otherwise it will be oriented counter-clockwise. Here, the usual screen coordinate 
 /// system is assumed - the origin is at the top-left corner, x axis is oriented to the right, 
 /// and y axis is oriented downwards.</param>
 /// <returns>The output convex hull. It is a vector of points that form the 
 /// hull (must have the same type as the input points).</returns>
 public Point2f[] ConvexHullFloatPoints(InputArray points, bool clockwise = false)
 {
     var dst = new MatOfPoint2f();
     Cv2.ConvexHull(points, dst, clockwise, true);
     return dst.ToArray();
 }
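A small usage sketch in the same OpenCvSharp style as the other examples here (point values invented):

     Point2f[] pts = { new Point2f(0, 0), new Point2f(10, 2), new Point2f(4, 9), new Point2f(6, 3), new Point2f(2, 5) };
     MatOfPoint2f cloud = MatOfPoint2f.FromArray(pts);
     Point2f[] hull = ConvexHullFloatPoints(cloud); // counter-clockwise by default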
Example #33
        /// <summary>
        /// Raises the web cam texture to mat helper inited event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInited()
        {
            Debug.Log ("OnWebCamTextureToMatHelperInited");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

            colors  = new Color32[webCamTextureMat.cols() * webCamTextureMat.rows()];
            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);

            matOpFlowThis = new Mat();
            matOpFlowPrev = new Mat();
            MOPcorners    = new MatOfPoint();
            mMOP2fptsThis = new MatOfPoint2f();
            mMOP2fptsPrev = new MatOfPoint2f();
            mMOP2fptsSafe = new MatOfPoint2f();
            mMOBStatus    = new MatOfByte();
            mMOFerr       = new MatOfFloat();

            gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);

            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = gameObject.transform.localScale.x;
            float height = gameObject.transform.localScale.y;

            float widthScale  = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;
            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }

            gameObject.GetComponent<Renderer>().material.mainTexture = texture;

            //            webCamTextureToMatHelper.Play ();
        }
Example #34
        private IEnumerator init()
        {
            if (webCamTexture != null) {
                                webCamTexture.Stop ();
                                initDone = false;

                                rgbaMat.Dispose ();

                                matOpFlowThis.Dispose ();
                                matOpFlowPrev.Dispose ();
                                MOPcorners.Dispose ();
                                mMOP2fptsThis.Dispose ();
                                mMOP2fptsPrev.Dispose ();
                                mMOP2fptsSafe.Dispose ();
                                mMOBStatus.Dispose ();
                                mMOFerr.Dispose ();
                        }

                        // Checks how many and which cameras are available on the device
                        for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {

                                if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {

                                        Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                                        webCamDevice = WebCamTexture.devices [cameraIndex];

                                        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);

                                        break;
                                }

                        }

                        if (webCamTexture == null) {
                                webCamDevice = WebCamTexture.devices [0];
                                webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
                        }

                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

                        // Starts the camera
                        webCamTexture.Play ();
                        while (true) {
                                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                                #else
                                if (webCamTexture.didUpdateThisFrame) {
                                        #endif

                                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                                        Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                                        colors = new Color32[webCamTexture.width * webCamTexture.height];

                                        rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);

                                        matOpFlowThis = new Mat ();
                                        matOpFlowPrev = new Mat ();
                                        MOPcorners = new MatOfPoint ();
                                        mMOP2fptsThis = new MatOfPoint2f ();
                                        mMOP2fptsPrev = new MatOfPoint2f ();
                                        mMOP2fptsSafe = new MatOfPoint2f ();
                                        mMOBStatus = new MatOfByte ();
                                        mMOFerr = new MatOfFloat ();

                                        texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                                        gameObject.transform.eulerAngles = new Vector3 (0, 0, 0);
                                        #if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
                    gameObject.transform.eulerAngles = new Vector3 (0, 0, -90);
                                        #endif
            //										gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

                                        gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

            //										bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            //										float scaleX = 1;
            //										float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
            //										if (webCamTexture.videoRotationAngle == 270)
            //												scaleY = -1.0f;
            //										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

                                        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                                        #if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
                                        Camera.main.orthographicSize = webCamTexture.width / 2;
                                        #else
                                        Camera.main.orthographicSize = webCamTexture.height / 2;
                                        #endif

                                        initDone = true;

                                        break;
                                } else {
                                        yield return 0;
                                }
                        }
                }

                // Update is called once per frame
                void Update ()
                {
                        if (!initDone)
                                return;

                        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                        #else
                        if (webCamTexture.didUpdateThisFrame) {
                                #endif

                                Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

                                if (webCamTexture.videoVerticallyMirrored) {
                                        if (webCamDevice.isFrontFacing) {
                                                if (webCamTexture.videoRotationAngle == 0) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                } else if (webCamTexture.videoRotationAngle == 90) {
                                                        Core.flip (rgbaMat, rgbaMat, 0);
                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                }
                                        } else {
                                                if (webCamTexture.videoRotationAngle == 90) {

                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, -1);
                                                }
                                        }
                                } else {
                                        if (webCamDevice.isFrontFacing) {
                                                if (webCamTexture.videoRotationAngle == 0) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                } else if (webCamTexture.videoRotationAngle == 90) {
                                                        Core.flip (rgbaMat, rgbaMat, 0);
                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                }
                                        } else {
                                                if (webCamTexture.videoRotationAngle == 90) {
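                                                        // no flip needed for this rotation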

                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, -1);
                                                }
                                        }
                                }

                                if (mMOP2fptsPrev.rows () == 0) {

                                        // first time through the loop so we need prev and this mats
                                        // plus prev points
                                        // get this mat
                                        Imgproc.cvtColor (rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                                        // copy that to prev mat
                                        matOpFlowThis.copyTo (matOpFlowPrev);

                                        // get prev corners
                                        Imgproc.goodFeaturesToTrack (matOpFlowPrev, MOPcorners, iGFFTMax, 0.05, 20);
                                        mMOP2fptsPrev.fromArray (MOPcorners.toArray ());

                                        // keep a safe copy of these corners
                                        mMOP2fptsPrev.copyTo (mMOP2fptsSafe);
                                } else {
                                        // we've been through before so
                                        // this mat is valid. Copy it to prev mat
                                        matOpFlowThis.copyTo (matOpFlowPrev);

                                        // get this mat
                                        Imgproc.cvtColor (rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                                        // get the corners for this mat
                                        Imgproc.goodFeaturesToTrack (matOpFlowThis, MOPcorners, iGFFTMax, 0.05, 20);
                                        mMOP2fptsThis.fromArray (MOPcorners.toArray ());

                                        // retrieve the corners from the prev mat
                                        // (saves calculating them again)
                                        mMOP2fptsSafe.copyTo (mMOP2fptsPrev);

                                        // and save these corners for the next pass through

                                        mMOP2fptsThis.copyTo (mMOP2fptsSafe);
                                }

                                /*
                                Parameters:
                                prevImg first 8-bit input image
                                nextImg second input image
                                prevPts vector of 2D points for which the flow needs to be found; point coordinates must be single-precision floating-point numbers.
                                nextPts output vector of 2D points (with single-precision floating-point coordinates) containing the calculated new positions of input features in the second image; when OPTFLOW_USE_INITIAL_FLOW flag is passed, the vector must have the same size as in the input.
                                status output status vector (of unsigned chars); each element of the vector is set to 1 if the flow for the corresponding features has been found, otherwise, it is set to 0.
                                err output vector of errors; each element of the vector is set to an error for the corresponding feature, type of the error measure can be set in flags parameter; if the flow wasn't found then the error is not defined (use the status parameter to find such cases).
                                */
                                Video.calcOpticalFlowPyrLK (matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

                                if (!mMOBStatus.empty ()) {
                                        List<Point> cornersPrev = mMOP2fptsPrev.toList ();
                                        List<Point> cornersThis = mMOP2fptsThis.toList ();
                                        List<byte> byteStatus = mMOBStatus.toList ();

                                        int x = 0;
                                        int y = byteStatus.Count;

                                        for (x = 0; x < y; x++) {
                                                if (byteStatus [x] == 1) {
                                                        Point pt = cornersThis [x];
                                                        Point pt2 = cornersPrev [x];

                                                        Core.circle (rgbaMat, pt, 5, colorRed, iLineThickness - 1);

                                                        Core.line (rgbaMat, pt, pt2, colorRed, iLineThickness);
                                                }
                                        }
                                }

                                Utils.matToTexture2D (rgbaMat, texture, colors);

                                gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                        }

                }

                void OnDisable ()
                {
                        webCamTexture.Stop ();
                }

                void OnGUI ()
                {
                        float screenScale = Screen.width / 240.0f;
                        Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
                        GUI.matrix = scaledMatrix;

                        GUILayout.BeginVertical ();
                        if (GUILayout.Button ("back")) {
                                Application.LoadLevel ("OpenCVForUnitySample");
                        }
                        if (GUILayout.Button ("change camera")) {
                                isFrontFacing = !isFrontFacing;
                                StartCoroutine (init ());
                        }

                        GUILayout.EndVertical ();
                }
            }
        }
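The Update loop above alternates between detecting fresh corners with goodFeaturesToTrack and tracking them with pyramidal Lucas-Kanade. Stripped of the WebCamTexture plumbing, the core pattern reduces to the sketch below (a minimal outline against the same OpenCV for Unity API; the Mat parameters are placeholders supplied by the caller):

        // Sketch: track corners from prevGray to currGray and draw the motion vectors.
        static void SparseOpticalFlowSketch (Mat prevGray, Mat currGray, Mat drawTarget)
        {
            MatOfPoint corners = new MatOfPoint ();
            Imgproc.goodFeaturesToTrack (prevGray, corners, 100, 0.05, 20);

            MatOfPoint2f prevPts = new MatOfPoint2f (corners.toArray ());
            MatOfPoint2f nextPts = new MatOfPoint2f ();
            MatOfByte status = new MatOfByte ();
            MatOfFloat err = new MatOfFloat ();

            Video.calcOpticalFlowPyrLK (prevGray, currGray, prevPts, nextPts, status, err);

            byte[] found = status.toArray ();
            Point[] p0 = prevPts.toArray ();
            Point[] p1 = nextPts.toArray ();
            for (int i = 0; i < found.Length; i++) {
                if (found [i] == 1)   // flow was found for this feature
                    Core.line (drawTarget, p0 [i], p1 [i], new Scalar (255, 0, 0, 255), 2);
            }
        }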
        /// <summary>
        /// Converts a screen-space point to the quad's local coordinate space.
        /// </summary>
        /// <returns>The converted point.</returns>
        /// <param name="screenPoint">Screen point.</param>
        /// <param name="quad">Quad displaying the camera texture.</param>
        /// <param name="cam">Camera used for the conversion.</param>
        static Point convertScreenPoint(Point screenPoint, GameObject quad, Camera cam)
        {
                        Vector2 tl;
                        Vector2 tr;
                        Vector2 br;
                        Vector2 bl;

                        tl = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x - quad.transform.localScale.x / 2, quad.transform.localPosition.y + quad.transform.localScale.y / 2, quad.transform.localPosition.z));
                        tr = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x + quad.transform.localScale.x / 2, quad.transform.localPosition.y + quad.transform.localScale.y / 2, quad.transform.localPosition.z));
                        br = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x + quad.transform.localScale.x / 2, quad.transform.localPosition.y - quad.transform.localScale.y / 2, quad.transform.localPosition.z));
                        bl = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x - quad.transform.localScale.x / 2, quad.transform.localPosition.y - quad.transform.localScale.y / 2, quad.transform.localPosition.z));

                        Mat srcRectMat = new Mat (4, 1, CvType.CV_32FC2);
                        Mat dstRectMat = new Mat (4, 1, CvType.CV_32FC2);

                        srcRectMat.put (0, 0, tl.x, tl.y, tr.x, tr.y, br.x, br.y, bl.x, bl.y);
                        dstRectMat.put (0, 0, 0.0, 0.0, quad.transform.localScale.x, 0.0, quad.transform.localScale.x, quad.transform.localScale.y, 0.0, quad.transform.localScale.y);

                        Mat perspectiveTransform = Imgproc.getPerspectiveTransform (srcRectMat, dstRectMat);

                        //						Debug.Log ("srcRectMat " + srcRectMat.dump ());
                        //						Debug.Log ("dstRectMat " + dstRectMat.dump ());
                        //						Debug.Log ("perspectiveTransform " + perspectiveTransform.dump ());

                        MatOfPoint2f srcPointMat = new MatOfPoint2f (screenPoint);
                        MatOfPoint2f dstPointMat = new MatOfPoint2f ();

                        Core.perspectiveTransform (srcPointMat, dstPointMat, perspectiveTransform);

                        //						Debug.Log ("srcPointMat " + srcPointMat.dump ());
                        //						Debug.Log ("dstPointMat " + dstPointMat.dump ());

                        return dstPointMat.toArray () [0];
        }
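convertScreenPoint builds a homography from the quad's four projected corners to its local scale with Imgproc.getPerspectiveTransform, then pushes the single input point through it with Core.perspectiveTransform. A hypothetical call site that maps a mouse click into mat/texture coordinates might look like this:

            // Hypothetical usage: convert a click on the video quad into mat coordinates.
            Point touchPoint = new Point (Input.mousePosition.x, Input.mousePosition.y);
            Point matPoint = convertScreenPoint (touchPoint, gameObject, Camera.main);
            Debug.Log ("clicked mat coordinate " + matPoint);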
        /// <summary>
        /// Performs the hand pose estimation process on the given RGBA frame.
        /// </summary>
        public void handPoseEstimationProcess(Mat rgbaMat)
        {
            //Imgproc.blur(mRgba, mRgba, new Size(5,5));
                        Imgproc.GaussianBlur (rgbaMat, rgbaMat, new OpenCVForUnity.Size (3, 3), 1, 1);
                        //Imgproc.medianBlur(mRgba, mRgba, 3);

                        if (!isColorSelected)
                                return;

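                        // note: getContours () returns the detector's internal list, which process () refreshes in place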
                        List<MatOfPoint> contours = detector.getContours ();
                        detector.process (rgbaMat);

            //						Debug.Log ("Contours count: " + contours.Count);

                        if (contours.Count <= 0) {
                                return;
                        }

                        RotatedRect rect = Imgproc.minAreaRect (new MatOfPoint2f (contours [0].toArray ()));

                        double boundWidth = rect.size.width;
                        double boundHeight = rect.size.height;
                        int boundPos = 0;

                        for (int i = 1; i < contours.Count; i++) {
                                rect = Imgproc.minAreaRect (new MatOfPoint2f (contours [i].toArray ()));
                                if (rect.size.width * rect.size.height > boundWidth * boundHeight) {
                                        boundWidth = rect.size.width;
                                        boundHeight = rect.size.height;
                                        boundPos = i;
                                }
                        }

                        OpenCVForUnity.Rect boundRect = Imgproc.boundingRect (new MatOfPoint (contours [boundPos].toArray ()));
                        Imgproc.rectangle (rgbaMat, boundRect.tl (), boundRect.br (), CONTOUR_COLOR_WHITE, 2, 8, 0);

            //						Debug.Log (
            //						" Row start [" +
            //								(int)boundRect.tl ().y + "] row end [" +
            //								(int)boundRect.br ().y + "] Col start [" +
            //								(int)boundRect.tl ().x + "] Col end [" +
            //								(int)boundRect.br ().x + "]");

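                        // cutoff line 70% of the way down the bounding box: defects whose far
                        // point falls below it are treated as wrist/palm noise, not finger gaps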
                        double a = boundRect.br ().y - boundRect.tl ().y;
                        a = a * 0.7;
                        a = boundRect.tl ().y + a;

            //						Debug.Log (
            //						" A [" + a + "] br y - tl y = [" + (boundRect.br ().y - boundRect.tl ().y) + "]");

                        //Core.rectangle( mRgba, boundRect.tl(), boundRect.br(), CONTOUR_COLOR, 2, 8, 0 );
                        Imgproc.rectangle (rgbaMat, boundRect.tl (), new Point (boundRect.br ().x, a), CONTOUR_COLOR, 2, 8, 0);

                        MatOfPoint2f pointMat = new MatOfPoint2f ();
                        Imgproc.approxPolyDP (new MatOfPoint2f (contours [boundPos].toArray ()), pointMat, 3, true);
                        contours [boundPos] = new MatOfPoint (pointMat.toArray ());

                        MatOfInt hull = new MatOfInt ();
                        MatOfInt4 convexDefect = new MatOfInt4 ();
                        Imgproc.convexHull (new MatOfPoint (contours [boundPos].toArray ()), hull);

                        if (hull.toArray ().Length < 3)
                                return;

                        Imgproc.convexityDefects (new MatOfPoint (contours [boundPos].toArray ()), hull, convexDefect);

                        List<MatOfPoint> hullPoints = new List<MatOfPoint> ();
                        List<Point> listPo = new List<Point> ();
                        for (int j = 0; j < hull.toList().Count; j++) {
                                listPo.Add (contours [boundPos].toList () [hull.toList () [j]]);
                        }

                        MatOfPoint e = new MatOfPoint ();
                        e.fromList (listPo);
                        hullPoints.Add (e);

                        List<MatOfPoint> defectPoints = new List<MatOfPoint> ();
                        List<Point> listPoDefect = new List<Point> ();
                        for (int j = 0; j < convexDefect.toList().Count; j = j+4) {
                                Point farPoint = contours [boundPos].toList () [convexDefect.toList () [j + 2]];
                                int depth = convexDefect.toList () [j + 3];
                                if (depth > threasholdSlider.value && farPoint.y < a) {
                                        listPoDefect.Add (contours [boundPos].toList () [convexDefect.toList () [j + 2]]);
                                }
            //								Debug.Log ("defects [" + j + "] " + convexDefect.toList () [j + 3]);
                        }

                        MatOfPoint e2 = new MatOfPoint ();
                        e2.fromList (listPoDefect);
                        defectPoints.Add (e2);

            //						Debug.Log ("hull: " + hull.toList ());
            //						Debug.Log ("defects: " + convexDefect.toList ());

                        Imgproc.drawContours (rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);

            //                      int defectsTotal = (int)convexDefect.total();
            //						Debug.Log ("Defect total " + defectsTotal);

                        this.numberOfFingers = listPoDefect.Count;
                        if (this.numberOfFingers > 5)
                                this.numberOfFingers = 5;

            //						Debug.Log ("numberOfFingers " + numberOfFingers);

            //						Core.putText (mRgba, "" + numberOfFingers, new Point (mRgba.cols () / 2, mRgba.rows () / 2), Core.FONT_HERSHEY_PLAIN, 4.0, new Scalar (255, 255, 255, 255), 6, Core.LINE_AA, false);
                        numberOfFingersText.text = numberOfFingers.ToString ();

                        foreach (Point p in listPoDefect) {
                                Imgproc.circle (rgbaMat, p, 6, new Scalar (255, 0, 255, 255), -1);
                        }
        }
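The finger count above comes from convexity defects: the largest contour is simplified, its convex hull is computed, and each sufficiently deep valley between hull points is counted as a gap between fingers. Reduced to its essentials (a sketch over the same OpenCV for Unity calls; depthThreshold stands in for the sample's slider value):

        // Sketch: estimate a finger count from a single hand contour.
        static int CountFingersSketch (MatOfPoint handContour, double depthThreshold)
        {
            MatOfInt hull = new MatOfInt ();
            Imgproc.convexHull (handContour, hull);
            if (hull.toArray ().Length < 3)
                return 0;

            MatOfInt4 defects = new MatOfInt4 ();
            Imgproc.convexityDefects (handContour, hull, defects);

            // Each defect is 4 ints: start index, end index, farthest-point index,
            // and depth (fixed-point, i.e. the true depth scaled by 256).
            int[] d = defects.toArray ();
            int fingers = 0;
            for (int j = 0; j < d.Length; j += 4) {
                if (d [j + 3] > depthThreshold)
                    fingers++;
            }
            return Mathf.Min (fingers, 5);
        }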
Example #37
        private IEnumerator init()
        {
            if (webCamTexture != null) {
                                webCamTexture.Stop ();
                                initDone = false;

                                rgbaMat.Dispose ();

                                matOpFlowThis.Dispose ();
                                matOpFlowPrev.Dispose ();
                                MOPcorners.Dispose ();
                                mMOP2fptsThis.Dispose ();
                                mMOP2fptsPrev.Dispose ();
                                mMOP2fptsSafe.Dispose ();
                                mMOBStatus.Dispose ();
                                mMOFerr.Dispose ();
                        }

                        // Checks how many and which cameras are available on the device
                        for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {

                                if (WebCamTexture.devices [cameraIndex].isFrontFacing == shouldUseFrontFacing) {

                                        Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                                        webCamDevice = WebCamTexture.devices [cameraIndex];

                                        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);

                                        break;
                                }

                        }

                        if (webCamTexture == null) {
                                webCamDevice = WebCamTexture.devices [0];
                                webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
                        }

                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

                        // Starts the camera
                        webCamTexture.Play ();
                        while (true) {
                                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                                #else
                                if (webCamTexture.didUpdateThisFrame) {
                                        #if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
                                        while (webCamTexture.width <= 16) {
                                                webCamTexture.GetPixels32 ();
                                                yield return new WaitForEndOfFrame ();
                                        }
                                        #endif
                                #endif

                                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                                        Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                                        colors = new Color32[webCamTexture.width * webCamTexture.height];

                                        rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);

                                        matOpFlowThis = new Mat ();
                                        matOpFlowPrev = new Mat ();
                                        MOPcorners = new MatOfPoint ();
                                        mMOP2fptsThis = new MatOfPoint2f ();
                                        mMOP2fptsPrev = new MatOfPoint2f ();
                                        mMOP2fptsSafe = new MatOfPoint2f ();
                                        mMOBStatus = new MatOfByte ();
                                        mMOFerr = new MatOfFloat ();

                                        texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                                        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                                        updateLayout ();

                                        screenOrientation = Screen.orientation;
                                        initDone = true;

                                        break;
                                } else {
                                        yield return 0;
                                }
                        }
                }
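Both init () coroutines follow the same skeleton: pick a device, Play () the texture, then spin until the first real frame arrives before allocating any Mats. Ignoring the platform-specific #if branches, the pattern is (a sketch reusing the fields declared by these sample classes):

        private IEnumerator InitSketch ()
        {
                webCamTexture = new WebCamTexture (WebCamTexture.devices [0].name, width, height);
                webCamTexture.Play ();

                // Until the first frame arrives, width/height may still report the
                // 16x16 placeholder on some platforms, so wait before allocating.
                while (!webCamTexture.didUpdateThisFrame)
                        yield return null;

                colors = new Color32[webCamTexture.width * webCamTexture.height];
                rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
                texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);
                initDone = true;
        }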
Example #38
        private IEnumerator init()
        {
            if (webCamTexture != null) {
                                webCamTexture.Stop ();
                                initDone = false;

                                rgbaMat.Dispose ();
                                hsvMat.Dispose ();
                                if (roiHistMat != null)
                                        roiHistMat.Dispose ();
                                roiPointList.Clear ();
                        }

                        // Checks how many and which cameras are available on the device
                        for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {

                                if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {

                                        Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                                        webCamDevice = WebCamTexture.devices [cameraIndex];

                                        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);

                                        break;
                                }

                        }

                        if (webCamTexture == null) {
                                webCamDevice = WebCamTexture.devices [0];
                                webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
                        }

                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

                        // Starts the camera
                        webCamTexture.Play ();

                        while (true) {
                                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                                #else
                                if (webCamTexture.didUpdateThisFrame) {
                                        #endif

                                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                                        Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                                        colors = new Color32[webCamTexture.width * webCamTexture.height];

                                        rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
                                        hsvMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC3);

                                        texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                                        gameObject.transform.eulerAngles = new Vector3 (0, 0, 0);
                                        #if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
                                        gameObject.transform.eulerAngles = new Vector3 (0, 0, -90);
                                        #endif

            //										gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

                                        gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

            //										bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            //										float scaleX = 1;
            //										float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
            //										if (webCamTexture.videoRotationAngle == 270)
            //												scaleY = -1.0f;
            //										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

                                        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                                        #if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
                                        Camera.main.orthographicSize = (((float)Screen.height/(float)Screen.width) * (float)webCamTexture.height) / 2.0f;
                                        #else
                                        Camera.main.orthographicSize = webCamTexture.height / 2;
                                        #endif

                                        initDone = true;

                                        break;
                                } else {
                                        yield return 0;
                                }
                        }

                }

                // Update is called once per frame
                void Update ()
                {
                        if (!initDone)
                                return;

                        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                        #else
                        if (webCamTexture.didUpdateThisFrame) {
                        #endif

                                Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

                                if (webCamTexture.videoVerticallyMirrored) {
                                        if (webCamDevice.isFrontFacing) {
                                                if (webCamTexture.videoRotationAngle == 0) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                } else if (webCamTexture.videoRotationAngle == 90) {
                                                        Core.flip (rgbaMat, rgbaMat, 0);
                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                }
                                        } else {
                                                if (webCamTexture.videoRotationAngle == 90) {
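                                                        // no flip needed for this rotation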

                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, -1);
                                                }
                                        }
                                } else {
                                        if (webCamDevice.isFrontFacing) {
                                                if (webCamTexture.videoRotationAngle == 0) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                } else if (webCamTexture.videoRotationAngle == 90) {
                                                        Core.flip (rgbaMat, rgbaMat, 0);
                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                }
                                        } else {
                                                if (webCamTexture.videoRotationAngle == 90) {
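                                                        // no flip needed for this rotation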

                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, -1);
                                                }
                                        }
                                }

                                Imgproc.cvtColor (rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
                                Imgproc.cvtColor (hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);

                                Point[] points = roiPointList.ToArray ();

                                if (roiPointList.Count == 4) {

                                        using (Mat backProj = new Mat ()) {
                                                Imgproc.calcBackProject (new List<Mat> (new Mat[]{hsvMat}), new MatOfInt (0), roiHistMat, backProj, new MatOfFloat (0, 180), 1.0);

                                                RotatedRect r = Video.CamShift (backProj, roiRect, termination);
                                                r.points (points);
                                        }

                                        #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                                        //Touch
                                        int touchCount = Input.touchCount;
                                        if (touchCount == 1) {
                                                if (Input.GetTouch (0).phase == TouchPhase.Ended) {
                                                        roiPointList.Clear ();
                                                }
                                        }
                                        #else
                                        if (Input.GetMouseButtonUp (0)) {
                                                roiPointList.Clear ();
                                        }
                                        #endif
                                }

                                if (roiPointList.Count < 4) {

                                        #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                                        //Touch
                                        int touchCount = Input.touchCount;
                                        if (touchCount == 1) {
                                                Touch t = Input.GetTouch (0);
                                                if (t.phase == TouchPhase.Ended) {
                                                        roiPointList.Add (convertScreenPoint (new Point (t.position.x, t.position.y), gameObject, Camera.main));
                                                        //Debug.Log ("touch X " + t.position.x);
                                                        //Debug.Log ("touch Y " + t.position.y);

                                                        if (!(new OpenCVForUnity.Rect (0, 0, hsvMat.width (), hsvMat.height ()).contains (roiPointList [roiPointList.Count - 1]))) {
                                                                roiPointList.RemoveAt (roiPointList.Count - 1);
                                                        }
                                                }
                                        }
                                        #else
                                        //Mouse
                                        if (Input.GetMouseButtonUp (0)) {
                                                roiPointList.Add (convertScreenPoint (new Point (Input.mousePosition.x, Input.mousePosition.y), gameObject, Camera.main));
                                                //Debug.Log ("mouse X " + Input.mousePosition.x);
                                                //Debug.Log ("mouse Y " + Input.mousePosition.y);

                                                if (!(new OpenCVForUnity.Rect (0, 0, hsvMat.width (), hsvMat.height ()).contains (roiPointList [roiPointList.Count - 1]))) {
                                                        roiPointList.RemoveAt (roiPointList.Count - 1);
                                                }
                                        }
                                        #endif

                                        if (roiPointList.Count == 4) {

                                                using (MatOfPoint roiPointMat = new MatOfPoint (roiPointList.ToArray ())) {
                                                        roiRect = Imgproc.boundingRect (roiPointMat);
                                                }

                                                if (roiHistMat != null) {
                                                        roiHistMat.Dispose ();
                                                        roiHistMat = null;
                                                }
                                                roiHistMat = new Mat ();

                                                using (Mat roiHSVMat = new Mat(hsvMat, roiRect))
                                                using (Mat maskMat = new Mat ()) {

                                                        Imgproc.calcHist (new List<Mat> (new Mat[]{roiHSVMat}), new MatOfInt (0), maskMat, roiHistMat, new MatOfInt (16), new MatOfFloat (0, 180));
                                                        Core.normalize (roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);

            //														Debug.Log ("roiHist " + roiHistMat.ToString ());
                                                }
                                        }
                                }

                                if (points.Length < 4) {

                                        for (int i = 0; i < points.Length; i++) {
                                                Core.circle (rgbaMat, points [i], 6, new Scalar (0, 0, 255, 255), 2);
                                        }

                                } else {

                                        for (int i = 0; i < 4; i++) {
                                                Core.line (rgbaMat, points [i], points [(i + 1) % 4], new Scalar (255, 0, 0, 255), 2);
                                        }

                                        Core.rectangle (rgbaMat, roiRect.tl (), roiRect.br (), new Scalar (0, 255, 0, 255), 2);
                                }

                                Core.putText (rgbaMat, "PLEASE TOUCH 4 POINTS", new Point (5, 25), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

                                Utils.matToTexture2D (rgbaMat, texture, colors);

                                gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                        }

                }

                void OnDisable ()
                {
                        webCamTexture.Stop ();
                }

                void OnGUI ()
                {
                        float screenScale = Screen.width / 240.0f;
                        Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
                        GUI.matrix = scaledMatrix;

                        GUILayout.BeginVertical ();
                        if (GUILayout.Button ("back")) {
                                Application.LoadLevel ("OpenCVForUnitySample");
                        }
                        if (GUILayout.Button ("change camera")) {
                                isFrontFacing = !isFrontFacing;
                                StartCoroutine (init ());
                        }

                        GUILayout.EndVertical ();
                }

                /// <summary>
                /// Converts a screen-space point to the quad's local coordinate space.
                /// </summary>
                /// <returns>The converted point.</returns>
                /// <param name="screenPoint">Screen point.</param>
                /// <param name="quad">Quad displaying the camera texture.</param>
                /// <param name="cam">Camera used for the conversion.</param>
                static Point convertScreenPoint (Point screenPoint, GameObject quad, Camera cam)
                {
                        #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                        Vector2 tl = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x + quad.transform.localScale.y / 2, quad.transform.localPosition.y + quad.transform.localScale.x / 2, quad.transform.localPosition.z));
                        Vector2 tr = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x + quad.transform.localScale.y / 2, quad.transform.localPosition.y - quad.transform.localScale.x / 2, quad.transform.localPosition.z));
                        Vector2 br = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x - quad.transform.localScale.y / 2, quad.transform.localPosition.y - quad.transform.localScale.x / 2, quad.transform.localPosition.z));
                        Vector2 bl = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x - quad.transform.localScale.y / 2, quad.transform.localPosition.y + quad.transform.localScale.x / 2, quad.transform.localPosition.z));
                        #else
                        Vector2 tl = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x - quad.transform.localScale.x / 2, quad.transform.localPosition.y + quad.transform.localScale.y / 2, quad.transform.localPosition.z));
                        Vector2 tr = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x + quad.transform.localScale.x / 2, quad.transform.localPosition.y + quad.transform.localScale.y / 2, quad.transform.localPosition.z));
                        Vector2 br = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x + quad.transform.localScale.x / 2, quad.transform.localPosition.y - quad.transform.localScale.y / 2, quad.transform.localPosition.z));
                        Vector2 bl = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x - quad.transform.localScale.x / 2, quad.transform.localPosition.y - quad.transform.localScale.y / 2, quad.transform.localPosition.z));
                        #endif

                        Mat srcRectMat = new Mat (4, 1, CvType.CV_32FC2);
                        Mat dstRectMat = new Mat (4, 1, CvType.CV_32FC2);

                        srcRectMat.put (0, 0, tl.x, tl.y, tr.x, tr.y, br.x, br.y, bl.x, bl.y);
                        dstRectMat.put (0, 0, 0.0, 0.0, quad.transform.localScale.x, 0.0, quad.transform.localScale.x, quad.transform.localScale.y, 0.0, quad.transform.localScale.y);

                        Mat perspectiveTransform = Imgproc.getPerspectiveTransform (srcRectMat, dstRectMat);

            //						Debug.Log ("srcRectMat " + srcRectMat.dump ());
            //						Debug.Log ("dstRectMat " + dstRectMat.dump ());
            //						Debug.Log ("perspectiveTransform " + perspectiveTransform.dump ());

                        MatOfPoint2f srcPointMat = new MatOfPoint2f (screenPoint);
                        MatOfPoint2f dstPointMat = new MatOfPoint2f ();

                        Core.perspectiveTransform (srcPointMat, dstPointMat, perspectiveTransform);

            //						Debug.Log ("srcPointMat " + srcPointMat.dump ());
            //						Debug.Log ("dstPointMat " + dstPointMat.dump ());

                        return dstPointMat.toArray () [0];
                }
            }
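Example #38 is a CamShift tracker: a hue histogram of the user-selected ROI is back-projected onto every new frame, and Video.CamShift slides the search window toward the peak of that probability map. The per-frame core, extracted from the Update loop above (a sketch assuming hsvMat, roiHistMat, roiRect, and termination are initialized as in the sample):

                RotatedRect TrackStepSketch ()
                {
                        using (Mat backProj = new Mat ()) {
                                // Probability map: how well each pixel's hue matches the ROI histogram.
                                Imgproc.calcBackProject (new List<Mat> (new Mat[]{hsvMat}), new MatOfInt (0), roiHistMat, backProj, new MatOfFloat (0, 180), 1.0);

                                // Shift and resize the search window toward the densest region.
                                return Video.CamShift (backProj, roiRect, termination);
                        }
                }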
Example #39
		/// <summary>
		/// Finds quadrilateral marker candidates among the supplied contours.
		/// </summary>
		/// <param name="contours">Contours.</param>
		/// <param name="detectedMarkers">Output list of detected marker candidates.</param>
		void findCandidates (List<MatOfPoint> contours, List<Marker> detectedMarkers)
		{
				MatOfPoint2f approxCurve = new MatOfPoint2f ();
				
				List<Marker> possibleMarkers = new List<Marker> ();
		
				// For each contour, check whether it is a convex quadrilateral that is likely to be the marker
				for (int i=0; i<contours.Count; i++) {
						// Approximate to a polygon
						double eps = contours [i].toArray ().Length * 0.05;
						Imgproc.approxPolyDP (new MatOfPoint2f (contours [i].toArray ()), approxCurve, eps, true);

						Point[] approxCurveArray = approxCurve.toArray ();
			
						// We are interested only in polygons that contain exactly four points
						if (approxCurveArray.Length != 4)
								continue;
			
						// And they have to be convex
						if (!Imgproc.isContourConvex (new MatOfPoint (approxCurveArray)))
								continue;

			
						// Ensure that the distance between consecutive points is large enough
						float minDist = float.MaxValue;

						for (int p = 0; p < 4; p++) {
								Point side = new Point (approxCurveArray [p].x - approxCurveArray [(p + 1) % 4].x, approxCurveArray [p].y - approxCurveArray [(p + 1) % 4].y);
								float squaredSideLength = (float)side.dot (side);
								minDist = Mathf.Min (minDist, squaredSideLength);
						}
			
						// Check that distance is not very small
						if (minDist < m_minContourLengthAllowed)
								continue;
			
						// All tests are passed. Save marker candidate:
						Marker m = new Marker ();
						m.points = new MatOfPoint ();

						List<Point> markerPointsList = new List<Point> ();
			
						for (int p = 0; p<4; p++)
								markerPointsList.Add (new Point (approxCurveArray [p].x, approxCurveArray [p].y));


			
						// Sort the points in anti-clockwise order
						// Trace a line between the first and second point.
						// If the third point is at the right side, then the points are anti-clockwise
						Point v1 = new Point (markerPointsList [1].x - markerPointsList [0].x, markerPointsList [1].y - markerPointsList [0].y);
						Point v2 = new Point (markerPointsList [2].x - markerPointsList [0].x, markerPointsList [2].y - markerPointsList [0].y);
			
						double o = (v1.x * v2.y) - (v1.y * v2.x);
			
						if (o < 0.0) { // if the third point is on the left side, sort into anti-clockwise order
								Point tmp = markerPointsList [1];
								markerPointsList [1] = markerPointsList [3];
								markerPointsList [3] = tmp;

						}

						m.points.fromList (markerPointsList);
			
						possibleMarkers.Add (m);
				}
				approxCurve.Dispose ();

		        
				//Debug.Log ("possibleMarkers " + possibleMarkers.Count);
		
		
				// Remove elements whose corners are too close to each other.
				// First detect candidates for removal:
				List< Point > tooNearCandidates = new List<Point> ();
				for (int i=0; i<possibleMarkers.Count; i++) {
						Marker m1 = possibleMarkers [i];

						Point[] m1PointsArray = m1.points.toArray ();
			
						//calculate the average distance of each corner to the nearest corner of the other marker candidate
						for (int j=i+1; j<possibleMarkers.Count; j++) {
								Marker m2 = possibleMarkers [j];

								Point[] m2PointsArray = m2.points.toArray ();
				
								float distSquared = 0;
				
								for (int c = 0; c < 4; c++) {
										Point v = new Point (m1PointsArray [c].x - m2PointsArray [c].x, m1PointsArray [c].y - m2PointsArray [c].y);
										distSquared += (float)v.dot (v);
								}
				
								distSquared /= 4;
				
								if (distSquared < 100) {
										tooNearCandidates.Add (new Point (i, j));
								}
						}
				}
		
				// Mark for removal the element of the pair with smaller perimeter
				List<bool> removalMask = new List<bool> (possibleMarkers.Count);
				for (int i = 0; i < possibleMarkers.Count; i++) {
						removalMask.Add (false);
				}
		
				for (int i=0; i<tooNearCandidates.Count; i++) {

						float p1 = perimeter (possibleMarkers [(int)tooNearCandidates [i].x].points);
						float p2 = perimeter (possibleMarkers [(int)tooNearCandidates [i].y].points);
			
						int removalIndex;
						if (p1 > p2)
								removalIndex = (int)tooNearCandidates [i].y;
						else
								removalIndex = (int)tooNearCandidates [i].x;
			
						removalMask [removalIndex] = true;
				}
		
				// Return candidates
				detectedMarkers.Clear ();
				for (int i=0; i<possibleMarkers.Count; i++) {
						if (!removalMask [i])
								detectedMarkers.Add (possibleMarkers [i]);
				}
		}
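The candidate filter above boils down to three geometric predicates per contour: it approximates to exactly four points, the quad is convex, and its shortest side is long enough. As a standalone check (a sketch using the same OpenCV for Unity calls):

		// Sketch: does this contour look like a convex, marker-sized quadrilateral?
		static bool LooksLikeMarkerQuad (MatOfPoint contour, float minSideLengthSquared)
		{
				MatOfPoint2f approx = new MatOfPoint2f ();
				double eps = contour.toArray ().Length * 0.05;
				Imgproc.approxPolyDP (new MatOfPoint2f (contour.toArray ()), approx, eps, true);

				Point[] pts = approx.toArray ();
				if (pts.Length != 4)
						return false;
				if (!Imgproc.isContourConvex (new MatOfPoint (pts)))
						return false;

				float minDistSquared = float.MaxValue;
				for (int p = 0; p < 4; p++) {
						Point side = new Point (pts [p].x - pts [(p + 1) % 4].x, pts [p].y - pts [(p + 1) % 4].y);
						minDistSquared = Mathf.Min (minDistSquared, (float)side.dot (side));
				}
				return minDistSquared >= minSideLengthSquared;
		}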
Example #40
        /// <summary>
        /// performs perspective transformation of each element of multi-channel input matrix
        /// </summary>
        /// <param name="src">The source two-channel or three-channel floating-point array; 
        /// each element is 2D/3D vector to be transformed</param>
        /// <param name="m">3x3 or 4x4 transformation matrix</param>
        /// <returns>The destination array; it will have the same size and same type as src</returns>
        public static Point2f[] PerspectiveTransform(IEnumerable<Point2f> src, Mat m)
        {
            if (src == null)
                throw new ArgumentNullException("src");
            if (m == null)
                throw new ArgumentNullException("m");

            using (var srcMat = MatOfPoint2f.FromArray(src))
            using (var dstMat = new MatOfPoint2f())
            {
                NativeMethods.core_perspectiveTransform_Mat(srcMat.CvPtr, dstMat.CvPtr, m.CvPtr);
                return dstMat.ToArray();
            }
        }
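Typical usage of this OpenCvSharp wrapper is to compute a homography from four point pairs with Cv2.GetPerspectiveTransform and then map arbitrary points through it (a hypothetical example; the coordinates are placeholders):

            Point2f[] srcQuad = { new Point2f (0, 0), new Point2f (100, 0), new Point2f (100, 100), new Point2f (0, 100) };
            Point2f[] dstQuad = { new Point2f (10, 5), new Point2f (90, 0), new Point2f (95, 110), new Point2f (0, 100) };
            using (Mat h = Cv2.GetPerspectiveTransform (srcQuad, dstQuad))
            {
                Point2f[] mapped = PerspectiveTransform (new[] { new Point2f (50, 50) }, h);
            }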
Example #41
		/// <summary>
		/// Recognizes the markers.
		/// </summary>
		/// <param name="grayscale">Grayscale.</param>
		/// <param name="detectedMarkers">Detected markers.</param>
		void recognizeMarkers (Mat grayscale, List<Marker> detectedMarkers)
		{
				List<Marker> goodMarkers = new List<Marker> ();
		
				// Identify the markers
				for (int i=0; i<detectedMarkers.Count; i++) {
						Marker marker = detectedMarkers [i];

			
						// Find the perspective transformation that brings current marker to rectangular form
						Mat markerTransform = Imgproc.getPerspectiveTransform (new MatOfPoint2f (marker.points.toArray ()), m_markerCorners2d);
				

						// Transform image to get a canonical marker image
						Imgproc.warpPerspective (grayscale, canonicalMarkerImage, markerTransform, markerSize);
			
						MatOfInt nRotations = new MatOfInt (0);
						int id = Marker.getMarkerId (canonicalMarkerImage, nRotations, m_markerDesign);
						if (id != -1) {
								marker.id = id;
//				                Debug.Log ("id " + id);

								//sort the points so that they are always in the same order no matter the camera orientation
								List<Point> MarkerPointsList = marker.points.toList ();

								//				std::rotate(marker.points.begin(), marker.points.begin() + 4 - nRotations, marker.points.end());
								MarkerPointsList = MarkerPointsList.Skip (4 - nRotations.toArray () [0]).Concat (MarkerPointsList.Take (4 - nRotations.toArray () [0])).ToList ();

								marker.points.fromList (MarkerPointsList);
				
								goodMarkers.Add (marker);
						}
						nRotations.Dispose ();
				}

//				Debug.Log ("goodMarkers " + goodMarkers.Count);
		
				// Refine marker corners using sub pixel accuracy
				if (goodMarkers.Count > 0) {
						List<Point> preciseCornersPoint = new List<Point> (4 * goodMarkers.Count);
						for (int i = 0; i < preciseCornersPoint.Capacity; i++) {
								preciseCornersPoint.Add (new Point (0, 0));
						}
						

			
						for (int i=0; i<goodMarkers.Count; i++) {
								Marker marker = goodMarkers [i];

								List<Point> markerPointsList = marker.points.toList ();
				
								for (int c = 0; c <4; c++) {
										preciseCornersPoint [i * 4 + c] = markerPointsList [c];
								}
						}

						MatOfPoint2f preciseCorners = new MatOfPoint2f (preciseCornersPoint.ToArray ());

						TermCriteria termCriteria = new TermCriteria (TermCriteria.MAX_ITER | TermCriteria.EPS, 30, 0.01);
						Imgproc.cornerSubPix (grayscale, preciseCorners, new Size (5, 5), new Size (-1, -1), termCriteria);

						preciseCornersPoint = preciseCorners.toList ();
			
						// Copy refined corners position back to markers
						for (int i=0; i<goodMarkers.Count; i++) {
								Marker marker = goodMarkers [i];

								List<Point> markerPointsList = marker.points.toList ();
				
								for (int c=0; c<4; c++) {
										markerPointsList [c] = preciseCornersPoint [i * 4 + c];
								}
						}
						preciseCorners.Dispose ();
				}

				detectedMarkers.Clear ();
				detectedMarkers.AddRange (goodMarkers);

		}
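Two OpenCV idioms carry recognizeMarkers: warpPerspective with a transform from the detected quad to a canonical square, so the marker bits can be read off a fixed grid, and cornerSubPix to refine the integer corner positions. The warp step in isolation (a sketch with caller-supplied Mats):

		// Sketch: unwarp one detected quad into a canonical, axis-aligned marker image.
		static void UnwarpMarkerSketch (Mat grayscale, MatOfPoint2f detectedCorners, MatOfPoint2f canonicalCorners, Size markerSize, Mat canonicalImage)
		{
				Mat markerTransform = Imgproc.getPerspectiveTransform (detectedCorners, canonicalCorners);
				Imgproc.warpPerspective (grayscale, canonicalImage, markerTransform, markerSize);
				markerTransform.Dispose ();
		}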
Example #42
		/// <summary>
		/// Determines whether any point in the given list lies inside the contour.
		/// </summary>
		/// <returns><c>true</c> if at least one point of <paramref name="b"/> is inside the contour; otherwise, <c>false</c>.</returns>
		/// <param name="contour">Contour to test against.</param>
		/// <param name="b">Points to test.</param>
		bool isInto (MatOfPoint2f contour, List<Point> b)
		{
				for (int i=0; i<b.Count; i++) {
						if (Imgproc.pointPolygonTest (contour, b [i], false) > 0)
								return true;
			    
				}
				return false;
	
		}
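With measureDist set to false, Imgproc.pointPolygonTest returns +1, 0, or -1 for inside, on-edge, and outside, so the > 0 test above means "strictly inside". Passing true instead yields a signed pixel distance, which allows a tolerance-based variant (a sketch, not part of the original sample):

		// Sketch: true if any point of b is inside the contour or within maxDist pixels of it.
		bool isNearContour (MatOfPoint2f contour, List<Point> b, double maxDist)
		{
				for (int i = 0; i < b.Count; i++) {
						if (Imgproc.pointPolygonTest (contour, b [i], true) > -maxDist)
								return true;
				}
				return false;
		}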
Example #43
 // fixed FromArray behavior
 static Point2d[] MyPerspectiveTransform2(Point2f[] yourData, Mat transformationMatrix)
 {
     using (MatOfPoint2f s = MatOfPoint2f.FromArray(yourData))
     using (MatOfPoint2f d = new MatOfPoint2f())
     {
         Cv2.PerspectiveTransform(s, d, transformationMatrix);
         Point2f[] f = d.ToArray();
         return f.Select(Point2fToPoint2d).ToArray();
     }
 }
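MyPerspectiveTransform2 assumes a Point2fToPoint2d helper for the element-wise conversion; a minimal version, plus a hypothetical call site, might look like this:

 static Point2d Point2fToPoint2d (Point2f p)
 {
     return new Point2d (p.X, p.Y);
 }

 // Hypothetical call site:
 // Point2d[] warped = MyPerspectiveTransform2 (corners, homography);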