void Rectify(ref Point[] face_point_array, int i)
    {
        Debug.Log("R: Starting");
        homoMat_array[i] = new Mat(480, 640, CvType.CV_8UC1);

        reg_point_array[0] = new Point(0.0, HOMOGRAPHY_HEIGHT);
        reg_point_array[1] = new Point(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT);
        reg_point_array[2] = new Point(0.0, 0.0);
        reg_point_array[3] = new Point(HOMOGRAPHY_WIDTH, 0.0);

        Debug.Log("R: reg_point_array populated");

        MatOfPoint2f srcPoints = new MatOfPoint2f(face_point_array);
        MatOfPoint2f regPoints = new MatOfPoint2f(reg_point_array);

        // Debug.Log("R: src and reg points instantiated");

        Debug.LogFormat("Rectify Face Points; {0} \n {1} \n {2} \n {3}",
                        face_point_array[0], face_point_array[1], face_point_array[2], face_point_array[3]);

        // Creating the H Matrix
        Mat Homo_Mat = Calib3d.findHomography(srcPoints, regPoints);

        Debug.Log("R: H Matrix Instantiated");

        Imgproc.warpPerspective(cached_initMat, homoMat_array[i], Homo_Mat, new Size(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT));

        Debug.Log("R: image rectified");
    }
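Note: these snippets reference class members declared elsewhere. A minimal sketch of the declarations this Rectify variant appears to assume (names taken from the call sites; the sizes are guesses, not from the original source):

    // Hypothetical field declarations inferred from the snippet above.
    const int HOMOGRAPHY_WIDTH  = 640;
    const int HOMOGRAPHY_HEIGHT = 480;

    Mat cached_initMat;                      // latest camera frame
    Mat[] homoMat_array = new Mat[4];        // one rectified view per tracked face
    Point[] reg_point_array = new Point[4];  // canonical rectangle corners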
Example #2
    void HomographyTransform(int i)
    {
        // Init homography result Mat
        homoMat_array[i] = new Mat(480, 640, CvType.CV_8UC1);

        // Init regular point array
        reg_point_array[0] = new Point(0.0, HOMOGRAPHY_HEIGHT);
        reg_point_array[1] = new Point(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT);
        reg_point_array[2] = new Point(0.0, 0.0);
        reg_point_array[3] = new Point(HOMOGRAPHY_WIDTH, 0.0);

        // Extract face_points corresponding with reg_points
        Point[] out_point_array = new Point[4];
        for (int j = 0; j < 4; j++)       // j :: face point count
        {
            int src_i = face_index[i, j];
            out_point_array[j] = proj_point_array[src_i];
        }

        MatOfPoint2f regPoints = new MatOfPoint2f(reg_point_array);
        MatOfPoint2f outPoints = new MatOfPoint2f(out_point_array);

        Mat Homo_Mat = Calib3d.findHomography(regPoints, outPoints);

        Imgproc.warpPerspective(rectMat_array[i], homoMat_array[i], Homo_Mat, new Size(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT));
    }
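Note the reversed argument order compared with Example #1: findHomography(regPoints, outPoints) maps the canonical rectangle onto the projected face corners, so the warp re-projects the rectified texture in rectMat_array[i] back toward its on-screen position instead of rectifying it.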
Example #3
    // Update is called once per frame
    void Update()
    {
        MatDisplay.SetCameraFoV(41.5f);

        Image cameraImage = CameraDevice.Instance.GetCameraImage(Image.PIXEL_FORMAT.RGBA8888);

        if (cameraImage == null)
        {
            return;
        }
        if (_cameraImageMat == null)
        {
            //First frame -> generate Mat with same dimensions as camera feed
            _cameraImageMat = new Mat(cameraImage.Height, cameraImage.Width, CvType.CV_8UC4);
        }
        _cameraImageMat.put(0, 0, cameraImage.Pixels); // transferring image data to Mat

        if (FingerPointTarget.GetComponent <ImageTargetBehaviour>().CurrentStatus != TrackableBehaviour.Status.TRACKED)
        {
            MatDisplay.DisplayMat(_cameraImageMat, MatDisplaySettings.FULL_BACKGROUND);
            return;
        }

        FindHomographyPoints(out var matDst, out var matObj);
        var H = Calib3d.findHomography(matObj, matDst);

        try
        {
            var bWMat                   = GetBWSkinColor();
            var fingerTipCoor           = FindFingerTip(bWMat);
            var fingerPointInWorldSpace = FingerPointInWorldSpace(fingerTipCoor);
            FingerPlane.position = fingerPointInWorldSpace;

            var colorPixelValue = FindPixelValue(_cameraImageMat, Color.position);
            var drawPixelValue  = FindPixelValue(bWMat, Draw.position);

            if ((int)drawPixelValue.First() == 255)
            {
                //Debug.Log($"{colorPixelValue[0]}, {colorPixelValue[1]}, {colorPixelValue[2]}");
                //Debug.Log("Found Draw");
                //draw at finger pos

                var camMask = PaintCircle(fingerTipCoor);
                DrawMaskOnCanvas(camMask, H, colorPixelValue);
            }
        }
        catch
        {
            // Finger detection can fail on frames without a visible fingertip; skip drawing for that frame.
        }

        var blendTex = BlendMats(H, _cameraImageMat, _drawingPlaceMat);

        MatDisplay.DisplayMat(blendTex, MatDisplaySettings.FULL_BACKGROUND);
    }
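FindPixelValue is not shown in this snippet. A plausible minimal implementation, assuming its second argument is a position already expressed in pixel coordinates (the real helper may differ):

    // Hypothetical helper: sample a Mat at a pixel position stored in a Vector3.
    // Mat.get(row, col) returns the channel values at that pixel as double[].
    double[] FindPixelValue(Mat mat, Vector3 pixelPos)
    {
        return mat.get((int)pixelPos.y, (int)pixelPos.x);
    }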
Example #4
    // Warps cached_homoMat to outMat
    void HomographyTransform(ref Mat homoMat)
    {
        Corner_AR_Controller Homo_Controller = m_ARSessionManager.GetComponent <Corner_AR_Controller>();

        Point[] c2_scrpoints = Homo_Controller.GetScreenpoints(false);

        MatOfPoint2f initPoints = new MatOfPoint2f(regPointArray);
        MatOfPoint2f currPoints = new MatOfPoint2f(c2_scrpoints);

        Mat H = Calib3d.findHomography(initPoints, currPoints);

        Imgproc.warpPerspective(homoMat, outMat, H, new Size(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT));
        Core.flip(outMat, outMat, 0); // flipCode 0 = flip around the x-axis (vertical flip)
    }
Example #5
    private void refresh()
    {
        var defaultPoints = defaultCornerPoints.Select(e => e.point).ToArray();
        var destPoints    = CornerPoints.Select(e => e.point).ToArray();

        using (var defaultCornerMat = new MatOfPoint2f(defaultPoints))
            using (var destCornerMat = new MatOfPoint2f(destPoints))
                using (var defaultMat = new MatOfPoint2f(defaultVertices.Select(e => new Point(e.x, e.y)).ToArray()))
                    using (var destMat = new MatOfPoint2f(meshFilter.mesh.vertices.Select(e => new Point(e.x, e.y)).ToArray()))
                    {
                        var h = Calib3d.findHomography(defaultCornerMat, destCornerMat);
                        OpenCVForUnity.CoreModule.Core.perspectiveTransform(defaultMat, destMat, h);
                        var vertices = destMat.toList().Select(e => new Vector3((float)e.x, (float)e.y, 0f)).ToList();
                        meshFilter.mesh.SetVertices(vertices);
                    }
    }
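Unlike the image-warping examples, refresh applies the homography to points: Core.perspectiveTransform maps each vertex through H, whereas warpPerspective resamples pixels. For a single point the mapping is a matrix multiply followed by the projective division; a small sketch of the same arithmetic done by hand:

    // Apply a 3x3 homography h (CV_64FC1) to one point manually.
    Point ApplyHomography(Mat h, Point p)
    {
        double x = h.get(0, 0)[0] * p.x + h.get(0, 1)[0] * p.y + h.get(0, 2)[0];
        double y = h.get(1, 0)[0] * p.x + h.get(1, 1)[0] * p.y + h.get(1, 2)[0];
        double w = h.get(2, 0)[0] * p.x + h.get(2, 1)[0] * p.y + h.get(2, 2)[0];
        return new Point(x / w, y / w); // normalize homogeneous coordinates
    }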
Example #6
    void ComputerVisionAlgo(IntPtr greyscale)
    {
        Utils.copyToMat(greyscale, imageMat);

        inMat = imageMat;
        // Optional inversion of image pixel values (for detectors that expect dark features):
        // inMat = (Mat.ones(imageMat.rows(), imageMat.cols(), CvType.CV_8UC1) * 255) - imageMat;

        // Creating Detector (Yellow Circle)
        // MatOfKeyPoint keyMat = new MatOfKeyPoint();
        // SimpleBlobDetector detector = SimpleBlobDetector.create();

        double[] homo_points = m_ARSessionManager.GetComponent <AR_Controller>().GetHomopoints();

        // Display Homography Points
        // outMat = inMat;
        // Imgproc.circle(outMat, new Point(homo_points[0], homo_points[1]), 5, new Scalar(0.0, 0.0, 255.0));
        // Imgproc.circle(outMat, new Point(homo_points[2], homo_points[3]), 5, new Scalar(0.0, 0.0, 255.0));
        // Imgproc.circle(outMat, new Point(homo_points[4], homo_points[5]), 5, new Scalar(0.0, 0.0, 255.0));
        // Imgproc.circle(outMat, new Point(homo_points[6], homo_points[7]), 5, new Scalar(0.0, 0.0, 255.0));

        // Creating MatOfPoint2f arrays for Homography Points
        Point[] srcPointArray = new Point[4];
        for (int i = 0; i < 4; i++)
        {
            srcPointArray[i] = new Point(homo_points[2 * i], homo_points[(2 * i) + 1]);
        }

        Point[] dstPointArray = new Point[4];
        dstPointArray[0] = new Point(0.0, HOMOGRAPHY_HEIGHT);
        dstPointArray[1] = new Point(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT);
        dstPointArray[2] = new Point(0.0, 0.0);
        dstPointArray[3] = new Point(HOMOGRAPHY_WIDTH, 0.0);

        MatOfPoint2f srcPoints = new MatOfPoint2f(srcPointArray);
        MatOfPoint2f dstPoints = new MatOfPoint2f(dstPointArray);

        // Creating the H Matrix
        Mat Homo_Mat = Calib3d.findHomography(srcPoints, dstPoints);

        Debug.Log(Homo_Mat);
        Debug.Log(outMat.size());

        Imgproc.warpPerspective(inMat, outMat, Homo_Mat, new Size(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT));
    }
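findHomography pairs the two MatOfPoint2f arguments by index, so srcPointArray[i] must correspond to dstPointArray[i]; GetHomopoints therefore has to return the corners in the same bottom-left, bottom-right, top-left, top-right order used for dstPointArray.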
Example #7
    void ComputerVisionAlgo(IntPtr greyscale)
    {
        // Utils.copyToMat(greyscale, imageMat);
        inMat = cached_initMat;

        Plane_AR_Controller Homo_Controller = m_ARSessionManager.GetComponent <Plane_AR_Controller>();

        Point[] c1_scrpoints = Homo_Controller.GetScreenpoints(true);
        Point[] c2_scrpoints = Homo_Controller.GetScreenpoints(false);

        MatOfPoint2f initPoints = new MatOfPoint2f(c1_scrpoints);
        MatOfPoint2f currPoints = new MatOfPoint2f(c2_scrpoints);

        Mat H = Calib3d.findHomography(initPoints, currPoints);

        Imgproc.warpPerspective(inMat, outMat, H, new Size(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT));
    }
Example #8
    public ImageObject warpImage(List <ImageObject> imageList)
    {
        Texture2D imgTexture = base64ImageToTexture(imageList[0].image);

        //Find the matching keypoints from the winner list.
        MatOfPoint2f queryPoints = new MatOfPoint2f();
        MatOfPoint2f matchPoints = new MatOfPoint2f();

        List <Point> queryPointsList = new List <Point>();
        List <Point> matchPointsList = new List <Point>();


        foreach (MatOfDMatch match in imageList[1].matches)
        {
            DMatch[] arrayDmatch = match.toArray();
            queryPointsList.Add(imageList[0].keyPoints.toList()[arrayDmatch[0].queryIdx].pt);
            matchPointsList.Add(imageList[1].keyPoints.toList()[arrayDmatch[0].trainIdx].pt);
        }
        queryPoints.fromList(queryPointsList);
        matchPoints.fromList(matchPointsList);

        //Calculate the homography of the best matching image
        //Mat homography = Calib3d.findHomography(queryPoints, matchPoints, Calib3d.RANSAC, 5.0);
        Mat homography    = Calib3d.findHomography(queryPoints, matchPoints, Calib3d.RANSAC, 3.0);
        Mat tempResultImg = new Mat();

        Imgproc.warpPerspective(imageList[0].imageMat, tempResultImg, homography, imageList[1].imageMat.size());

        //Show (red) annotations only
        ImageProcessor imageProcessor = new ImageProcessor();
        Mat            resultImg      = imageProcessor.ShowAnnotationsOnly(tempResultImg);

        //Show image
        Texture2D tempTexture = new Texture2D(imageList[1].imageMat.cols(), imageList[1].imageMat.rows(), TextureFormat.RGBA32, false);

        Utils.matToTexture2D(resultImg, tempTexture);


        //Make black pixels transparent
        Texture2D texture = imageProcessor.removeColor(Color.black, tempTexture);

        return(new ImageObject(Convert.ToBase64String(texture.EncodeToPNG()), imageList[1].index));

        //return new ImageObject(Convert.ToBase64String(tempTexture.EncodeToPNG()), imageList[1].index);
    }
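The commented-out call above shows the same homography computed with a 5.0-pixel RANSAC reprojection threshold; the active call tightens it to 3.0. A smaller threshold rejects more outlier correspondences but can also discard valid matches when keypoint localization is noisy.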
Example #9
    void Rectify(ref Point[] facePointArray, ref Mat cachedMat)
    {
        regPointArray[0] = new Point(0.0, HOMOGRAPHY_HEIGHT);
        regPointArray[1] = new Point(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT);
        regPointArray[2] = new Point(0.0, 0.0);
        regPointArray[3] = new Point(HOMOGRAPHY_WIDTH, 0.0);

        MatOfPoint2f srcPoints = new MatOfPoint2f(facePointArray);
        MatOfPoint2f regPoints = new MatOfPoint2f(regPointArray);

        Debug.LogFormat("Rectify Face Points; {0} \n {1} \n {2} \n {3}",
                        facePointArray[0], facePointArray[1], facePointArray[2], facePointArray[3]);

        // Creating the H Matrix
        Mat Homo_Mat = Calib3d.findHomography(srcPoints, regPoints);

        Imgproc.warpPerspective(imageMat, cachedMat, Homo_Mat, new Size(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT));
    }
Example #10
        void Rectify(ref Point[] face_point_array, int i)
        {
            rectMat_array[i] = new Mat(360, 640, CvType.CV_8UC1);

            Point[] reg_point_array = new Point[4];
            reg_point_array[0] = new Point(0.0, HOMOGRAPHY_HEIGHT);
            reg_point_array[1] = new Point(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT);
            reg_point_array[2] = new Point(0.0, 0.0);
            reg_point_array[3] = new Point(HOMOGRAPHY_WIDTH, 0.0);

            MatOfPoint2f srcPoints = new MatOfPoint2f(face_point_array);
            MatOfPoint2f regPoints = new MatOfPoint2f(reg_point_array);

            // Debug.LogFormat("Rectify Face Points; {0} \n {1} \n {2} \n {3}",
            // face_point_array[0], face_point_array[1], face_point_array[2], face_point_array[3]);

            // Creating the H Matrix
            Mat Homo_Mat = Calib3d.findHomography(srcPoints, regPoints);

            Imgproc.warpPerspective(cached_initMat, rectMat_array[i], Homo_Mat, new Size(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT));
        }
Example #11
    // Detects Blobs with Detector Framework and stores Top-down view into cached_homoMat
    void BlobDetection()
    {
        SimpleBlobDetector detector = SimpleBlobDetector.create();

        // Core.flip(cached_initMat, imageMat, 0);
        cached_initMat = imageMat;

        keyMat = new MatOfKeyPoint();
        detector.detect(imageMat, keyMat);

        // Features2d.drawKeypoints(imageMat, keyMat, outMat);

        if (keyMat.rows() < 4)
        {
            return;
        }

        for (int i = 0; i < 4; i++)
        {
            srcPointArray[i] = new Point(keyMat.get(i, 0)[0], keyMat.get(i, 0)[1]);
        }

        SortPoints();

        regPointArray[0] = new Point(0.0, HOMOGRAPHY_HEIGHT);
        regPointArray[1] = new Point(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT);
        regPointArray[2] = new Point(0.0, 0.0);
        regPointArray[3] = new Point(HOMOGRAPHY_WIDTH, 0.0);

        MatOfPoint2f srcPoints = new MatOfPoint2f(srcPointArray);
        MatOfPoint2f regPoints = new MatOfPoint2f(regPointArray);

        // Creating the H Matrix
        Mat Homo_Mat = Calib3d.findHomography(srcPoints, regPoints);

        Imgproc.warpPerspective(imageMat, cached_homoMat, Homo_Mat, new Size(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT));
    }
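This only works when the detector reports at least the four marker blobs, since only keyMat rows 0-3 are read; SortPoints presumably reorders srcPointArray into the bottom-left, bottom-right, top-left, top-right layout of regPointArray, and any spurious blob among the first four would corrupt the homography.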
Example #12
    void Update()
    {
        //Access camera image provided by Vuforia
        Image camImg = CameraDevice.Instance.GetCameraImage(Image.PIXEL_FORMAT.RGBA8888);

        if (camImg != null)
        {
            if (camImageMat == null)
            {
                //First time -> instantiate camera image specific data
                camImageMat = new Mat(camImg.Height, camImg.Width, CvType.CV_8UC4);  //Note: rows=height, cols=width
            }

            camImageMat.put(0, 0, camImg.Pixels);

            //Replace with your own projection matrix. This approach only uses fy.
            cam.fieldOfView = 2 * Mathf.Atan(camImg.Height * 0.5f / fy) * Mathf.Rad2Deg;

            Vector3 worldPnt1 = corner1.transform.position;
            Vector3 worldPnt2 = corner2.transform.position;
            Vector3 worldPnt3 = corner3.transform.position;
            Vector3 worldPnt4 = corner4.transform.position;

            //See lecture slides
            Matrix4x4 Rt = cam.transform.worldToLocalMatrix;
            Matrix4x4 A  = Matrix4x4.identity;
            A.m00 = fx;
            A.m11 = fy;
            A.m02 = cx;
            A.m12 = cy;

            Matrix4x4 worldToImage = A * Rt;

            Vector3 hUV1 = worldToImage.MultiplyPoint3x4(worldPnt1);
            Vector3 hUV2 = worldToImage.MultiplyPoint3x4(worldPnt2);
            Vector3 hUV3 = worldToImage.MultiplyPoint3x4(worldPnt3);
            Vector3 hUV4 = worldToImage.MultiplyPoint3x4(worldPnt4);

            //hUV are the image coordinates in 2D homogeneous coordinates, we need to normalize, i.e., divide by Z
            Vector2 uv1 = new Vector2(hUV1.x, hUV1.y) / hUV1.z;
            Vector2 uv2 = new Vector2(hUV2.x, hUV2.y) / hUV2.z;
            Vector2 uv3 = new Vector2(hUV3.x, hUV3.y) / hUV3.z;
            Vector2 uv4 = new Vector2(hUV4.x, hUV4.y) / hUV4.z;

            //don't forget to alloc before putting values into a MatOfPoint2f
            imagePoints.put(0, 0, uv1.x, camImg.Height - uv1.y);
            imagePoints.put(1, 0, uv2.x, camImg.Height - uv2.y);
            imagePoints.put(2, 0, uv3.x, camImg.Height - uv3.y);
            imagePoints.put(3, 0, uv4.x, camImg.Height - uv4.y);

            //Debug draw points
            Point imgPnt1 = new Point(imagePoints.get(0, 0));
            Point imgPnt2 = new Point(imagePoints.get(1, 0));
            Point imgPnt3 = new Point(imagePoints.get(2, 0));
            Point imgPnt4 = new Point(imagePoints.get(3, 0));
            Imgproc.circle(camImageMat, imgPnt1, 5, new Scalar(255, 0, 0, 255));
            Imgproc.circle(camImageMat, imgPnt2, 5, new Scalar(0, 255, 0, 255));
            Imgproc.circle(camImageMat, imgPnt3, 5, new Scalar(0, 0, 255, 255));
            Imgproc.circle(camImageMat, imgPnt4, 5, new Scalar(255, 255, 0, 255));
            Scalar lineCl = new Scalar(200, 120, 0, 160);
            Imgproc.line(camImageMat, imgPnt1, imgPnt2, lineCl);
            Imgproc.line(camImageMat, imgPnt2, imgPnt3, lineCl);
            Imgproc.line(camImageMat, imgPnt3, imgPnt4, lineCl);
            Imgproc.line(camImageMat, imgPnt4, imgPnt1, lineCl);


            var destPoints = new MatOfPoint2f();             // Target corners of the unwarped image
            destPoints.alloc(4);
            destPoints.put(0, 0, width, 0);
            destPoints.put(1, 0, width, height);
            destPoints.put(2, 0, 0, height);
            destPoints.put(3, 0, 0, 0);

            var homography = Calib3d.findHomography(imagePoints, destPoints);             // Homography from the projected corners to the target rectangle

            // Warp into a separate image Mat; destPoints is a point container, not an image buffer
            var unwarpedMat = new Mat();
            Imgproc.warpPerspective(camImageMat, unwarpedMat, homography, new Size(camImageMat.width(), camImageMat.height()));

            unwarpedTexture = unwarpedTextureClean;

            MatDisplay.MatToTexture(unwarpedMat, ref unwarpedTexture);             // Take output and transform into texture

            if (Input.GetKey("space"))
            {
                fish.GetComponent <Renderer>().material.mainTexture = unwarpedTexture;
            }
            else
            {
                fish.GetComponent <Renderer>().material.mainTexture = tex;
            }

            MatDisplay.DisplayMat(unwarpedMat, MatDisplaySettings.BOTTOM_LEFT);
            MatDisplay.DisplayMat(camImageMat, MatDisplaySettings.FULL_BACKGROUND);
        }
    }
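The projection here is the standard pinhole model: worldToImage = A * Rt takes world points to homogeneous pixel coordinates, and the field-of-view assignment at the top is just that model's vertical relation inverted, fov = 2 * atan(height / (2 * fy)).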
Example #13
        /// <summary>
        /// Refines the matches with homography.
        /// </summary>
        /// <returns><c>true</c> if the matches were refined with a homography; otherwise <c>false</c>.</returns>
        /// <param name="queryKeypoints">Query keypoints.</param>
        /// <param name="trainKeypoints">Train keypoints.</param>
        /// <param name="reprojectionThreshold">Reprojection threshold.</param>
        /// <param name="matches">Matches.</param>
        /// <param name="homography">Homography.</param>
        static bool refineMatchesWithHomography
        (
            MatOfKeyPoint queryKeypoints,
            MatOfKeyPoint trainKeypoints,
            float reprojectionThreshold,
            MatOfDMatch matches,
            Mat homography
        )
        {
            //Debug.Log ("matches " + matches.ToString ());

            int minNumberMatchesAllowed = 8;

            List <KeyPoint> queryKeypointsList = queryKeypoints.toList();
            List <KeyPoint> trainKeypointsList = trainKeypoints.toList();
            List <DMatch>   matchesList        = matches.toList();

            if (matchesList.Count < minNumberMatchesAllowed)
            {
                return(false);
            }

            // Prepare data for cv::findHomography
            List <Point> srcPointsList = new List <Point>(matchesList.Count);
            List <Point> dstPointsList = new List <Point>(matchesList.Count);

            for (int i = 0; i < matchesList.Count; i++)
            {
                srcPointsList.Add(trainKeypointsList[matchesList[i].trainIdx].pt);
                dstPointsList.Add(queryKeypointsList[matchesList[i].queryIdx].pt);
            }

            // Find homography matrix and get inliers mask
            List <DMatch> inliers = new List <DMatch>();

            using (MatOfPoint2f srcPoints = new MatOfPoint2f())
                using (MatOfPoint2f dstPoints = new MatOfPoint2f())
                    using (MatOfByte inliersMask = new MatOfByte(new byte[srcPointsList.Count]))
                    {
                        srcPoints.fromList(srcPointsList);
                        dstPoints.fromList(dstPointsList);

                        //Debug.Log ("srcPoints " + srcPoints.ToString ());
                        //Debug.Log ("dstPoints " + dstPoints.ToString ());

                        Calib3d.findHomography(srcPoints,
                                               dstPoints,
                                               Calib3d.RANSAC, // RANSAC, not FM_RANSAC (that flag belongs to findFundamentalMat)
                                               reprojectionThreshold,
                                               inliersMask, 2000, 0.955).copyTo(homography);

                        if (homography.rows() != 3 || homography.cols() != 3)
                        {
                            return(false);
                        }

                        //Debug.Log ("homography " + homography.ToString ());
                        //Debug.Log ("inliersMask " + inliersMask.dump ());

                        List <byte> inliersMaskList = inliersMask.toList();

                        for (int i = 0; i < inliersMaskList.Count; i++)
                        {
                            if (inliersMaskList[i] == 1)
                            {
                                inliers.Add(matchesList[i]);
                            }
                        }

                        // Keep only the inlier matches
                        matches.fromList(inliers);
                        //Debug.Log ("matches " + matches.ToString ());
                    }

            return(inliers.Count > minNumberMatchesAllowed);
        }
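A sketch of how this helper might be called, assuming queryKp, trainKp, and matches come from a prior detect/match step such as the one in Example #14:

    // Hypothetical call site: refine ORB matches with a 3 px reprojection threshold.
    Mat homography = new Mat();
    bool refined = refineMatchesWithHomography(queryKp, trainKp, 3.0f, matches, homography);
    if (refined)
    {
        // matches now holds only RANSAC inliers; homography maps train -> query points.
        Debug.Log("Inlier matches: " + matches.total());
    }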
Example #14
    public ImageString MatchFeatures(string base64image, List <string> base64imageList)
    {
        List <MatOfDMatch> winnerMatches   = new List <MatOfDMatch>();
        MatOfKeyPoint      winnerKeyPoints = new MatOfKeyPoint();
        Mat winnerImage = new Mat();
        int winnerIndex = -1;
        int winnerValue = 0;

        Texture2D        imgTexture  = base64ImageToTexture(base64image);
        List <Texture2D> imgTextures = new List <Texture2D>();

        for (int i = 0; i < base64imageList.Count; i++)
        {
            imgTextures.Add(base64ImageToTexture(base64imageList[i]));
        }

        //Create Mat from texture
        Mat img1Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);

        Utils.texture2DToMat(imgTexture, img1Mat);
        MatOfKeyPoint keypoints1   = new MatOfKeyPoint();
        Mat           descriptors1 = new Mat();

        FeatureDetector     detector  = FeatureDetector.create(FeatureDetector.ORB);
        DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

        //Detect keypoints and compute descriptors from photo.
        detector.detect(img1Mat, keypoints1);
        extractor.compute(img1Mat, keypoints1, descriptors1);

        Debug.Log("Billede features: " + descriptors1.rows());

        if (descriptors1.rows() < 10)
        {
            Debug.Log("ARRRRRRGH der er ikke mange descripters i mit original-billede");
            return(new ImageString(base64image, winnerIndex));
        }

        //Run through each image in list
        for (int i = 0; i < imgTextures.Count; i++)
        {
            Texture2D imgTexture2 = imgTextures[i];

            //Create Mat from texture
            Mat img2Mat = new Mat(imgTexture2.height, imgTexture2.width, CvType.CV_8UC3);
            Utils.texture2DToMat(imgTexture2, img2Mat);

            //Find keypoints and descriptors from image in list
            MatOfKeyPoint keypoints2   = new MatOfKeyPoint();
            Mat           descriptors2 = new Mat();

            detector.detect(img2Mat, keypoints2);
            extractor.compute(img2Mat, keypoints2, descriptors2);

            //Match photo with image from list
            DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
            Debug.Log("Billede2 features: " + descriptors2.rows());
            if (descriptors2.rows() < 10)
            {
                Debug.Log("ARRRRRRGH der er ikke mange descripters i mit test billede: " + i);
                continue;
            }

            List <MatOfDMatch> matchList = new List <MatOfDMatch>();
            matcher.knnMatch(descriptors1, descriptors2, matchList, 2);

            //Find the good matches and put them ind a list
            List <MatOfDMatch> good = new List <MatOfDMatch>();

            foreach (MatOfDMatch match in matchList)
            {
                DMatch[] arrayDmatch = match.toArray();
                if (arrayDmatch[0].distance < 0.7f * arrayDmatch[1].distance)
                {
                    good.Add(match);
                }
            }

            //Find the best match image based on the good lists
            if (good.Count > winnerThreshold && good.Count > winnerValue)
            {
                winnerImage     = img2Mat;
                winnerMatches   = good;
                winnerKeyPoints = keypoints2;
                winnerIndex     = i;
                winnerValue     = good.Count;
            }
        }

        Debug.Log("The winner is image: " + winnerIndex + " with a value of: " + winnerValue);

        //If no winner just return the original image
        if (winnerIndex == -1)
        {
            Debug.Log("No winner");
            return(new ImageString(base64image, winnerIndex));
        }

        Debug.Log("No winner");
        //Find the matching keypoints from the winner list.
        MatOfPoint2f queryPoints = new MatOfPoint2f();
        MatOfPoint2f matchPoints = new MatOfPoint2f();

        List <Point> queryPointsList = new List <Point>();
        List <Point> matchPointsList = new List <Point>();


        foreach (MatOfDMatch match in winnerMatches)
        {
            DMatch[] arrayDmatch = match.toArray();
            queryPointsList.Add(keypoints1.toList()[arrayDmatch[0].queryIdx].pt);
            matchPointsList.Add(winnerKeyPoints.toList()[arrayDmatch[0].trainIdx].pt);
        }
        queryPoints.fromList(queryPointsList);
        matchPoints.fromList(matchPointsList);

        //Calculate the homography of the best matching image
        Mat homography = Calib3d.findHomography(queryPoints, matchPoints, Calib3d.RANSAC, 5.0);
        Mat resultImg  = new Mat();

        Imgproc.warpPerspective(img1Mat, resultImg, homography, winnerImage.size());

        //Show image
        Texture2D texture = new Texture2D(winnerImage.cols(), winnerImage.rows(), TextureFormat.RGBA32, false);

        Utils.matToTexture2D(resultImg, texture);

        return(new ImageString(Convert.ToBase64String(texture.EncodeToPNG()), winnerIndex));
    }
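The 0.7f factor in the knnMatch filter is Lowe's ratio test: a match is kept only when its best distance is clearly below the second-best candidate's, which prunes ambiguous descriptors before the RANSAC homography is estimated.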
Example #15
    public bool descriptorsORB_Old(Mat RGB, Mat cameraFeed, string targetName) // Feature color detection, method 3 (runnable but too slow; abandoned)
    {
        if (RGB == null)
        {
            Debug.Log("RGB Mat is Null");
            return(false);
        }
        // Copy the incoming RGB frame into SrcMat
        Mat SrcMat = new Mat();

        RGB.copyTo(SrcMat);
        // Load the template to match against
        Texture2D imgTexture = Resources.Load(targetName) as Texture2D;
        //  Texture2D imgTexture2 = Resources.Load("lenaK") as Texture2D;

        // Convert the Texture2D to a Mat
        Mat img1Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);

        Utils.texture2DToMat(imgTexture, img1Mat);

        // Create the ORB feature detector and descriptor extractor
        FeatureDetector     detector  = FeatureDetector.create(FeatureDetector.ORB);
        DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);
        // Mats for keypoints and descriptors
        MatOfKeyPoint keypoints1     = new MatOfKeyPoint();
        Mat           descriptors1   = new Mat();
        MatOfKeyPoint keypointsSrc   = new MatOfKeyPoint();
        Mat           descriptorsSrc = new Mat();

        // Detect and describe features in image 1
        detector.detect(img1Mat, keypoints1);
        extractor.compute(img1Mat, keypoints1, descriptors1);
        // Detect and describe features in the source frame
        detector.detect(SrcMat, keypointsSrc);
        extractor.compute(SrcMat, keypointsSrc, descriptorsSrc);

        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
        MatOfDMatch       matches = new MatOfDMatch();

        matcher.match(descriptors1, descriptorsSrc, matches);
        DMatch[] arrayDmatch = matches.toArray();

        // Filter the matches by distance
        double max_dist = 0;
        double min_dist = 100;
        //-- Quick calculation of max and min distances between keypoints
        double dist = new double();

        for (int i = 0; i < matches.rows(); i++)
        {
            dist = arrayDmatch[i].distance;
            if (dist < min_dist)
            {
                min_dist = dist;
            }
            if (dist > max_dist)
            {
                max_dist = dist;
            }
        }
        Debug.Log("Max dist :" + max_dist);
        Debug.Log("Min dist :" + min_dist);
        // Keep only the good matches
        List <DMatch> matchesGoodList = new List <DMatch>();

        for (int i = 0; i < matches.rows(); i++)
        {
            // Restored from the commented-out original so findHomography below has input
            if (arrayDmatch[i].distance < RateDist.value * min_dist)
            {
                matchesGoodList.Add(arrayDmatch[i]);
            }
        }
        MatOfDMatch matchesGood = new MatOfDMatch();

        matchesGood.fromList(matchesGoodList);

        // findHomography needs at least four point pairs
        if (matchesGoodList.Count < 4)
        {
            SrcMat.copyTo(cameraFeed);
            SrcMat.release();
            return(false);
        }

        //Draw Keypoints
        Features2d.drawKeypoints(SrcMat, keypointsSrc, SrcMat);

        // Declarations for the output conversion

        Mat resultImg = new Mat();
        // Features2d.drawMatches(img1Mat, keypoints1, SrcMat, keypointsSrc, matchesGood, resultImg);

        List <Point> P1 = new List <Point>();
        // List<Point> P2 = new List<Point>();
        List <Point> pSrc = new List <Point>();

        Debug.Log("MatchCount" + matchesGoodList.Count);
        for (int i = 0; i < matchesGoodList.Count; i++)
        {
            P1.Add(new Point(keypoints1.toArray()[matchesGoodList[i].queryIdx].pt.x, keypoints1.toArray()[matchesGoodList[i].queryIdx].pt.y));
            pSrc.Add(new Point(keypointsSrc.toArray()[matchesGoodList[i].trainIdx].pt.x, keypointsSrc.toArray()[matchesGoodList[i].trainIdx].pt.y));
            //Debug.Log("ID = " + matchesGoodList[i].queryIdx );
            //Debug.Log("x,y =" + (int)keypoints1.toArray()[matchesGoodList[i].queryIdx].pt.x + "," + (int)keypoints1.toArray()[matchesGoodList[i].queryIdx].pt.y);
            //Debug.Log("x,y =" + (int)keypoints2.toArray()[matchesGoodList[i].trainIdx].pt.x + "," + (int)keypoints2.toArray()[matchesGoodList[i].trainIdx].pt.y);
        }

        MatOfPoint2f p2fTarget = new MatOfPoint2f(P1.ToArray());
        MatOfPoint2f p2fSrc    = new MatOfPoint2f(pSrc.ToArray());

        Mat          matrixH         = Calib3d.findHomography(p2fTarget, p2fSrc, Calib3d.RANSAC, 3);
        List <Point> srcPointCorners = new List <Point>();

        srcPointCorners.Add(new Point(0, 0));
        srcPointCorners.Add(new Point(img1Mat.width(), 0));
        srcPointCorners.Add(new Point(img1Mat.width(), img1Mat.height()));
        srcPointCorners.Add(new Point(0, img1Mat.height()));

        Mat          originalRect       = Converters.vector_Point2f_to_Mat(srcPointCorners);
        List <Point> srcPointCornersEnd = new List <Point>();

        srcPointCornersEnd.Add(new Point(0, img1Mat.height()));
        srcPointCornersEnd.Add(new Point(0, 0));
        srcPointCornersEnd.Add(new Point(img1Mat.width(), 0));
        srcPointCornersEnd.Add(new Point(img1Mat.width(), img1Mat.height()));

        Mat changeRect = Converters.vector_Point2f_to_Mat(srcPointCornersEnd);

        Core.perspectiveTransform(originalRect, changeRect, matrixH);
        List <Point> srcPointCornersSave = new List <Point>();

        Converters.Mat_to_vector_Point(changeRect, srcPointCornersSave);

        if ((srcPointCornersSave[2].x - srcPointCornersSave[0].x) < 5 || (srcPointCornersSave[2].y - srcPointCornersSave[0].y) < 5)
        {
            Debug.Log("Match Out Put image is to small");
            SrcMat.copyTo(cameraFeed);
            SrcMat.release();
            Imgproc.putText(cameraFeed, "X-S", new Point(10, 50), 0, 1, new Scalar(255, 255, 255), 2);
            return(false);
        }
        //    Features2d.drawMatches(img1Mat, keypoints1, SrcMat, keypointsSrc, matchesGood, resultImg);
        Imgproc.line(SrcMat, srcPointCornersSave[0], srcPointCornersSave[1], new Scalar(255, 0, 0), 3);
        Imgproc.line(SrcMat, srcPointCornersSave[1], srcPointCornersSave[2], new Scalar(255, 0, 0), 3);
        Imgproc.line(SrcMat, srcPointCornersSave[2], srcPointCornersSave[3], new Scalar(255, 0, 0), 3);
        Imgproc.line(SrcMat, srcPointCornersSave[3], srcPointCornersSave[0], new Scalar(255, 0, 0), 3);

        SrcMat.copyTo(cameraFeed);
        keypoints1.release();
        img1Mat.release();
        SrcMat.release();
        return(true);
    }
Example #16
//============================================================
//============ Functions below are no longer used ============
//============================================================

    // Feature color detection, method 3 (ORB feature matching)
    public bool descriptorsORB(Mat RGB, Mat cameraFeed, string targetName)
    {
        if (RGB == null)
        {
            Debug.Log("RGB Mat is Null");
            return(false);
        }
        // Copy the incoming RGB frame into SrcMat
        Mat SrcMat = new Mat();

        RGB.copyTo(SrcMat);
        // Load the template to match against
        Texture2D imgTexture = Resources.Load(targetName) as Texture2D;

        // Convert the Texture2D to a Mat
        Mat targetMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);

        Utils.texture2DToMat(imgTexture, targetMat);

        // Create the ORB feature detector and descriptor extractor
        FeatureDetector     detector  = FeatureDetector.create(FeatureDetector.ORB);
        DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

        // Mats for keypoints and descriptors
        MatOfKeyPoint keypointsTarget   = new MatOfKeyPoint();
        Mat           descriptorsTarget = new Mat();
        MatOfKeyPoint keypointsSrc      = new MatOfKeyPoint();
        Mat           descriptorsSrc    = new Mat();

        // Detect and describe features in the target image
        detector.detect(targetMat, keypointsTarget);
        extractor.compute(targetMat, keypointsTarget, descriptorsTarget);

        // Detect and describe features in the source frame
        detector.detect(SrcMat, keypointsSrc);
        extractor.compute(SrcMat, keypointsSrc, descriptorsSrc);

        // Create the descriptor matcher
        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
        MatOfDMatch       matches = new MatOfDMatch();

        // Match the descriptors of the two images
        matcher.match(descriptorsTarget, descriptorsSrc, matches);
        DMatch[] arrayDmatch = matches.toArray();

        // Filter the matches by distance
        double max_dist = 0;
        double min_dist = 100;
        //-- Quick calculation of max and min distances between keypoints
        double dist = new double();

        for (int i = 0; i < matches.rows(); i++)
        {
            dist = arrayDmatch[i].distance;
            if (dist < min_dist)
            {
                min_dist = dist;
            }
            if (dist > max_dist)
            {
                max_dist = dist;
            }
        }
        Debug.Log("Max dist :" + max_dist);
        Debug.Log("Min dist :" + min_dist);

        List <DMatch> matchesGoodList = new List <DMatch>();

        // The original left matchesGoodList empty, which would crash findHomography below;
        // restore the same distance filter used in descriptorsORB_Old.
        for (int i = 0; i < matches.rows(); i++)
        {
            if (arrayDmatch[i].distance < RateDist.value * min_dist)
            {
                matchesGoodList.Add(arrayDmatch[i]);
            }
        }

        MatOfDMatch matchesGood = new MatOfDMatch();

        matchesGood.fromList(matchesGoodList);

        //Draw Keypoints
        Features2d.drawKeypoints(SrcMat, keypointsSrc, SrcMat);

        List <Point> pTarget = new List <Point>();
        List <Point> pSrc    = new List <Point>();

        Debug.Log("MatchCount" + matchesGoodList.Count);
        for (int i = 0; i < matchesGoodList.Count; i++)
        {
            pTarget.Add(new Point(keypointsTarget.toArray()[matchesGoodList[i].queryIdx].pt.x, keypointsTarget.toArray()[matchesGoodList[i].queryIdx].pt.y));
            pSrc.Add(new Point(keypointsSrc.toArray()[matchesGoodList[i].trainIdx].pt.x, keypointsSrc.toArray()[matchesGoodList[i].trainIdx].pt.y));
        }

        MatOfPoint2f p2fTarget = new MatOfPoint2f(pTarget.ToArray());
        MatOfPoint2f p2fSrc    = new MatOfPoint2f(pSrc.ToArray());

        // findHomography needs at least four point pairs
        if (matchesGoodList.Count < 4)
        {
            SrcMat.copyTo(cameraFeed);
            SrcMat.release();
            return(false);
        }

        Mat matrixH = Calib3d.findHomography(p2fTarget, p2fSrc, Calib3d.RANSAC, 3);

        List <Point> srcPointCorners = new List <Point>();

        srcPointCorners.Add(new Point(0, 0));
        srcPointCorners.Add(new Point(targetMat.width(), 0));
        srcPointCorners.Add(new Point(targetMat.width(), targetMat.height()));
        srcPointCorners.Add(new Point(0, targetMat.height()));
        Mat originalRect = Converters.vector_Point2f_to_Mat(srcPointCorners);

        List <Point> srcPointCornersEnd = new List <Point>();

        srcPointCornersEnd.Add(new Point(0, targetMat.height()));
        srcPointCornersEnd.Add(new Point(0, 0));
        srcPointCornersEnd.Add(new Point(targetMat.width(), 0));
        srcPointCornersEnd.Add(new Point(targetMat.width(), targetMat.height()));
        Mat changeRect = Converters.vector_Point2f_to_Mat(srcPointCornersEnd);

        Core.perspectiveTransform(originalRect, changeRect, matrixH);
        List <Point> srcPointCornersSave = new List <Point>();

        Converters.Mat_to_vector_Point(changeRect, srcPointCornersSave);

        if ((srcPointCornersSave[2].x - srcPointCornersSave[0].x) < 5 || (srcPointCornersSave[2].y - srcPointCornersSave[0].y) < 5)
        {
            Debug.Log("Match Out Put image is to small");
            SrcMat.copyTo(cameraFeed);
            SrcMat.release();
            Imgproc.putText(cameraFeed, targetName, srcPointCornersSave[0], 0, 1, new Scalar(255, 255, 255), 2);
            return(false);
        }
        // Draw the bounding box
        Imgproc.line(SrcMat, srcPointCornersSave[0], srcPointCornersSave[1], new Scalar(255, 0, 0), 3);
        Imgproc.line(SrcMat, srcPointCornersSave[1], srcPointCornersSave[2], new Scalar(255, 0, 0), 3);
        Imgproc.line(SrcMat, srcPointCornersSave[2], srcPointCornersSave[3], new Scalar(255, 0, 0), 3);
        Imgproc.line(SrcMat, srcPointCornersSave[3], srcPointCornersSave[0], new Scalar(255, 0, 0), 3);
        // Draw the center point
        Point middlePoint = new Point((srcPointCornersSave[0].x + srcPointCornersSave[2].x) / 2, (srcPointCornersSave[0].y + srcPointCornersSave[2].y) / 2);

        Imgproc.line(SrcMat, middlePoint, middlePoint, new Scalar(0, 0, 255), 10);


        SrcMat.copyTo(cameraFeed);
        keypointsTarget.release();
        targetMat.release();
        SrcMat.release();
        return(true);
    }
Example #17
    void Update()
    {
        // Camera image from Vuforia
        Image camImg = CameraDevice.Instance.GetCameraImage(PIXEL_FORMAT.RGBA8888);

        if (camImg != null && camImg.Height > 0)
        {
            if (camImageMat == null)
            {
                // Vuforia seems to enforce a resolution of width=640px for any camera
                Debug.Log("rows: " + camImg.Height + ", cols: " + camImg.Width);
                camImageMat = new Mat(camImg.Height, camImg.Width, CvType.CV_8UC4);
            }

            // Put Vuforia camera feed pixels into OpenCV display matrix
            camImageMat.put(0, 0, camImg.Pixels);


            // DEBUG TEST: In OpenCV, we operate in screen coordinates (pixels),
            // and we know the resolution of the Vuforia camera
            // Here, we draw a red circle in screen space using OpenCV
            //Imgproc.circle(camImageMat, new Point(300, 200), 20, new Scalar(255, 0, 0, 128));


            //---- <THIS IS WHERE THE CORNER PROJECTION BEGINS> ----

            // Get each corner's position in world coordinates.
            // transform.position is already expressed in world space (cf. Example #12),
            // so multiplying by localToWorldMatrix would apply the transform twice.
            Vector3 worldPnt1 = corner1.transform.position;
            Vector3 worldPnt2 = corner2.transform.position;
            Vector3 worldPnt3 = corner3.transform.position;
            Vector3 worldPnt4 = corner4.transform.position;

            // Matrix that goes from world to the camera coordinate system
            Matrix4x4 Rt = cam.transform.worldToLocalMatrix;

            // Camera intrinsics
            Matrix4x4 A = Matrix4x4.identity;
            A.m00 = fx;
            A.m11 = fy;
            A.m02 = cx;
            A.m12 = cy;
            //see cheat sheet

            Matrix4x4 worldToImage = A * Rt;

            Vector3 hUV1 = worldToImage.MultiplyPoint3x4(worldPnt1);
            Vector3 hUV2 = worldToImage.MultiplyPoint3x4(worldPnt2);
            Vector3 hUV3 = worldToImage.MultiplyPoint3x4(worldPnt3);
            Vector3 hUV4 = worldToImage.MultiplyPoint3x4(worldPnt4);

            // Remember that we're dealing with homogeneous coordinates.
            // Here we normalize them to get Image coordinates
            Vector2 uv1 = new Vector2(hUV1.x, hUV1.y) / hUV1.z;
            Vector2 uv2 = new Vector2(hUV2.x, hUV2.y) / hUV2.z;
            Vector2 uv3 = new Vector2(hUV3.x, hUV3.y) / hUV3.z;
            Vector2 uv4 = new Vector2(hUV4.x, hUV4.y) / hUV4.z;

            // We flip the v-coordinate of our image points to make the Unity (Vuforia) data compatible with OpenCV
            // Remember that in OpenCV the (0,0) pos is in the top left corner in contrast to the bottom left corner
            float maxV = camImg.Height - 1; // The -1 is because pixel coordinates are 0-indexed
            imagePoints.put(0, 0, uv1.x, maxV - uv1.y);
            imagePoints.put(1, 0, uv2.x, maxV - uv2.y);
            imagePoints.put(2, 0, uv3.x, maxV - uv3.y);
            imagePoints.put(3, 0, uv4.x, maxV - uv4.y);

            Point imgPnt1 = new Point(imagePoints.get(0, 0));
            Point imgPnt2 = new Point(imagePoints.get(1, 0));
            Point imgPnt3 = new Point(imagePoints.get(2, 0));
            Point imgPnt4 = new Point(imagePoints.get(3, 0));

            //For debug. Show if impPnti found the right position in img coordinate
            Imgproc.circle(camImageMat, imgPnt1, 10, new Scalar(255, 0, 0, 200), 5);
            Imgproc.circle(camImageMat, imgPnt2, 20, new Scalar(255, 255, 0, 255), 5);
            Imgproc.circle(camImageMat, imgPnt3, 30, new Scalar(0, 255, 0, 255), 5);
            Imgproc.circle(camImageMat, imgPnt4, 40, new Scalar(0, 0, 255, 255), 4);


            MatOfPoint2f unwarpPoints;
            unwarpPoints = new MatOfPoint2f();
            unwarpPoints.alloc(4);
            //according to the resolution
            unwarpPoints.put(0, 0, 0, 0);
            unwarpPoints.put(1, 0, 0, 442);
            unwarpPoints.put(2, 0, 442, 442);
            unwarpPoints.put(3, 0, 442, 0);
            //compute homography matrix

            Mat H    = Calib3d.findHomography(imagePoints, unwarpPoints);
            Mat Hinv = H.inv();
            Mat dst  = new Mat(442, 442, CvType.CV_8UC4);
            texMat = MatDisplay.LoadRGBATexture("/models/dog_tex.png"); // reloading every frame is wasteful; better cached once in Start()
            Imgproc.warpPerspective(texMat, dst, Hinv, new Size(442, 442));

            // MatDisplay.MatToTexture(dst, ref tex);
            //rd.material.mainTexture = tex;
            //Debug.Log(imgPnt2);
            //Debug.Log(imgPnt2);
            //---- </THIS IS WHERE THE CORNER PROJECTION ENDS> ----
            // Display the Mat that includes video feed and debug points
            // Do not forget to disable Vuforia's video background and change your aspect ratio to 4:3!
            MatDisplay.DisplayMat(camImageMat, MatDisplaySettings.FULL_BACKGROUND);



            //---- MATCH INTRINSICS OF REAL CAMERA AND PROJECTION MATRIX OF VIRTUAL CAMERA ----
            // See lecture slides for why this formula works.
            cam.fieldOfView = 2 * Mathf.Atan(camImg.Height * 0.5f / fy) * Mathf.Rad2Deg;
        }
    }
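Passing Hinv to warpPerspective is equivalent to passing H together with the Imgproc.WARP_INVERSE_MAP flag, which avoids computing H.inv() explicitly. Note that the warped result dst is currently unused: the MatToTexture call that would apply it to a material is commented out.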
Example #18
    IEnumerator ProcessImage(XRCpuImage image, Vector3 viewportScaling)
    {
        // Create the async conversion request.

        XRCpuImage.ConversionParams conv_params = new XRCpuImage.ConversionParams
        {
            // Use the full image.
            inputRect = new RectInt(0, 0, image.width, image.height),

            // Keep the full resolution (no downsampling).
            outputDimensions = new Vector2Int(image.width, image.height),

            // Color image format.
            outputFormat = TextureFormat.RGBA32,

            // Flip across the Y axis.
            transformation = XRCpuImage.Transformation.MirrorY
        };

        var request = image.ConvertAsync(conv_params);

        // Wait for the conversion to complete.
        while (!request.status.IsDone())
        {
            yield return(null);
        }

        // Check status to see if the conversion completed successfully.
        if (request.status != XRCpuImage.AsyncConversionStatus.Ready)
        {
            // Something went wrong.
            Debug.LogErrorFormat("Request failed with status {0}", request.status);

            // Dispose even if there is an error.
            request.Dispose();
            yield break;
        }

        // Image data is ready. Let's apply it to a Texture2D.
        var rawData = request.GetData <byte>();

        // Create a texture if necessary.
        if (m_Texture == null)
        {
            m_Texture = new Texture2D(
                request.conversionParams.outputDimensions.x,
                request.conversionParams.outputDimensions.y,
                request.conversionParams.outputFormat,
                false);
        }

        // Copy the image data into the texture.
        m_Texture.LoadRawTextureData(rawData);
        m_Texture.Apply();

        Debug.Log("TEX: " + m_Texture.height + "h " + m_Texture.width + "w");
        Debug.Log("Screen: " + m_Texture.height + "h " + m_Texture.width + "w");


        Mat inputMat  = new Mat(image.height, image.width, CvType.CV_8UC4);
        Mat outputMat = new Mat(1500, 1500, CvType.CV_8UC4);

        Utils.fastTexture2DToMat(m_Texture, inputMat);

        if (tex2d == null)
        {
            tex2d = new Texture2D(1500,
                                  1500, conv_params.outputFormat, false);
        }

        Debug.Log("positionAnchor");
        Debug.Log(positionAnchor);

        Debug.Log("anchorRef");
        Debug.Log(anchorRef);

        int counter = 0;

        Point[] srcPointsVec = new Point[4];
        foreach (var point in anchorRef.getWorldPoints())
        {
            Vector3 screenPoint = mainCam.WorldToScreenPoint(point);
            srcPointsVec[counter] = new Point(screenPoint.y * viewportScaling.y / 3,
                                              100 - screenPoint.x * viewportScaling.x / 3);
            counter += 1;
        }


        MatOfPoint2f srcPoints = new MatOfPoint2f(new[]
        {
            srcPointsVec[0],
            srcPointsVec[1],
            srcPointsVec[2],
            srcPointsVec[3]
        });


        MatOfPoint2f dstPoints = new MatOfPoint2f(new[]
        {
            new Point(195 * 1.25, 0),
            new Point(0, 0),
            new Point(0, 280 * 1.25),
            new Point(195 * 1.25, 280 * 1.25),
        });

        Mat H = Calib3d.findHomography(srcPoints, dstPoints);


        Imgproc.warpPerspective(inputMat, outputMat, H, new Size(1500, 1500));

        Utils.fastMatToTexture2D(outputMat, tex2d);


        if (websocket.State == WebSocketState.Open && canProcess)
        {
            websocket.Send(ImageConversion.EncodeToJPG(tex2d, 50));
            canProcess = false;
        }

        inputMat.Dispose();
        inputMat = null;
        outputMat.Dispose();
        outputMat = null;
        request.Dispose();
    }