/// <summary>
    /// Computes the pose of the pattern via solvePnP and stores it in pose3d.
    /// </summary>
    /// <param name="pattern">The pattern whose 3D model points are matched against points2d.</param>
    /// <param name="camMatrix">The 3x3 camera intrinsic matrix.</param>
    /// <param name="distCoeff">The lens distortion coefficients.</param>
    public void computePose(Pattern pattern, Mat camMatrix, MatOfDouble distCoeff)
    {
        Mat Rvec = new Mat();
        Mat Tvec = new Mat();
        Mat raux = new Mat();
        Mat taux = new Mat();

        Calib3d.solvePnP(pattern.points3d, points2d, camMatrix, distCoeff, raux, taux);
        raux.convertTo(Rvec, CvType.CV_32F);
        taux.convertTo(Tvec, CvType.CV_32F);

        Mat rotMat = new Mat(3, 3, CvType.CV_64FC1);

        Calib3d.Rodrigues(Rvec, rotMat);

        pose3d.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)Tvec.get(0, 0) [0]));
        pose3d.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)Tvec.get(1, 0) [0]));
        pose3d.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)Tvec.get(2, 0) [0]));
        pose3d.SetRow(3, new Vector4(0, 0, 0, 1));

//      Debug.Log ("pose3d " + pose3d.ToString ());

        Rvec.Dispose();
        Tvec.Dispose();
        raux.Dispose();
        taux.Dispose();
        rotMat.Dispose();
    }
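OpenCV's pose is right-handed while Unity is left-handed, so pose3d typically needs an axis flip before it can drive a Transform. A minimal sketch of that conversion, assuming pose3d is a Matrix4x4 field and borrowing the invertYM/invertZM pattern and the ARUtils helper that appear in later examples (targetTransform is hypothetical):

    // Hedged sketch, not part of the original class.
    Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
    Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
    Matrix4x4 ARM = invertYM * pose3d * invertZM; // right-handed (OpenCV) -> left-handed (Unity)
    ARUtils.SetTransformFromMatrix(targetTransform, ref ARM);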
Example #2
    /// <summary>
    /// Initializes the undistortion settings whenever the target texture is initialized.
    /// </summary>
    /// <param name="sender">The texture holder that raised the event.</param>
    /// <param name="texture">The newly initialized source texture.</param>
    private void VideoCaptureController_ChangeTextureEvent(TextureHolderBase sender, Texture texture)
    {
        rgbMat = new Mat(texture.height, texture.width, CvType.CV_8UC3);
        if (this.texture != null)
        {
            if (this.texture.width != texture.width || this.texture.height != texture.height)
            {
                DestroyImmediate(this.texture);
                this.texture = null;
            }
        }
        if (this.texture == null)
        {
            this.texture = new Texture2D(texture.width, texture.height, TextureFormat.RGB24, false);
        }

        newCameraMatrix = Calib3d.getOptimalNewCameraMatrix(cameraMatrix, distCoeffs, videoCaptureController.RGBMat.size(), 0, videoCaptureController.RGBMat.size());
        if (isFisheye)
        {
            Calib3d.fisheye_initUndistortRectifyMap(this.cameraMatrix, this.distCoeffs, new Mat(), newCameraMatrix, videoCaptureController.RGBMat.size(), CvType.CV_32FC1, mapX, mapY);
        }
        else
        {
            Calib3d.initUndistortRectifyMap(this.cameraMatrix, this.distCoeffs, new Mat(), newCameraMatrix, videoCaptureController.RGBMat.size(), CvType.CV_32FC1, mapX, mapY);
        }

        OnTextureInitialized(GetTexture());
    }
    public double calibrateFromCorrespondences(List <Vector3> imagePoints, List <Vector3> worldPoints)
    {
        List <Mat> imagePointsCvMat = new List <Mat> ();

        imagePointsCvMat.Add(PutImagePointsIntoCVMat(imagePoints));
        List <Mat> worldPointsCvMat = new List <Mat> ();

        worldPointsCvMat.Add(PutObjectPointsIntoCVMat(worldPoints));

        Size        size        = new Size(width, height);
        Mat         intrinsic   = CreateIntrinsicGuess(height, width);
        MatOfDouble distortion  = new MatOfDouble(0, 0, 0, 0);
        List <Mat>  rotation    = new List <Mat> ();
        List <Mat>  translation = new List <Mat> ();
        int         flags       = Calib3d.CALIB_ZERO_TANGENT_DIST | Calib3d.CALIB_USE_INTRINSIC_GUESS | Calib3d.CALIB_FIX_K1 | Calib3d.CALIB_FIX_K2 | Calib3d.CALIB_FIX_K3 | Calib3d.CALIB_FIX_K4 | Calib3d.CALIB_FIX_K5; // == 6377

//        Debug.Log ("imagePoint2fMat: " + imagePointsCvMat [0].dump ());
//        Debug.Log ("worldPoints3fMat: " + worldPointsCvMat [0].dump ());
//        Debug.Log ("size: " + size);
//        Debug.Log ("intrinsic: " + intrinsic.dump ());
//        Debug.Log ("distortion: " + distortion.dump ());
//        Debug.Log ("flags: " + flags);

        double repErr = -1;

        repErr = Calib3d.calibrateCamera(worldPointsCvMat, imagePointsCvMat, size, intrinsic, distortion, rotation, translation, flags);
        ApplyCalibrationToUnityCamera(intrinsic, rotation [0], translation [0]);

//        Debug.Log ("intrinsic: " + intrinsic.dump ());
//        Debug.Log ("rotation: " + rotation [0].dump ());
//        Debug.Log ("translation: " + translation [0].dump ());

        return(repErr);
    }
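calibrateFromCorrespondences relies on two conversion helpers that are not shown above. A plausible sketch, assuming image points carry (x, y) in the first two Vector3 components, world points are full 3D coordinates, and System.Linq is imported:

    // Hypothetical helpers; the original implementations are not shown.
    private Mat PutImagePointsIntoCVMat(List <Vector3> points)
    {
        // calibrateCamera expects each view's image points as a MatOfPoint2f.
        return new MatOfPoint2f(points.Select(p => new Point(p.x, p.y)).ToArray());
    }

    private Mat PutObjectPointsIntoCVMat(List <Vector3> points)
    {
        // Object points go in as a MatOfPoint3f.
        return new MatOfPoint3f(points.Select(p => new Point3(p.x, p.y, p.z)).ToArray());
    }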
Example #4
        /// <summary>
        /// Estimates the pose of each detected marker via solvePnP and stores it in the marker's transformation matrix.
        /// </summary>
        /// <param name="detectedMarkers">The markers detected in the current frame.</param>
        void estimatePosition(List <Marker> detectedMarkers)
        {
            for (int i = 0; i < detectedMarkers.Count; i++)
            {
                Marker m = detectedMarkers [i];

                Mat Rvec = new Mat();
                Mat Tvec = new Mat();
                Mat raux = new Mat();
                Mat taux = new Mat();
                Calib3d.solvePnP(m_markerCorners3d, new MatOfPoint2f(m.points.toArray()), camMatrix, distCoeff, raux, taux);


                raux.convertTo(Rvec, CvType.CV_32F);
                taux.convertTo(Tvec, CvType.CV_32F);

                Mat rotMat = new Mat(3, 3, CvType.CV_64FC1);
                Calib3d.Rodrigues(Rvec, rotMat);


                m.transformation.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)Tvec.get(0, 0) [0]));
                m.transformation.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)Tvec.get(1, 0) [0]));
                m.transformation.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)Tvec.get(2, 0) [0]));
                m.transformation.SetRow(3, new Vector4(0, 0, 0, 1));

//                      Debug.Log ("m.transformation " + m.transformation.ToString ());


                Rvec.Dispose();
                Tvec.Dispose();
                raux.Dispose();
                taux.Dispose();
                rotMat.Dispose();
            }
        }
Example #5
        public void calibrate()
        {
            List <Mat> rvecs = new List <Mat>();
            List <Mat> tvecs = new List <Mat>();
            Mat        reprojectionErrors = new Mat();
            List <Mat> objectPoints       = new List <Mat>();

            objectPoints.Add(Mat.Zeros(mCornersSize, 1, CvType.Cv32fc3));
            calcBoardCornerPositions(objectPoints[0]);
            for (int i = 1; i < mCornersBuffer.Count; i++)
            {
                objectPoints.Add(objectPoints[0]);
            }

            Calib3d.CalibrateCamera(objectPoints, mCornersBuffer, mImageSize,
                                    mCameraMatrix, mDistortionCoefficients, rvecs, tvecs, mFlags);

            mIsCalibrated = Core.CheckRange(mCameraMatrix) &&
                            Core.CheckRange(mDistortionCoefficients);

            mRms = computeReprojectionErrors(objectPoints, rvecs, tvecs, reprojectionErrors);
            Log.Info(TAG, String.Format("Average re-projection error: {0}", mRms)); // "%f" is a Java format specifier; C#'s String.Format needs "{0}"
            Log.Info(TAG, "Camera matrix: " + mCameraMatrix.Dump());
            Log.Info(TAG, "Distortion coefficients: " + mDistortionCoefficients.Dump());
        }
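calcBoardCornerPositions is not shown; for a chessboard it conventionally fills the Mat with a planar grid of corner positions at Z = 0. A sketch under that assumption, with hypothetical mPatternWidth, mPatternHeight, and mSquareSize fields, in the same Xamarin-style binding as the snippet above:

        // Hedged sketch; field names are assumptions.
        private void calcBoardCornerPositions(Mat corners)
        {
            float[] positions = new float[mCornersSize * 3];
            for (int i = 0; i < mPatternHeight; i++)
            {
                for (int j = 0; j < mPatternWidth; j++)
                {
                    int k = (i * mPatternWidth + j) * 3;
                    positions[k]     = j * mSquareSize; // X
                    positions[k + 1] = i * mSquareSize; // Y
                    positions[k + 2] = 0f;              // Z: the board is planar
                }
            }
            corners.Put(0, 0, positions);
        }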
    void Rectify(ref Point[] face_point_array, int i)
    {
        Debug.Log("R: Starting");
        homoMat_array[i] = new Mat(480, 640, CvType.CV_8UC1);

        reg_point_array[0] = new Point(0.0, HOMOGRAPHY_HEIGHT);
        reg_point_array[1] = new Point(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT);
        reg_point_array[2] = new Point(0.0, 0.0);
        reg_point_array[3] = new Point(HOMOGRAPHY_WIDTH, 0.0);

        Debug.Log("R: reg_point_array populated");

        MatOfPoint2f srcPoints = new MatOfPoint2f(face_point_array);
        MatOfPoint2f regPoints = new MatOfPoint2f(reg_point_array);

        // Debug.Log("R: src and reg points instantiated");

        Debug.LogFormat("Rectify Face Points; {0} \n {1} \n {2} \n {3}",
                        face_point_array[0], face_point_array[1], face_point_array[2], face_point_array[3]);

        // Creating the H Matrix
        Mat Homo_Mat = Calib3d.findHomography(srcPoints, regPoints);

        Debug.Log("R: H Matrix Instantiated");

        Imgproc.warpPerspective(cached_initMat, homoMat_array[i], Homo_Mat, new Size(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT));

        Debug.Log("R: image rectified");
    }
Example #7
    void HomographyTransform(int i)
    {
        // Init homography result Mat
        homoMat_array[i] = new Mat(480, 640, CvType.CV_8UC1);

        // Init regular point array
        reg_point_array[0] = new Point(0.0, HOMOGRAPHY_HEIGHT);
        reg_point_array[1] = new Point(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT);
        reg_point_array[2] = new Point(0.0, 0.0);
        reg_point_array[3] = new Point(HOMOGRAPHY_WIDTH, 0.0);

        // Extract face_points corresponding with reg_points
        Point[] out_point_array = new Point[4];
        for (int j = 0; j < 4; j++)       // j :: face point count
        {
            int src_i = face_index[i, j];
            out_point_array[j] = proj_point_array[src_i];
        }

        MatOfPoint2f regPoints = new MatOfPoint2f(reg_point_array);
        MatOfPoint2f outPoints = new MatOfPoint2f(out_point_array);

        Mat Homo_Mat = Calib3d.findHomography(regPoints, outPoints);

        Imgproc.warpPerspective(rectMat_array[i], homoMat_array[i], Homo_Mat, new Size(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT));
    }
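With exactly four correspondences, as here, the homography is determined exactly, so Imgproc.getPerspectiveTransform computes the same matrix without findHomography's estimation machinery; findHomography only pays off with more than four (possibly noisy) points:

        // Equivalent for the exact four-point case.
        Mat Homo_Mat = Imgproc.getPerspectiveTransform(regPoints, outPoints);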
Example #8
        private double computeReprojectionErrors(List <Mat> objectPoints,
                                                 List <Mat> rvecs, List <Mat> tvecs, Mat perViewErrors)
        {
            MatOfPoint2f cornersProjected = new MatOfPoint2f();
            double       totalError       = 0;
            double       error;

            float[] viewErrors = new float[objectPoints.Count];

            MatOfDouble distortionCoefficients = new MatOfDouble(mDistortionCoefficients);
            int         totalPoints            = 0;

            for (int i = 0; i < objectPoints.Count; i++)
            {
                MatOfPoint3f points = new MatOfPoint3f(objectPoints[i]);
                Calib3d.ProjectPoints(points, rvecs[i], tvecs[i],
                                      mCameraMatrix, distortionCoefficients, cornersProjected);
                error = Core.Norm(mCornersBuffer[i], cornersProjected, Core.NormL2);

                int n = objectPoints[i].Rows();
                viewErrors[i] = (float)Math.Sqrt(error * error / n);
                totalError   += error * error;
                totalPoints  += n;
            }
            perViewErrors.Create(objectPoints.Count, 1, CvType.Cv32fc1);
            perViewErrors.Put(0, 0, viewErrors);

            return(Math.Sqrt(totalError / totalPoints));
        }
Example #9
    public virtual void Draw(Mat src, Mat dst)
    {
        var points      = new MatOfPoint2f();
        var patternSize = new Size((int)SizeX, (int)SizeY);

        var found = false;

        switch (boardType)
        {
        case BoardType.ChessBoard:
            found = Calib3d.findChessboardCorners(src, patternSize, points, Calib3d.CALIB_CB_ADAPTIVE_THRESH | Calib3d.CALIB_CB_FAST_CHECK | Calib3d.CALIB_CB_NORMALIZE_IMAGE);

            break;

        case BoardType.CirclesGrid:
            found = Calib3d.findCirclesGrid(src, patternSize, points, Calib3d.CALIB_CB_SYMMETRIC_GRID);
            break;

        case BoardType.AsymmetricCirclesGrid:
            found = Calib3d.findCirclesGrid(src, patternSize, points, Calib3d.CALIB_CB_ASYMMETRIC_GRID);
            break;
        }

        if (found)
        {
            if (boardType == BoardType.ChessBoard)
            {
                Imgproc.cornerSubPix(src, points, new Size(5, 5), new Size(-1, -1), new TermCriteria(TermCriteria.EPS + TermCriteria.COUNT, 30, 0.1));
            }

            Calib3d.drawChessboardCorners(dst, patternSize, points, found);
        }
    }
Example #10
        // Taken from http://docs.opencv.org/3.0-beta/doc/tutorials/calib3d/camera_calibration/camera_calibration.html
        float computeReprojectionErrors(List <Mat> objectPoints, List <Mat> imagePoints, List <Mat> rvecs, List <Mat> tvecs, Mat cameraMatrix, Mat distCoeffs, List <float> perViewErrors)
        {
            MatOfPoint2f imagePoints2 = new MatOfPoint2f();
            int          i, totalPoints = 0;
            float        totalErr = 0, err;

            int numItems = objectPoints.Count;

            for (i = 0; i < numItems; ++i)
            {
                MatOfPoint3f objectPointsi = new MatOfPoint3f(objectPoints[i]);
                MatOfPoint2f imagePointsi  = new MatOfPoint2f(imagePoints[i]);

                MatOfDouble distCoeffsv2 = new MatOfDouble(distCoeffs);
                Calib3d.projectPoints(objectPointsi, rvecs[i], tvecs[i], cameraMatrix, distCoeffsv2, imagePoints2);

                err = norml2(imagePointsi, imagePoints2);              // difference

                Size temp = objectPoints[i].size();
                int  n    = (int)temp.height;
                perViewErrors.Add(Mathf.Sqrt(err * err / (n)));                   // save for this view
                totalErr    += err * err;                                         // sum it up
                totalPoints += (int)n;
            }

            return(Mathf.Sqrt(totalErr / totalPoints));              // calculate the arithmetical mean
        }
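norml2 is an unshown helper; from its usage it is presumably a thin wrapper over Core.norm with the L2 flag. A sketch under that assumption:

        // Assumed implementation of the helper used above.
        float norml2(MatOfPoint2f a, MatOfPoint2f b)
        {
            return (float)Core.norm(a, b, Core.NORM_L2);
        }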
Example #11
        void Update()
        {
            if (!_cameraTexture || !_dirtyCameraTexture)
            {
                return;
            }

            // Init.
            AdaptResources();

            // Update mat texture ( If the texture looks correct in Unity, then it needs to be flipped for OpenCV ).
            TrackingToolsHelper.TextureToMat(_cameraTexture, !_flipCameraTexture, ref _camTexMat, ref _tempTransferColors, ref _tempTransferTexture);

            // Convert to grayscale if more than one channel, else copy (and convert bit rate if necessary).
            TrackingToolsHelper.ColorMatToLumanceMat(_camTexMat, _camTexGrayMat);

            // During testing, undistort before.
            if (_state == State.Testing)
            {
                Calib3d.undistort(_camTexGrayMat, _camTexGrayUndistortMat, _intrinsicsCalibrator.sensorMat, _intrinsicsCalibrator.distortionCoeffsMat);
            }

            // Find chessboard.
            Mat  chessboardSourceMat = _state == State.Calibrating ? _camTexGrayMat : _camTexGrayUndistortMat;
            bool foundBoard          = TrackingToolsHelper.FindChessboardCorners(chessboardSourceMat, _chessPatternSize, ref _chessCornersImageMat);

            if (foundBoard)
            {
                TrackingToolsHelper.DrawFoundPattern(chessboardSourceMat, _chessPatternSize, _chessCornersImageMat);
            }

            // During calibration, undistort after.
            if (_state == State.Calibrating)
            {
                if (_intrinsicsCalibrator.sampleCount > correctDistortionSampleCountThreshold)
                {
                    Calib3d.undistort(_camTexGrayMat, _camTexGrayUndistortMat, _intrinsicsCalibrator.sensorMat, _intrinsicsCalibrator.distortionCoeffsMat);
                }
                else
                {
                    _camTexGrayMat.copyTo(_camTexGrayUndistortMat);
                }
            }

            // State dependent updates.
            switch (_state)
            {
            case State.Calibrating: UpdateCalibration(foundBoard); break;

            case State.Testing: UpdateTesting(foundBoard); break;
            }

            // UI.
            Utils.fastMatToTexture2D(_camTexGrayUndistortMat, _processedCameraTexture);               // Will flip as default.
            _previewFlasher.Update();
            UpdateSampleCounterUI();

            _dirtyCameraTexture = false;
        }
Example #12
 public void OnUpdateIntrinsic(Mat cameraMatrix, Mat distCoeffs)
 {
     Clear();
     this.cameraMatrix = cameraMatrix;
     this.distCoeffs   = distCoeffs;
     newCameraMatrix   = Calib3d.getOptimalNewCameraMatrix(cameraMatrix, distCoeffs, videoCaptureController.RGBMat.size(), 0, videoCaptureController.RGBMat.size());
     Calib3d.initUndistortRectifyMap(this.cameraMatrix, this.distCoeffs, new Mat(), newCameraMatrix, videoCaptureController.RGBMat.size(), CvType.CV_32FC1, mapX, mapY);
 }
Example #13
        public static bool FindAsymmetricCirclesGrid(Mat grayTexMat, Vector2Int patternSize, ref MatOfPoint2f centerPoints)
        {
            Vector2IntToSize(patternSize, ref _tempSize);
            if (centerPoints == null)
            {
                centerPoints = new MatOfPoint2f();
            }

            return(Calib3d.findCirclesGrid(grayTexMat, _tempSize, centerPoints, Calib3d.CALIB_CB_ASYMMETRIC_GRID));
        }
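Vector2IntToSize is not shown; it presumably copies the integer pattern size into a reusable OpenCV Size so the per-frame call allocates nothing. A sketch:

        // Assumed helper; _tempSize is the cached Size referenced above.
        static void Vector2IntToSize(Vector2Int v, ref Size size)
        {
            if (size == null)
            {
                size = new Size();
            }
            size.width  = v.x;
            size.height = v.y;
        }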
Example #14
    private void InitUndistortion(int image_width, int image_height)
    {
        double[,] dist, camera, newCamera;
        if (image_width == 360 && image_height == 640)
        {
            dist = new double[, ] {
                { -0.26671108837192126, 0.07901518944619403, 0.00015571985524697281, 0.0006010997461545253, -0.01058019944621336 }
            };
            camera = new double[, ] {
                { 298.68813717678285, 0.0, 166.9743006819531 },
                { 0.0, 298.0500351325468, 309.6193055827986 },
                { 0.0, 0.0, 1.0 }
            };
            newCamera = new double[, ] {
                { 250.33721923828125, 0.0, 180.40019593524147 },
                { 0.0, 240.51262156168622, 310.58069246672255 },
                { 0.0, 0.0, 1.0 }
            };
        }
        else if (image_width == 240 && image_height == 426)
        {
            dist = new double[, ] {
                { -0.3410095060673586, 0.13663490491793673, 0.0029517164668326173, 0.0031498764503194074, -0.02582006181844203 }
            };
            camera = new double[, ] {
                { 226.1512419110776, 0.0, 109.29523311868192 },
                { 0.0, 226.23590938020163, 205.63347777512897 },
                { 0.0, 0.0, 1.0 }
            };
            newCamera = new double[, ] {
                { 183.4514923095703, 0.0, 110.754281606527 },
                { 0.0, 165.92552947998047, 202.29188376754882 },
                { 0.0, 0.0, 1.0 }
            };
        }
        else
        {
            throw new NotImplementedException();
        }

        Mat distCoeffs      = FillMat(dist);
        Mat cameraMatrix    = FillMat(camera);
        Mat newCameraMatrix = FillMat(newCamera);

        Mat mapx = new Mat();
        Mat mapy = new Mat();
        // compute undistortion mapping & cache result
        Mat identity = Mat.eye(3, 3, CvType.CV_32FC1);

        Calib3d.initUndistortRectifyMap(cameraMatrix, distCoeffs, identity, newCameraMatrix,
                                        new Size(image_width, image_height), CvType.CV_32FC1, mapx, mapy);

        undistort.mapx = mapx;
        undistort.mapy = mapy;
    }
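FillMat is another unshown helper; presumably it packs a rectangular double array into a CV_64FC1 Mat. A sketch under that assumption:

    // Assumed helper: copy a 2D double array into a CV_64FC1 Mat element by element.
    private static Mat FillMat(double[,] data)
    {
        int rows = data.GetLength(0);
        int cols = data.GetLength(1);
        Mat m = new Mat(rows, cols, CvType.CV_64FC1);
        for (int r = 0; r < rows; r++)
        {
            for (int c = 0; c < cols; c++)
            {
                m.put(r, c, data[r, c]);
            }
        }
        return m;
    }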
Example #15
    // Update is called once per frame
    void Update()
    {
        MatDisplay.SetCameraFoV(41.5f);

        Image cameraImage = CameraDevice.Instance.GetCameraImage(Image.PIXEL_FORMAT.RGBA8888);

        if (cameraImage == null)
        {
            return;
        }
        if (_cameraImageMat == null)
        {
            //First frame -> generate Mat with same dimensions as camera feed
            _cameraImageMat = new Mat(cameraImage.Height, cameraImage.Width, CvType.CV_8UC4);
        }
        _cameraImageMat.put(0, 0, cameraImage.Pixels); // transferring image data to Mat

        if (FingerPointTarget.GetComponent <ImageTargetBehaviour>().CurrentStatus != TrackableBehaviour.Status.TRACKED)
        {
            MatDisplay.DisplayMat(_cameraImageMat, MatDisplaySettings.FULL_BACKGROUND);
            return;
        }

        FindHomographyPoints(out var matDst, out var matObj);
        var H = Calib3d.findHomography(matObj, matDst);

        try
        {
            var bWMat                   = GetBWSkinColor();
            var fingerTipCoor           = FindFingerTip(bWMat);
            var fingerPointInWorldSpace = FingerPointInWorldSpace(fingerTipCoor);
            FingerPlane.position = fingerPointInWorldSpace;

            var colorPixelValue = FindPixelValue(_cameraImageMat, Color.position);
            var drawPixelValue  = FindPixelValue(bWMat, Draw.position);

            if ((int)drawPixelValue.First() == 255)
            {
                //Debug.Log($"{colorPixelValue[0]}, {colorPixelValue[1]}, {colorPixelValue[2]}");
                //Debug.Log("Found Draw");
                //draw at finger pos

                var camMask = PaintCircle(fingerTipCoor);
                DrawMaskOnCanvas(camMask, H, colorPixelValue);
            }
        }
        catch
        {
            // Finger detection can fail on individual frames; skip drawing this frame.
        }

        var blendTex = BlendMats(H, _cameraImageMat, _drawingPlaceMat);

        MatDisplay.DisplayMat(blendTex, MatDisplaySettings.FULL_BACKGROUND);
    }
        private void DetectARUcoMarker()
        {
            if (downscaleRatio == 1)
            {
                downScaleRgbaMat = rgbaMat4Thread;
            }
            else
            {
                Imgproc.resize(rgbaMat4Thread, downScaleRgbaMat, new Size(), 1.0 / downscaleRatio, 1.0 / downscaleRatio, Imgproc.INTER_LINEAR);
            }
            Imgproc.cvtColor(downScaleRgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

            // Detect markers and estimate Pose
            Aruco.detectMarkers(grayMat, dictionary, corners, ids, detectorParams, rejected);

            if (applyEstimationPose && ids.total() > 0)
            {
                Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                for (int i = 0; i < ids.total(); i++)
                {
                    //This example can display ARObject on only first detected marker.
                    if (i == 0)
                    {
                        // Position
                        double[] tvec = tvecs.get(i, 0);

                        // Rotation
                        double[] rv   = rvecs.get(i, 0);
                        Mat      rvec = new Mat(3, 1, CvType.CV_64FC1);
                        rvec.put(0, 0, rv [0]);
                        rvec.put(1, 0, rv [1]);
                        rvec.put(2, 0, rv [2]);
                        Calib3d.Rodrigues(rvec, rotMat);

                        transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvec [0]));
                        transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvec [1]));
                        transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)(tvec [2] / downscaleRatio)));

                        transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                        // Right-handed coordinates system (OpenCV) to left-handed one (Unity)
                        ARM = invertYM * transformationM;

                        // Apply Z axis inverted matrix.
                        ARM = ARM * invertZM;

                        hasUpdatedARTransformMatrix = true;

                        break;
                    }
                }
            }
        }
Example #17
        void Update()
        {
            if (Input.GetKeyDown(_interactibaleHotKey))
            {
                if (!interactable && _cameraTexture == null)
                {
                    Debug.LogWarning(logPrepend + "Missing camera texture.\n");
                    return;
                }
                interactable = !interactable;
            }

            if (!_cameraTexture)
            {
                return;
            }

            // Adapt resources.
            AdaptResources();

            if (_dirtyTexture)
            {
                // Undistort.
                if (_cameraTexture is WebCamTexture)
                {
                    Utils.webCamTextureToMat(_cameraTexture as WebCamTexture, _camTexMat, flip: false);
                }
                else if (_cameraTexture is Texture2D)
                {
                    Utils.fastTexture2DToMat(_cameraTexture as Texture2D, _camTexMat, flip: false);
                }
                else
                {
                    Debug.LogWarning(logPrepend + "Only Texture2D and WebCamTexture are supported.\n");
                    return;
                }
                TrackingToolsHelper.ColorMatToLumanceMat(_camTexMat, _camTexGrayMat);
                Calib3d.undistort(_camTexGrayMat, _camTexGrayUndistortMat, _cameraMatrix, _distCoeffs);
                Utils.fastMatToTexture2D(_camTexGrayUndistortMat, _undistortedCameraTexture);
                _dirtyTexture = false;
            }

            if (_interactable)
            {
                UpdateInteraction();
            }

            if (_dirtyPoints)
            {
                UpdateCameraTransform();
                _dirtyPoints = false;
            }
        }
Example #18
        private void EstimatePoseGridBoard(Mat rgbMat)
        {
            int valid = Aruco.estimatePoseBoard(corners, ids, gridBoard, camMatrix, distCoeffs, rvec, tvec);

            // if at least one board marker detected
            if (valid > 0)
            {
                // This example processes an RGB image, so the axis-color correspondences are X: blue, Y: green, Z: red (with OpenCV's usual BGR ordering they would be X: red, Y: green, Z: blue).
                Calib3d.drawFrameAxes(rgbMat, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);

                UpdateARObjectTransform(rvec, tvec);
            }
        }
Example #19
        private void OnDetectionDone()
        {
            DebugUtils.TrackTick();

            if (displayCameraPreview)
            {
                Imgproc.cvtColor(downScaleMat, rgbMat4preview, Imgproc.COLOR_GRAY2RGB);

                if (ids.total() > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat4preview, corners, ids, new Scalar(0, 255, 0));

                    if (applyEstimationPose)
                    {
                        for (int i = 0; i < ids.total(); i++)
                        {
                            using (Mat rvec = new Mat(rvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                                using (Mat tvec = new Mat(tvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                                {
                                    // This example processes an RGB image, so the axis-color correspondences are X: blue, Y: green, Z: red (with OpenCV's usual BGR ordering they would be X: red, Y: green, Z: blue).
                                    Calib3d.drawFrameAxes(rgbMat4preview, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
                                }
                        }
                    }
                }

                Utils.fastMatToTexture2D(rgbMat4preview, texture);
            }

            if (applyEstimationPose)
            {
                if (hasUpdatedARTransformMatrix)
                {
                    hasUpdatedARTransformMatrix = false;

                    // Apply the cameraToWorld matrix with the Z-axis inverted.
                    ARM = arCamera.cameraToWorldMatrix * invertZM * ARM;

                    if (enableLerpFilter)
                    {
                        arGameObject.SetMatrix4x4(ARM);
                    }
                    else
                    {
                        ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
                    }
                }
            }

            isDetecting = false;
        }
Example #20
        bool AdaptResources()
        {
            int w = _cameraTexture.width;
            int h = _cameraTexture.height;

            if (_processedCameraTexture != null && _processedCameraTexture.width == w && _processedCameraTexture.height == h)
            {
                return(true);
            }


            bool intrinsicsConversionSuccess = _cameraIntrinsics.ToOpenCV(ref _sensorMat, ref _distortionCoeffsMat, w, h);

            if (!intrinsicsConversionSuccess)
            {
                return(false);
            }
            _cameraIntrinsics.ApplyToCamera(_mainCamera);

            _projectorIntrinsicsCalibrator = new IntrinsicsCalibrator(w, h);

            _camTexGrayMat             = new Mat(h, w, CvType.CV_8UC1);
            _camTexGrayUndistortMat    = new Mat(h, w, CvType.CV_8UC1);
            _camTexGrayUndistortInvMat = new Mat(h, w, CvType.CV_8UC1);

            _processedCameraTexture      = new Texture2D(w, h, GraphicsFormat.R8_UNorm, 0, TextureCreationFlags.None);
            _processedCameraTexture.name = "ProcessedCameraTex";

            _arTexture      = new RenderTexture(w, h, 16, GraphicsFormat.R8G8B8A8_UNorm);
            _arTexture.name = "AR Texture";

            // Update circle pattern size.
            UpdateCirclePatternSize();

            // Create undistort map.
            Calib3d.initUndistortRectifyMap(_sensorMat, _distortionCoeffsMat, new Mat(), _sensorMat, new Size(_cameraTexture.width, _cameraTexture.height), CvType.CV_32FC1, _undistortMap1, _undistortMap2);

            // Start with a fixed projector FOV.
            //UpdateProjectorFOVManually( 1 );

            // Switch state.
            SwitchState(State.BlindCalibration);

            // Update UI.
            _processedCameraImage.texture = _processedCameraTexture;
            _arImage.texture = _arTexture;
            _cameraAspectFitter.aspectRatio = w / (float)h;
            _mainCamera.targetTexture       = _arTexture;

            return(true);
        }
        /// <summary>
        /// Gets the frontal face angle.
        /// </summary>
        /// <returns>The frontal face angle.</returns>
        /// <param name="points">Points.</param>
        public Vector3 getFrontalFaceAngle(List <Vector2> points)
        {
            if (points.Count != 68)
            {
                throw new ArgumentException("Invalid landmark_points."); // wrong count, not a null argument
            }

            if (camMatrix == null)
            {
                throw new ArgumentNullException("Invalid camMatrix.");
            }

            //normalize

            float   normScale = Math.Abs(points [30].y - points [8].y) / (normHeight / 2);
            Vector2 normDiff  = points [30] * normScale - new Vector2(normWidth / 2, normHeight / 2);

            for (int i = 0; i < points.Count; i++)
            {
                normPoints [i] = points [i] * normScale - normDiff;
            }
            //Debug.Log ("points[30] " + points[30]);
            //Debug.Log ("normPoints[30] " + normPoints[30]);
            //Debug.Log ("normScale " + normScale);
            //Debug.Log ("normDiff " + normDiff);

            imagePoints.fromArray(
                new Point((normPoints [38].x + normPoints [41].x) / 2, (normPoints [38].y + normPoints [41].y) / 2), //l eye
                new Point((normPoints [43].x + normPoints [46].x) / 2, (normPoints [43].y + normPoints [46].y) / 2), //r eye
                new Point(normPoints [33].x, normPoints [33].y),                                                     //nose
                new Point(normPoints [48].x, normPoints [48].y),                                                     //l mouth
                new Point(normPoints [54].x, normPoints [54].y),                                                     //r mouth                         ,
                new Point(normPoints [0].x, normPoints [0].y),                                                       //l ear
                new Point(normPoints [16].x, normPoints [16].y)                                                      //r ear
                );


            Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

            Calib3d.Rodrigues(rvec, rotM);

            transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0) [0], (float)rotM.get(0, 1) [0], (float)rotM.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
            transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0) [0], (float)rotM.get(1, 1) [0], (float)rotM.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
            transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0) [0], (float)rotM.get(2, 1) [0], (float)rotM.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

            ARM = invertYM * transformationM * invertZM;

            return(ExtractRotationFromMatrix(ref ARM).eulerAngles);
        }
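An illustrative usage sketch: feed in 68 dlib-style landmarks and gate logic on how frontal the face is (the landmark detector call and threshold are hypothetical):

        // Hypothetical usage; DetectLandmarks stands in for any 68-point detector.
        List <Vector2> landmarks = DetectLandmarks(frame);
        Vector3 angles = getFrontalFaceAngle(landmarks);
        float pitch = Mathf.DeltaAngle(0f, angles.x); // wrap to [-180, 180]
        float yaw   = Mathf.DeltaAngle(0f, angles.y);
        bool isRoughlyFrontal = Mathf.Abs(pitch) < 15f && Mathf.Abs(yaw) < 15f;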
Example #22
    // Warps cached_homoMat to outMat
    void HomographyTransform(ref Mat homoMat)
    {
        Corner_AR_Controller Homo_Controller = m_ARSessionManager.GetComponent <Corner_AR_Controller>();

        Point[] c2_scrpoints = Homo_Controller.GetScreenpoints(false);

        MatOfPoint2f initPoints = new MatOfPoint2f(regPointArray);
        MatOfPoint2f currPoints = new MatOfPoint2f(c2_scrpoints);

        Mat H = Calib3d.findHomography(initPoints, currPoints);

        Imgproc.warpPerspective(homoMat, outMat, H, new Size(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT));
        Core.flip(outMat, outMat, 0);
    }
Example #23
        void InitializeCameraMatrix(Mat inputImageMat, double fx, double fy, double cx, double cy, MatOfDouble distCoeffs)
        {
            Debug.Log("******************************");

            float width  = inputImageMat.width();
            float height = inputImageMat.height();

            // Set camera param
            _CamMatrix = new Mat(3, 3, CvType.CV_64FC1);
            _CamMatrix.put(0, 0, fx);
            _CamMatrix.put(0, 1, 0);
            _CamMatrix.put(0, 2, cx);
            _CamMatrix.put(1, 0, 0);
            _CamMatrix.put(1, 1, fy);
            _CamMatrix.put(1, 2, cy);
            _CamMatrix.put(2, 0, 0);
            _CamMatrix.put(2, 1, 0);
            _CamMatrix.put(2, 2, 1.0f);
            Debug.Log("CamMatrix " + _CamMatrix.dump());

            _DistCoeffs = distCoeffs;
            Debug.Log("DistCoeffs " + _DistCoeffs.dump());

            // Calibration camera
            Size   imageSize      = new Size(width, height);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(_CamMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("ImageSize " + imageSize.ToString());
            Debug.Log("ApertureWidth " + apertureWidth);
            Debug.Log("ApertureHeight " + apertureHeight);
            Debug.Log("Fovx " + fovx [0]);
            Debug.Log("Fovy " + fovy [0]);
            Debug.Log("FocalLength " + focalLength [0]);
            Debug.Log("PrincipalPoint " + principalPoint.ToString());
            Debug.Log("Aspectratio " + aspectratio [0]);

            Debug.Log("******************************");
        }
    private static Mat GetRotationMatrixFromRotationVector(Mat rotation_)
    {
        Mat rotationFull = new Mat(3, 3, CvType.CV_64FC1);

        // get full rotation matrix from rotation vector
        Calib3d.Rodrigues(rotation_, rotationFull);
        double[] LHSflipBackMatrix = new double[] { 1.0, 0.0, 0.0,
                                                    0.0, 1.0, 0.0,
                                                    0.0, 0.0, -1.0 };
        Mat LHSflipBackMatrixM = new Mat(3, 3, CvType.CV_64FC1);

        LHSflipBackMatrixM.put(0, 0, LHSflipBackMatrix);
        Mat rotLHS = rotationFull * LHSflipBackMatrixM; // invert Z (as we did before when savings points) to get from RHS -> LHS

        return(rotLHS);
    }
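A hedged follow-up sketch turning the returned left-handed rotation matrix into a Unity Quaternion by reading out its basis columns (the column-to-axis mapping is an assumption about the intended convention):

    // Hypothetical usage; rvec would come from e.g. Calib3d.solvePnP.
    Mat rot = GetRotationMatrixFromRotationVector(rvec);
    Vector3 forward = new Vector3((float)rot.get(0, 2) [0], (float)rot.get(1, 2) [0], (float)rot.get(2, 2) [0]);
    Vector3 up      = new Vector3((float)rot.get(0, 1) [0], (float)rot.get(1, 1) [0], (float)rot.get(2, 1) [0]);
    Quaternion q    = Quaternion.LookRotation(forward, up);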
Example #25
    private void refresh()
    {
        var defaultPoints = defaultCornerPoints.Select(e => e.point).ToArray();
        var destPoints    = CornerPoints.Select(e => e.point).ToArray();

        using (var defaultCornerMat = new MatOfPoint2f(defaultPoints))
            using (var destCornerMat = new MatOfPoint2f(destPoints))
                using (var defaultMat = new MatOfPoint2f(defaultVertices.Select(e => new Point(e.x, e.y)).ToArray()))
                    using (var destMat = new MatOfPoint2f(meshFilter.mesh.vertices.Select(e => new Point(e.x, e.y)).ToArray()))
                    {
                        var h = Calib3d.findHomography(defaultCornerMat, destCornerMat);
                        OpenCVForUnity.CoreModule.Core.perspectiveTransform(defaultMat, destMat, h);
                        var vertices = destMat.toList().Select(e => new Vector3((float)e.x, (float)e.y, 0f)).ToList();
                        meshFilter.mesh.SetVertices(vertices);
                    }
    }
Example #26
    private void Refresh()
    {
        if (isFisheye)
        {
            Calib3d.fisheye_undistortImage(videoCaptureController.RGBMat, rgbMat, cameraMatrix, distCoeffs, newCameraMatrix);
        }
        else
        {
            Imgproc.remap(videoCaptureController.RGBMat, rgbMat, mapX, mapY, Imgproc.INTER_LINEAR);
            //Calib3d.undistort(videoCaptureController.RGBMat, rgbMat, cameraMatrix, distCoeffs, newCameraMatrix);
        }

        Core.flip(rgbMat, rgbMat, 0);
        Utils.fastMatToTexture2D(rgbMat, texture);
    }
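Note the asymmetry with the pinhole branch: the fisheye path re-undistorts from scratch each frame, even though fisheye_initUndistortRectifyMap already filled mapX/mapY in the initialization example above. If those maps are valid, the fisheye branch could remap too (a sketch under that assumption):

        if (isFisheye)
        {
            // mapX/mapY were precomputed by Calib3d.fisheye_initUndistortRectifyMap.
            Imgproc.remap(videoCaptureController.RGBMat, rgbMat, mapX, mapY, Imgproc.INTER_LINEAR);
        }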
    void ComputerVisionAlgo(IntPtr greyscale)
    {
        Utils.copyToMat(greyscale, imageMat);

        // Inverting Image pixel values
        MatOfKeyPoint keyMat = new MatOfKeyPoint();

        inMat = imageMat;
        // inMat = (Mat.ones(imageMat.rows(), imageMat.cols(), CvType.CV_8UC1) * 255) - imageMat;

        // Creating Detector (Yellow Circle)
        // MatOfKeyPoint keyMat = new MatOfKeyPoint();
        // SimpleBlobDetector detector = SimpleBlobDetector.create();

        double[] homo_points = m_ARSessionManager.GetComponent <AR_Controller>().GetHomopoints();

        // Display Homography Points
        // outMat = inMat;
        // Imgproc.circle(outMat, new Point(homo_points[0], homo_points[1]), 5, new Scalar(0.0, 0.0, 255.0));
        // Imgproc.circle(outMat, new Point(homo_points[2], homo_points[3]), 5, new Scalar(0.0, 0.0, 255.0));
        // Imgproc.circle(outMat, new Point(homo_points[4], homo_points[5]), 5, new Scalar(0.0, 0.0, 255.0));
        // Imgproc.circle(outMat, new Point(homo_points[6], homo_points[7]), 5, new Scalar(0.0, 0.0, 255.0));

        // Creating MatOfPoint2f arrays for Homography Points
        Point[] srcPointArray = new Point[4];
        for (int i = 0; i < 4; i++)
        {
            srcPointArray[i] = new Point(homo_points[2 * i], homo_points[(2 * i) + 1]);
        }

        Point[] dstPointArray = new Point[4];
        dstPointArray[0] = new Point(0.0, HOMOGRAPHY_HEIGHT);
        dstPointArray[1] = new Point(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT);
        dstPointArray[2] = new Point(0.0, 0.0);
        dstPointArray[3] = new Point(HOMOGRAPHY_WIDTH, 0.0);

        MatOfPoint2f srcPoints = new MatOfPoint2f(srcPointArray);
        MatOfPoint2f dstPoints = new MatOfPoint2f(dstPointArray);

        // Creating the H Matrix
        Mat Homo_Mat = Calib3d.findHomography(srcPoints, dstPoints);

        Debug.Log(Homo_Mat);
        Debug.Log(outMat.size());

        Imgproc.warpPerspective(inMat, outMat, Homo_Mat, new Size(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT));
    }
    void ComputerVisionAlgo(IntPtr greyscale)
    {
        // Utils.copyToMat(greyscale, imageMat);
        inMat = cached_initMat;

        Plane_AR_Controller Homo_Controller = m_ARSessionManager.GetComponent <Plane_AR_Controller>();

        Point[] c1_scrpoints = Homo_Controller.GetScreenpoints(true);
        Point[] c2_scrpoints = Homo_Controller.GetScreenpoints(false);

        MatOfPoint2f initPoints = new MatOfPoint2f(c1_scrpoints);
        MatOfPoint2f currPoints = new MatOfPoint2f(c2_scrpoints);

        Mat H = Calib3d.findHomography(initPoints, currPoints);

        Imgproc.warpPerspective(inMat, outMat, H, new Size(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT));
    }
Example #29
    public ImageObject warpImage(List <ImageObject> imageList)
    {
        Texture2D imgTexture = base64ImageToTexture(imageList[0].image);

        //Find the matching keypoints from the winner list.
        MatOfPoint2f queryPoints = new MatOfPoint2f();
        MatOfPoint2f matchPoints = new MatOfPoint2f();

        List <Point> queryPointsList = new List <Point>();
        List <Point> matchPointsList = new List <Point>();


        foreach (MatOfDMatch match in imageList[1].matches)
        {
            DMatch[] arrayDmatch = match.toArray();
            queryPointsList.Add(imageList[0].keyPoints.toList()[arrayDmatch[0].queryIdx].pt);
            matchPointsList.Add(imageList[1].keyPoints.toList()[arrayDmatch[0].trainIdx].pt);
        }
        queryPoints.fromList(queryPointsList);
        matchPoints.fromList(matchPointsList);

        //Calculate the homography of the best matching image
        //Mat homography = Calib3d.findHomography(queryPoints, matchPoints, Calib3d.RANSAC, 5.0);
        Mat homography    = Calib3d.findHomography(queryPoints, matchPoints, Calib3d.RANSAC, 3.0);
        Mat tempResultImg = new Mat();

        Imgproc.warpPerspective(imageList[0].imageMat, tempResultImg, homography, imageList[1].imageMat.size());

        //Show (red) annotations only
        ImageProcessor imageProcessor = new ImageProcessor();
        Mat            resultImg      = imageProcessor.ShowAnnotationsOnly(tempResultImg);

        //Show image
        Texture2D tempTexture = new Texture2D(imageList[1].imageMat.cols(), imageList[1].imageMat.rows(), TextureFormat.RGBA32, false);

        Utils.matToTexture2D(resultImg, tempTexture);


        //Make black pixels transparent
        Texture2D texture = imageProcessor.removeColor(Color.black, tempTexture);

        return(new ImageObject(Convert.ToBase64String(texture.EncodeToPNG()), imageList[1].index));

        //return new ImageObject(Convert.ToBase64String(tempTexture.EncodeToPNG()), imageList[1].index);
    }
Example #30
    void Rectify(ref Point[] facePointArray, ref Mat cachedMat)
    {
        regPointArray[0] = new Point(0.0, HOMOGRAPHY_HEIGHT);
        regPointArray[1] = new Point(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT);
        regPointArray[2] = new Point(0.0, 0.0);
        regPointArray[3] = new Point(HOMOGRAPHY_WIDTH, 0.0);

        MatOfPoint2f srcPoints = new MatOfPoint2f(facePointArray);
        MatOfPoint2f regPoints = new MatOfPoint2f(regPointArray);

        Debug.LogFormat("Rectify Face Points; {0} \n {1} \n {2} \n {3}",
                        facePointArray[0], facePointArray[1], facePointArray[2], facePointArray[3]);

        // Creating the H Matrix
        Mat Homo_Mat = Calib3d.findHomography(srcPoints, regPoints);

        Imgproc.warpPerspective(imageMat, cachedMat, Homo_Mat, new Size(HOMOGRAPHY_WIDTH, HOMOGRAPHY_HEIGHT));
    }