Example #1
        void AdaptResources()
        {
            int w = _cameraTexture.width;
            int h = _cameraTexture.height;

            if (_undistortedCameraTexture != null && _undistortedCameraTexture.width == w && _undistortedCameraTexture.height == h)
            {
                return;
            }

            _camTexMat              = TrackingToolsHelper.GetCompatibleMat(_cameraTexture);
            _camTexGrayMat          = new Mat(h, w, CvType.CV_8UC1);
            _camTexGrayUndistortMat = new Mat(h, w, CvType.CV_8UC1);

            _undistortedCameraTexture      = new Texture2D(w, h, GraphicsFormat.R8_UNorm, 0, TextureCreationFlags.None);
            _undistortedCameraTexture.name = "UndistortedCameraTex";

            _intrinsics.ToOpenCV(ref _cameraMatrix, ref _distCoeffs, w, h);

            // UI.
            _rawImageUI.texture         = _undistortedCameraTexture;
            _aspectFitterUI.aspectRatio = w / (float)h;

            // Forward the updated intrinsics to the AR camera.
            if (_arCamera)
            {
                _intrinsics.ApplyToCamera(_arCamera);
            }
        }
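A minimal sketch of how AdaptResources() could be called each frame, assuming OpenCVForUnity's Utils, Imgproc and Calib3d APIs, a Texture2D camera source and an RGBA-compatible _camTexMat; the OnNewFrame method name is illustrative, not part of the original class.

        void OnNewFrame()
        {
            AdaptResources();

            // Copy the camera texture into an OpenCV mat and convert to grayscale (assumes an RGBA-compatible mat).
            Utils.texture2DToMat( _cameraTexture, _camTexMat );
            Imgproc.cvtColor( _camTexMat, _camTexGrayMat, Imgproc.COLOR_RGBA2GRAY );

            // Undistort using the intrinsics prepared in AdaptResources().
            Calib3d.undistort( _camTexGrayMat, _camTexGrayUndistortMat, _cameraMatrix, _distCoeffs );

            // Upload the undistorted image to the texture shown by the UI RawImage.
            Utils.fastMatToTexture2D( _camTexGrayUndistortMat, _undistortedCameraTexture );
        }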
Example #2
        bool AdaptResources()
        {
            int w = _cameraTexture.width;
            int h = _cameraTexture.height;

            if (_processedCameraTexture != null && _processedCameraTexture.width == w && _processedCameraTexture.height == h)
            {
                return true;
            }

            bool intrinsicsConversionSuccess = _cameraIntrinsics.ToOpenCV(ref _sensorMat, ref _distortionCoeffsMat, w, h);

            if (!intrinsicsConversionSuccess)
            {
                return false;
            }
            _cameraIntrinsics.ApplyToCamera(_mainCamera);

            _projectorIntrinsicsCalibrator = new IntrinsicsCalibrator(w, h);

            _camTexGrayMat             = new Mat(h, w, CvType.CV_8UC1);
            _camTexGrayUndistortMat    = new Mat(h, w, CvType.CV_8UC1);
            _camTexGrayUndistortInvMat = new Mat(h, w, CvType.CV_8UC1);

            _processedCameraTexture      = new Texture2D(w, h, GraphicsFormat.R8_UNorm, 0, TextureCreationFlags.None);
            _processedCameraTexture.name = "ProcessedCameraTex";

            _arTexture      = new RenderTexture(w, h, 16, GraphicsFormat.R8G8B8A8_UNorm);
            _arTexture.name = "AR Texture";

            // Update circle pattern size.
            UpdateCirclePatternSize();

            // Create undistort map.
            Calib3d.initUndistortRectifyMap(_sensorMat, _distortionCoeffsMat, new Mat(), _sensorMat, new Size(_cameraTexture.width, _cameraTexture.height), CvType.CV_32FC1, _undistortMap1, _undistortMap2);

            // Start with a fixed projector FOV.
            //UpdateProjectorFOVManually( 1 );

            // Switch state.
            SwitchState(State.BlindCalibration);

            // Update UI.
            _processedCameraImage.texture = _processedCameraTexture;
            _arImage.texture = _arTexture;
            _cameraAspectFitter.aspectRatio = w / (float)h;
            _mainCamera.targetTexture       = _arTexture;

            return true;
        }
Example #3
        public bool UpdateExtrinsics(MatOfPoint3f patternPointsWorldMat, MatOfPoint2f patternPointsImageMat, Intrinsics intrinsics, int imageWidth, int imageHeight)
        {
            intrinsics.ToOpenCV(ref _sensorMatrix, imageWidth, imageHeight);

            // In order to match OpenCV's pixel space (zero at top-left) and Unity's camera space (up is positive), we flip the sensor matrix.
            _sensorMatrix.WriteValue(-_sensorMatrix.ReadValue(1, 1), 1, 1);                  // fy
            _sensorMatrix.WriteValue(imageHeight - _sensorMatrix.ReadValue(1, 2), 1, 2);     // cy

            // Find pattern pose, relative to camera (at zero position) using solvePnP.
            _isValid = Calib3d.solvePnP(patternPointsWorldMat, patternPointsImageMat, _sensorMatrix, _noDistCoeffs, _rotationVecMat, _translationVecMat);

            if (_isValid)
            {
                _extrinsics.UpdateFromOpenCvSolvePnp(_rotationVecMat, _translationVecMat);
            }

            return _isValid;
        }
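An illustrative call site for UpdateExtrinsics(), assuming a chessboard has already been detected in an undistorted camera image; camTexGrayUndistortMat, chessboardCornersWorld, intrinsics and extrinsicsCalibrator are placeholder names, not identifiers from the source.

            // Find inner chessboard corners (7x10 pattern assumed) and update the extrinsics from them.
            MatOfPoint2f cornersImage = new MatOfPoint2f();
            bool found = Calib3d.findChessboardCorners( camTexGrayUndistortMat, new Size( 7, 10 ), cornersImage );
            if( found )
            {
                extrinsicsCalibrator.UpdateExtrinsics( chessboardCornersWorld, cornersImage, intrinsics, camTexGrayUndistortMat.width(), camTexGrayUndistortMat.height() );
            }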
Example #4
        bool AdaptResources()
        {
            int w = _cameraTexture.width;
            int h = _cameraTexture.height;

            if (_processedCameraTexture != null && _processedCameraTexture.width == w && _processedCameraTexture.height == h)
            {
                return true;    // Already adapted.
            }
            // Get and apply intrinsics.
            bool success = _intrinsics.ToOpenCV(ref _sensorMat, ref _distortionCoeffsMat, w, h);

            if (!success)
            {
                return false;
            }

            _intrinsics.ApplyToCamera(_mainCamera);

            // Create mats and textures.
            _camTexGrayMat                   = new Mat(h, w, CvType.CV_8UC1);
            _camTexGrayUndistortMat          = new Mat(h, w, CvType.CV_8UC1);
            _processedCameraTexture          = new Texture2D(w, h, GraphicsFormat.R8_UNorm, 0, TextureCreationFlags.None);
            _processedCameraTexture.name     = "UndistortedCameraTex";
            _processedCameraTexture.wrapMode = TextureWrapMode.Repeat;
            _arTexture      = new RenderTexture(w, h, 16, GraphicsFormat.R8G8B8A8_UNorm);
            _arTexture.name = "AR Texture";

            // Create undistort map (sensorMat remains unchanged even though it is passed as newCameraMatrix).
            Calib3d.initUndistortRectifyMap(_sensorMat, _distortionCoeffsMat, new Mat(), _sensorMat, new Size(_cameraTexture.width, _cameraTexture.height), CvType.CV_32FC1, _undistortMap1, _undistortMap2);

            // Update UI.
            _aspectFitter.aspectRatio     = w / (float)h;
            _processedCameraImage.texture = _processedCameraTexture;
            _arImage.texture          = _arTexture;
            _mainCamera.targetTexture = _arTexture;

            // Log.
            Debug.Log(logPrepend + "Tracking chessboard in camera image at " + w + "x" + h + "\n");

            return true;
        }
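The maps filled by initUndistortRectifyMap are typically consumed once per frame with Imgproc.remap; a rough sketch under that assumption (the surrounding per-frame method is not part of this example):

            // Per-frame undistortion using the precomputed maps.
            Imgproc.remap( _camTexGrayMat, _camTexGrayUndistortMat, _undistortMap1, _undistortMap2, Imgproc.INTER_LINEAR );
            Utils.fastMatToTexture2D( _camTexGrayUndistortMat, _processedCameraTexture );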
Example #5
        /// <summary>
        /// Update the extrinsics of projector relative to camera.
        /// </summary>
        /// <param name="cameraIntrinsics"></param>
        /// <param name="projectorIntrinsics"></param>
        /// <param name="textureSize"></param>
        public void Update(Intrinsics cameraIntrinsics, Intrinsics projectorIntrinsics, Size textureSize)
        {
            int w = (int)textureSize.width;
            int h = (int)textureSize.height;

            cameraIntrinsics.ToOpenCV(ref _cameraSensorMat, w, h);
            projectorIntrinsics.ToOpenCV(ref _projectorSensorMat, w, h);

            // In order to match OpenCV's pixel space (zero at top-left) and Unity's camera space (up is positive), we flip the sensor matrix.

            _cameraSensorMat.WriteValue(-_cameraSensorMat.ReadValue(1, 1), 1, 1);                           // fy
            _cameraSensorMat.WriteValue(textureSize.height - _cameraSensorMat.ReadValue(1, 2), 1, 2);       // cy
            _projectorSensorMat.WriteValue(-_projectorSensorMat.ReadValue(1, 1), 1, 1);                     // fy
            _projectorSensorMat.WriteValue(textureSize.height - _projectorSensorMat.ReadValue(1, 2), 1, 2); // cy

            int flag = 0;

            // Don't recompute and change intrinsics parameters.
            flag |= Calib3d.CALIB_FIX_INTRINSIC;

            // Don't recompute distortions, ignore them. We assume the incoming points have already been undistorted.
            flag |=
                Calib3d.CALIB_FIX_TANGENT_DIST |
                Calib3d.CALIB_FIX_K1 |
                Calib3d.CALIB_FIX_K2 |
                Calib3d.CALIB_FIX_K3 |
                Calib3d.CALIB_FIX_K4 |
                Calib3d.CALIB_FIX_K5;

            // Compute!
            Calib3d.stereoCalibrate
            (
                _patternWorldSamples, _cameraPatternImageSamples, _projectorPatternImageSamples,
                _cameraSensorMat, _noDistCoeffs,
                _projectorSensorMat, _noDistCoeffs,
                textureSize,
                _rotation3x3Mat, _translationVecMat, _essentialMat, _fundamentalMat,
                flag
            );

            _extrinsics.UpdateFromOpenCvStereoCalibrate(_rotation3x3Mat, _translationVecMat);
        }
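For reference, a rough sketch of reading the stereoCalibrate output into a Unity pose, assuming a row-major 3x3 rotation in _rotation3x3Mat and a 3x1 translation in _translationVecMat, and ignoring the handedness flip that UpdateFromOpenCvStereoCalibrate presumably performs:

            // Translation of the projector relative to the camera.
            Vector3 translation = new Vector3(
                (float)_translationVecMat.get( 0, 0 )[ 0 ],
                (float)_translationVecMat.get( 1, 0 )[ 0 ],
                (float)_translationVecMat.get( 2, 0 )[ 0 ] );

            // Third and second columns of the rotation matrix interpreted as forward and up directions.
            Vector3 forward = new Vector3(
                (float)_rotation3x3Mat.get( 0, 2 )[ 0 ],
                (float)_rotation3x3Mat.get( 1, 2 )[ 0 ],
                (float)_rotation3x3Mat.get( 2, 2 )[ 0 ] );
            Vector3 up = new Vector3(
                (float)_rotation3x3Mat.get( 0, 1 )[ 0 ],
                (float)_rotation3x3Mat.get( 1, 1 )[ 0 ],
                (float)_rotation3x3Mat.get( 2, 1 )[ 0 ] );
            Quaternion rotation = Quaternion.LookRotation( forward, up );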