/// <summary>
/// Re-initializes the undistortion settings when the texture to be corrected is initialized.
/// Rebuilds the working RGB Mat, the output Texture2D, and the undistortion remap tables.
/// </summary>
/// <param name="sender">Texture holder that raised the change event (unused here).</param>
/// <param name="texture">Newly initialized source texture; only its dimensions are read.</param>
private void VideoCaptureController_ChangeTextureEvent(TextureHolderBase sender, Texture texture)
{
    // Release the previous native buffer before reallocating: OpenCV Mats wrap
    // unmanaged memory, so overwriting the reference without Dispose() leaks it.
    if (rgbMat != null)
    {
        rgbMat.Dispose();
    }
    rgbMat = new Mat(texture.height, texture.width, CvType.CV_8UC3);

    // Recreate the output texture only when the resolution actually changed.
    if (this.texture != null && (this.texture.width != texture.width || this.texture.height != texture.height))
    {
        DestroyImmediate(this.texture);
        this.texture = null;
    }
    if (this.texture == null)
    {
        this.texture = new Texture2D(texture.width, texture.height, TextureFormat.RGB24, false);
    }

    // alpha = 0: scale the new camera matrix so only valid (non-black) pixels remain.
    newCameraMatrix = Calib3d.getOptimalNewCameraMatrix(cameraMatrix, distCoeffs, videoCaptureController.RGBMat.size(), 0, videoCaptureController.RGBMat.size());

    // An empty Mat as the rectification transform means "identity / no rectification".
    // Scope it with using so the temporary native allocation is freed after the call.
    using (Mat rectification = new Mat())
    {
        if (isFisheye)
        {
            Calib3d.fisheye_initUndistortRectifyMap(this.cameraMatrix, this.distCoeffs, rectification, newCameraMatrix, videoCaptureController.RGBMat.size(), CvType.CV_32FC1, mapX, mapY);
        }
        else
        {
            Calib3d.initUndistortRectifyMap(this.cameraMatrix, this.distCoeffs, rectification, newCameraMatrix, videoCaptureController.RGBMat.size(), CvType.CV_32FC1, mapX, mapY);
        }
    }

    // OnTextureInitialized(GetTexture());
}
/// <summary>
/// Applies updated camera intrinsics and rebuilds the undistortion maps.
/// </summary>
/// <param name="cameraMatrix">3x3 intrinsic camera matrix.</param>
/// <param name="distCoeffs">Distortion coefficients matching the camera model.</param>
public void OnUpdateIntrinsic(Mat cameraMatrix, Mat distCoeffs)
{
    Clear();
    this.cameraMatrix = cameraMatrix;
    this.distCoeffs = distCoeffs;

    // alpha = 0: scale the new camera matrix so only valid (non-black) pixels remain.
    newCameraMatrix = Calib3d.getOptimalNewCameraMatrix(cameraMatrix, distCoeffs, videoCaptureController.RGBMat.size(), 0, videoCaptureController.RGBMat.size());

    // Dispose the temporary identity/rectification Mat after the native call.
    using (Mat rectification = new Mat())
    {
        // Mirror the fisheye/pinhole branch used in the texture-change handler:
        // previously this method always used the pinhole map builder, producing
        // wrong maps when isFisheye is set.
        if (isFisheye)
        {
            Calib3d.fisheye_initUndistortRectifyMap(this.cameraMatrix, this.distCoeffs, rectification, newCameraMatrix, videoCaptureController.RGBMat.size(), CvType.CV_32FC1, mapX, mapY);
        }
        else
        {
            Calib3d.initUndistortRectifyMap(this.cameraMatrix, this.distCoeffs, rectification, newCameraMatrix, videoCaptureController.RGBMat.size(), CvType.CV_32FC1, mapX, mapY);
        }
    }
}
/// <summary>
/// Sets up the cached undistortion remap tables from hard-coded calibration data.
/// Only two capture resolutions are supported; anything else throws.
/// </summary>
/// <param name="image_width">Width of the camera image in pixels.</param>
/// <param name="image_height">Height of the camera image in pixels.</param>
/// <exception cref="NotImplementedException">Thrown for unsupported resolutions.</exception>
private void InitUndistortion(int image_width, int image_height)
{
    double[,] distData;
    double[,] cameraData;
    double[,] newCameraData;

    bool is360x640 = image_width == 360 && image_height == 640;
    bool is240x426 = image_width == 240 && image_height == 426;

    if (is360x640)
    {
        // Calibration values measured for the 360x640 capture mode.
        distData = new double[, ] { { -0.26671108837192126, 0.07901518944619403, 0.00015571985524697281, 0.0006010997461545253, -0.01058019944621336 } };
        cameraData = new double[, ] { { 298.68813717678285, 0.0, 166.9743006819531 }, { 0.0, 298.0500351325468, 309.6193055827986 }, { 0.0, 0.0, 1.0 } };
        newCameraData = new double[, ] { { 250.33721923828125, 0.0, 180.40019593524147 }, { 0.0, 240.51262156168622, 310.58069246672255 }, { 0.0, 0.0, 1.0 } };
    }
    else if (is240x426)
    {
        // Calibration values measured for the 240x426 capture mode.
        distData = new double[, ] { { -0.3410095060673586, 0.13663490491793673, 0.0029517164668326173, 0.0031498764503194074, -0.02582006181844203 } };
        cameraData = new double[, ] { { 226.1512419110776, 0.0, 109.29523311868192 }, { 0.0, 226.23590938020163, 205.63347777512897 }, { 0.0, 0.0, 1.0 } };
        newCameraData = new double[, ] { { 183.4514923095703, 0.0, 110.754281606527 }, { 0.0, 165.92552947998047, 202.29188376754882 }, { 0.0, 0.0, 1.0 } };
    }
    else
    {
        throw new NotImplementedException();
    }

    Mat distCoeffs = FillMat(distData);
    Mat cameraMatrix = FillMat(cameraData);
    Mat newCameraMatrix = FillMat(newCameraData);

    // Build the remap tables once and cache them for later per-frame use.
    Mat undistortMapX = new Mat();
    Mat undistortMapY = new Mat();
    Mat rectification = Mat.eye(3, 3, CvType.CV_32FC1);
    Calib3d.initUndistortRectifyMap(cameraMatrix, distCoeffs, rectification, newCameraMatrix, new Size(image_width, image_height), CvType.CV_32FC1, undistortMapX, undistortMapY);

    undistort.mapx = undistortMapX;
    undistort.mapy = undistortMapY;
}
/// <summary>
/// (Re)creates all camera-resolution-dependent resources and switches to the
/// blind-calibration state. Returns true when resources are ready, false when
/// the camera intrinsics could not be converted.
/// </summary>
bool AdaptResources()
{
    int width = _cameraTexture.width;
    int height = _cameraTexture.height;

    // Already sized for the current camera resolution — nothing to rebuild.
    bool alreadyAdapted =
        _processedCameraTexture != null &&
        _processedCameraTexture.width == width &&
        _processedCameraTexture.height == height;
    if (alreadyAdapted)
    {
        return true;
    }

    // Convert and apply camera intrinsics; bail out if conversion fails.
    if (!_cameraIntrinsics.ToOpenCV(ref _sensorMat, ref _distortionCoeffsMat, width, height))
    {
        return false;
    }
    _cameraIntrinsics.ApplyToCamera(_mainCamera);

    _projectorIntrinsicsCalibrator = new IntrinsicsCalibrator(width, height);

    // Grayscale working mats (OpenCV uses rows x cols, i.e. height x width).
    _camTexGrayMat = new Mat(height, width, CvType.CV_8UC1);
    _camTexGrayUndistortMat = new Mat(height, width, CvType.CV_8UC1);
    _camTexGrayUndistortInvMat = new Mat(height, width, CvType.CV_8UC1);

    _processedCameraTexture = new Texture2D(width, height, GraphicsFormat.R8_UNorm, 0, TextureCreationFlags.None);
    _processedCameraTexture.name = "ProcessedCameraTex";

    _arTexture = new RenderTexture(width, height, 16, GraphicsFormat.R8G8B8A8_UNorm);
    _arTexture.name = "AR Texture";

    // Update circle pattern size.
    UpdateCirclePatternSize();

    // Create undistort map (empty Mat = identity rectification, sensor matrix reused as new camera matrix).
    Calib3d.initUndistortRectifyMap(_sensorMat, _distortionCoeffsMat, new Mat(), _sensorMat, new Size(width, height), CvType.CV_32FC1, _undistortMap1, _undistortMap2);

    // Switch state.
    SwitchState(State.BlindCalibration);

    // Update UI.
    _processedCameraImage.texture = _processedCameraTexture;
    _arImage.texture = _arTexture;
    _cameraAspectFitter.aspectRatio = width / (float)height;
    _mainCamera.targetTexture = _arTexture;

    return true;
}
/// <summary>
/// (Re)creates all camera-resolution-dependent resources for chessboard
/// tracking. Returns true when resources are ready, false when the camera
/// intrinsics could not be converted.
/// </summary>
bool AdaptResources()
{
    int width = _cameraTexture.width;
    int height = _cameraTexture.height;

    // Already sized for the current camera resolution — nothing to rebuild.
    bool alreadyAdapted =
        _processedCameraTexture != null &&
        _processedCameraTexture.width == width &&
        _processedCameraTexture.height == height;
    if (alreadyAdapted)
    {
        return true;
    }

    // Get and apply intrinsics; bail out if conversion fails.
    if (!_intrinsics.ToOpenCV(ref _sensorMat, ref _distortionCoeffsMat, width, height))
    {
        return false;
    }
    _intrinsics.ApplyToCamera(_mainCamera);

    // Create mats and textures (OpenCV uses rows x cols, i.e. height x width).
    _camTexGrayMat = new Mat(height, width, CvType.CV_8UC1);
    _camTexGrayUndistortMat = new Mat(height, width, CvType.CV_8UC1);
    _processedCameraTexture = new Texture2D(width, height, GraphicsFormat.R8_UNorm, 0, TextureCreationFlags.None);
    _processedCameraTexture.name = "UndistortedCameraTex";
    _processedCameraTexture.wrapMode = TextureWrapMode.Repeat;
    _arTexture = new RenderTexture(width, height, 16, GraphicsFormat.R8G8B8A8_UNorm);
    _arTexture.name = "AR Texture";

    // Create undistort map (sensorMat remains unchanged even though it is passed as newCameraMatrix).
    Calib3d.initUndistortRectifyMap(_sensorMat, _distortionCoeffsMat, new Mat(), _sensorMat, new Size(width, height), CvType.CV_32FC1, _undistortMap1, _undistortMap2);

    // Update UI.
    _aspectFitter.aspectRatio = width / (float)height;
    _processedCameraImage.texture = _processedCameraTexture;
    _arImage.texture = _arTexture;
    _mainCamera.targetTexture = _arTexture;

    // Log.
    Debug.Log(logPrepend + "Tracking chessboard in camera image at " + width + "x" + height + "\n");

    return true;
}