/// <summary>
/// Given a rectified stereo pair, compute the left disparity map and the 3D point cloud.
/// </summary>
/// <param name="left">The left image</param>
/// <param name="right">The right image</param>
/// <param name="disparityMap">The left disparity map</param>
/// <param name="points">The 3D point cloud within a [-0.5, 0.5] cube</param>
private static void Computer3DPointsFromStereoPair(Image <Gray, Byte> left, Image <Gray, Byte> right, out Image <Gray, short> disparityMap, out MCvPoint3D32f[] points)
{
    Size imageSize = left.Size;
    disparityMap = new Image <Gray, short>(imageSize);

    using (StereoBM matcher = new StereoBM())
    {
        matcher.Compute(left, right, disparityMap);

        float cubeScale = Math.Max(imageSize.Width, imageSize.Height);

        // Hand-built Q matrix; if you have the Q produced by cvStereoRectify,
        // use that instead.
        double[,] qData =
        {
            { 1.0, 0.0, 0.0, -imageSize.Width / 2 },  // shift the x origin to the image center
            { 0.0, -1.0, 0.0, imageSize.Height / 2 }, // shift the y origin to the image center and flip upside down
            { 0.0, 0.0, -1.0, 0.0 },                  // negate the z value
            { 0.0, 0.0, 0.0, cubeScale }              // scale coordinates into a [-0.5, 0.5] cube
        };
        using (Matrix <double> q = new Matrix <double>(qData))
        {
            points = PointCollection.ReprojectImageTo3D(disparityMap, q);
        }
    }
}
/// <summary>
/// Given a rectified stereo pair, compute the left disparity map and the 3D point cloud.
/// </summary>
/// <param name="left">The left image</param>
/// <param name="right">The right image</param>
/// <param name="outputDisparityMap">The left disparity map</param>
/// <param name="points">The 3D point cloud within a [-0.5, 0.5] cube</param>
private static void Computer3DPointsFromStereoPair(IInputArray left, IInputArray right, Mat outputDisparityMap, Mat points)
{
    Size imageSize;
    using (InputArray leftArray = left.GetInputArray())
    {
        imageSize = leftArray.GetSize();
    }

    using (StereoBM matcher = new StereoBM())
    {
        matcher.Compute(left, right, outputDisparityMap);

        float cubeScale = Math.Max(imageSize.Width, imageSize.Height);

        // Hand-built Q matrix; if you have the Q produced by cvStereoRectify,
        // use that instead.
        double[,] qData =
        {
            { 1.0, 0.0, 0.0, -imageSize.Width / 2 },  // shift the x origin to the image center
            { 0.0, -1.0, 0.0, imageSize.Height / 2 }, // shift the y origin to the image center and flip upside down
            { 0.0, 0.0, -1.0, 0.0 },                  // negate the z value
            { 0.0, 0.0, 0.0, cubeScale }              // scale coordinates into a [-0.5, 0.5] cube
        };
        using (Matrix <double> q = new Matrix <double>(qData))
        {
            CvInvoke.ReprojectImageTo3D(outputDisparityMap, points, q, false, DepthType.Cv32F);
        }
    }
}
/// <summary>
/// Given the left and right image, computer the disparity map and the 3D point cloud.
/// Uses left/right consistency: disparity is computed in both directions and the left
/// map is refined with a weighted-least-squares filter before reprojection.
/// </summary>
/// <param name="left">The left image</param>
/// <param name="right">The right image</param>
/// <param name="outputDisparityMap">The left disparity map</param>
/// <param name="points">The 3D point cloud within a [-0.5, 0.5] cube</param>
/// <param name="handleMissingValues">If true, points with invalid disparity are moved to a very large z by ReprojectImageTo3D</param>
private static void Computer3DPointsFromStereoPair(IInputArray left, IInputArray right, Mat outputDisparityMap, Mat points, bool handleMissingValues = true)
{
    // Query the image size from the input array wrapper (works for any IInputArray).
    System.Drawing.Size size;
    using (InputArray ia = left.GetInputArray())
        size = ia.GetSize();

    // RightMatcher derives a right-view matcher from the left one; the WLS filter
    // is configured from the left matcher so its parameters stay consistent.
    using (StereoBM leftMatcher = new StereoBM())
    using (RightMatcher rightMatcher = new RightMatcher(leftMatcher))
    using (Mat leftDisparity = new Mat())
    using (Mat rightDisparity = new Mat())
    using (DisparityWLSFilter wls = new DisparityWLSFilter(leftMatcher))
    {
        leftMatcher.Compute(left, right, leftDisparity);
        rightMatcher.Compute(right, left, rightDisparity);
        // Filter the left disparity, guided by the left image and checked
        // against the right disparity, into the caller-supplied output map.
        wls.Filter(leftDisparity, left, outputDisparityMap, rightDisparity, rightView: right);

        float scale = Math.Max(size.Width, size.Height);

        //Construct a simple Q matrix, if you have a matrix from cvStereoRectify, you should use that instead
        using (Matrix <double> q = new Matrix <double>(
            new double[, ]
            {
                { 1.0, 0.0, 0.0, -size.Width / 2 },  //shift the x origin to image center
                { 0.0, -1.0, 0.0, size.Height / 2 }, //shift the y origin to image center and flip it upside down
                { 0.0, 0.0, -1.0, 0.0 },             //Multiply the z value by -1.0,
                { 0.0, 0.0, 0.0, scale }             //scale the object's coordinate to within a [-0.5, 0.5] cube
            }))
        {
            CvInvoke.ReprojectImageTo3D(outputDisparityMap, points, q, handleMissingValues, DepthType.Cv32F);
            //CvInvoke.ReprojectImageTo3D(leftDisparity, points, q, false, DepthType.Cv32F);
            //CvInvoke.ReprojectImageTo3D(leftDisparity, points, q, handleMissingValues, DepthType.Cv32F);
        }
        //points = PointCollection.ReprojectImageTo3D(outputDisparityMap, q);
    }
}
/// <summary>
/// Load a stereo pair from disk, compute the BM disparity map, normalize it for
/// display and return it packed in an <see cref="AlgorithmResult"/>.
/// </summary>
/// <param name="filenameL">Path of the left image</param>
/// <param name="filenameR">Path of the right image</param>
/// <param name="numberOfDisparities">Disparity search range (must be divisible by 16)</param>
/// <param name="blockSize">Matched block size (odd, >= 5)</param>
/// <returns>The result holding the normalized disparity rendered as a BGR image</returns>
public AlgorithmResult DetectDisparity(
    string filenameL,
    string filenameR,
    int numberOfDisparities,
    int blockSize)
{
    AlgorithmResult result = new AlgorithmResult();
    Image <Bgr, byte> imageLeft = ImageHelper.GetImage(filenameL);
    Image <Bgr, byte> imageRight = ImageHelper.GetImage(filenameR);

    // (gray, float) image that receives the raw disparity.
    var imageDisparity = new Image <Gray, float>(imageLeft.Size);

    // FIX: StereoBM wraps a native matcher and is IDisposable — the original
    // leaked it (and the intermediate gray conversions); dispose deterministically.
    using (StereoBM stereoBM = new StereoBM(numberOfDisparities, blockSize))
    using (var grayLeft = imageLeft.Convert <Gray, byte>())
    using (var grayRight = imageRight.Convert <Gray, byte>())
    {
        StereoMatcherExtensions.Compute(stereoBM, grayLeft, grayRight, imageDisparity);
    }

    // Normalize disparity into the displayable 0-255 range.
    CvInvoke.Normalize(imageDisparity, imageDisparity, 0, 255, NormType.MinMax, DepthType.Cv8U);

    // FIX: the original allocated resultImage up front and immediately
    // overwrote the reference — build it once here instead.
    var resultImage = imageDisparity.Convert <Bgr, byte>();
    result.ImageArray = ImageHelper.SetImage(resultImage);
    return(result);
}
// Compute a disparity map from a stereo pair, reproject it to 3D and show the
// colored point cloud in the 3D viewer.
// NOTE(review): several Mats created here (output, points, maps, remapped images)
// are never disposed — consider wrapping in using blocks; verify ownership first.
private void Display3D(Mat left, Mat right)
{
    //TODO: try to use StereoSGBM
    using (StereoBM stereoSolver = new StereoBM())
    {
        Mat output = new Mat();
        Mat left8bit = ConvertInto8bitMat(left);
        Mat right8bit = ConvertInto8bitMat(right);
        // Disparity is computed from the raw 8-bit images here.
        // NOTE(review): the rectification/remap below runs AFTER this call, so the
        // remapped images are never fed to the matcher — confirm intended ordering.
        stereoSolver.Compute(left8bit, right8bit, output);
        Mat points = new Mat();
        float scale = Math.Max(left.Size.Width, left.Size.Height);
        // Build a fallback Q matrix once, only when no calibration has been loaded.
        if (!_isCalibrate)
        {
            Q = new Matrix <double>(
                new double[, ]
                {
                    { 1.0, 0.0, 0.0, -left.Width / 2 },  //shift the x origin to image center
                    { 0.0, -1.0, 0.0, left.Height / 2 }, //shift the y origin to image center and flip it upside down
                    { 0.0, 0.0, -1.0, 0.0 },             //Multiply the z value by -1.0,
                    { 0.0, 0.0, 0.0, scale }
                });
            _isCalibrate = true;
        }
        //Construct a simple Q matrix, if you have a matrix from cvStereoRectify, you should use that instead
        //scale the object's coordinate to within a [-0.5, 0.5] cube
        // When full calibration data exists, rectify both views via undistort maps.
        if (_isCameraMatrixCount)
        {
            Mat map11 = new Mat();
            Mat map12 = new Mat();
            Mat map21 = new Mat();
            Mat map22 = new Mat();
            CvInvoke.InitUndistortRectifyMap(cameraMatrix1, distCoeff1, R1, P1, left8bit.Size, DepthType.Cv16S, map11, map12);
            CvInvoke.InitUndistortRectifyMap(cameraMatrix2, distCoeff2, R2, P2, left8bit.Size, DepthType.Cv16S, map21, map22);
            Mat img1r = new Mat();
            Mat img2r = new Mat();
            CvInvoke.Remap(left8bit, img1r, map11, map12, Inter.Linear);
            CvInvoke.Remap(right8bit, img2r, map21, map22, Inter.Linear);
            left8bit = img1r;
            right8bit = img2r;
        }
        //stereoSolver.FindStereoCorrespondence(left, right, disparityMap);
        CvInvoke.ReprojectImageTo3D(output, points, Q, false, DepthType.Cv32F);
        //points = PointCollection.ReprojectImageTo3D(output, Q);
        // Flatten the point grid and the left image's colors into Nx1 arrays for WCloud.
        Mat pointsArray = points.Reshape(points.NumberOfChannels, points.Rows * points.Cols);
        Mat colorArray = left.Reshape(left.NumberOfChannels, left.Rows * left.Cols);
        Mat colorArrayFloat = new Mat();
        colorArray.ConvertTo(colorArrayFloat, DepthType.Cv32F);
        // NOTE(review): colorArrayFloat is computed but never used — WCloud receives
        // the unconverted colorArray. Confirm which depth WCloud expects and drop
        // the dead conversion or pass colorArrayFloat instead.
        WCloud cloud = new WCloud(pointsArray, colorArray);
        Display3DImage(cloud);
        //points = PointCollection.ReprojectImageTo3D(outputDisparityMap, q);
    }
}
/// <summary>
/// Button handler: validate the configured parameters and create the BM matcher.
/// </summary>
private void bmBtn_Click(object sender, EventArgs e)
{
    // Both parameters must be non-zero before a matcher can be built.
    bool parametersInvalid = BM_NumOfDis == 0 || BM_BlockSize == 0;
    if (parametersInvalid)
    {
        Data.LogString = "[error] BM初始化参数错误";
        return;
    }

    // Instantiate the BM matcher and flag that BM computation is enabled.
    bm = new StereoBM(BM_NumOfDis, BM_BlockSize);
    BM_CAL_FLAG = true;
}
/// <summary>
/// Wire up the command bus, derive the frame-emit delay from the target rate,
/// and configure the BM stereo matcher.
/// </summary>
/// <param name="commandBus">Bus used to publish depth-map commands</param>
public DepthMapBuilder(ICommandBus commandBus)
{
    _commandBus = commandBus;
    _emitDelay = TimeSpan.FromMilliseconds(1000.0 / DEPTH_MAP_PER_SECONDS);

    // Build and tune the matcher before publishing it to the field.
    var matcher = StereoBM.Create();
    matcher.MinDisparity = 4;
    matcher.NumDisparities = 128;
    matcher.BlockSize = 21;
    matcher.SpeckleRange = 16;
    matcher.SpeckleWindowSize = 45;
    _stereoMatcher = matcher;
}
/// <summary>
/// Compute the BM disparity map for a stereo pair. The inputs are converted to
/// gray (into LeftGrayImage/RightGrayImage) before matching.
/// </summary>
/// <param name="leftImage">The left BGR image</param>
/// <param name="rightImage">The right BGR image</param>
/// <returns>The raw disparity map produced by StereoBM</returns>
public override Mat ComputeDepthMap(Image <Bgr, byte> leftImage, Image <Bgr, byte> rightImage)
{
    StereoBM stereoBM = CreateStereoBM();
    ConvertImageToGray(leftImage, rightImage);

    Mat imageDisparity = new Mat();
    stereoBM.Compute(LeftGrayImage, RightGrayImage, imageDisparity);

    // FIX: the original also converted the disparity to Cv8U into a local
    // "imageToSave" that was never used or returned — dead work, removed.
    // (If the 8-bit rendition is wanted, callers can ConvertTo themselves.)
    return(imageDisparity);
}
// Form initialization: build the BM and SGBM matcher objects, prime the timer,
// and seed every BM/SGBM parameter control from the current field values.
private void DisparityMeasure_Load(object sender, EventArgs e)
{
    // Create the BM and SGBM matcher objects.
    bmMatch = StereoBM.Create();
    sgbmMatch = StereoSGBM.Create(sgbm_minDisparity, sgbm_numofDisparities, sgbm_blockSize, sgbm_P1, sgbm_P2, sgbm_disp12MaxDiff, sgbm_preFilterCap, sgbm_uniquenessRatio, sgbm_speckleWindowSize, sgbm_speckleRange, sgbm_Mode);
    // Initialize the timer (enabled but stopped until measurement starts).
    //dmTimer = new System.Threading.Timer(disparitymeTime, null, -1, 50);
    timer_disparityMeasure.Enabled = true;
    timer_disparityMeasure.Stop();
    // Compute the valid disparity region.
    // Seed the BM parameter controls.
    this.ucTrackBar_preFilterSize.Value = bm_preFilterSize;
    this.textBox_preFilterSize.Text = bm_preFilterSize.ToString();
    this.ucTrackBar_preFilterCap.Value = bm_preFilterCap;
    this.textBox_preFilterCap.Text = bm_preFilterCap.ToString();
    this.ucTrackBar_SADWinSize.Value = bm_SADWinSize;
    this.textBox_SADWinSize.Text = bm_SADWinSize.ToString();
    this.ucTrackBar_minDIsparity.Value = bm_minDisparity;
    this.textBox_minDisparity.Text = bm_minDisparity.ToString();
    // numDisparities must be a multiple of 16, so the trackbar stores value/16.
    this.ucTrackBar_numOfDis.Value = bm_numOfDisparities / 16;
    this.textBox_numOfDis.Text = bm_numOfDisparities.ToString();
    this.ucTrackBar_uniquenessRatio.Value = bm_uniquenessRatio;
    this.textBox_uniquenessRatio.Text = bm_uniquenessRatio.ToString();
    this.ucTrackBar_textureThre.Value = bm_textureThreshold;
    this.textBox_textureThreshold.Text = bm_textureThreshold.ToString();
    this.ucTrackBar_speckleWinSize.Value = bm_speckleWinSize;
    this.textBox_speckleWinSize.Text = bm_speckleWinSize.ToString();
    this.ucTrackBar_speckleRange.Value = bm_speckleRange;
    this.textBox_speckleRange.Text = bm_speckleRange.ToString();
    this.ucTrackBar_disp12MaxDiff.Value = bm_disp12MaxDiff;
    this.textBox_disp12MaxDiff.Text = bm_disp12MaxDiff.ToString();
    // Seed the SGBM parameter controls.
    this.numericUpDown_preFilterCap.Value = sgbm_preFilterCap;
    this.numericUpDown_sadWinSize.Value = sgbm_blockSize;
    this.numericUpDown_minDisparity.Value = sgbm_minDisparity;
    this.numericUpDown_numOfDisparities.Value = sgbm_numofDisparities;
    this.numericUpDown_p1.Value = sgbm_P1;
    this.numericUpDown_p2.Value = sgbm_P2;
    this.numericUpDown_uniquenessRatio.Value = sgbm_uniquenessRatio;
    this.numericUpDown_disp12MaxDiff.Value = sgbm_disp12MaxDiff;
    this.numericUpDown_speckleWinSize.Value = sgbm_speckleWindowSize;
    this.numericUpDown_speckcleRange.Value = sgbm_speckleRange;
}
// Smoke test: computing a BM disparity map on the Tsukuba pair must not throw.
public void SimpleCompute()
{
    // Load the standard Tsukuba stereo pair as single-channel images.
    var leftImage = Image("tsukuba_left.png", ImreadModes.GrayScale);
    var rightImage = Image("tsukuba_right.png", ImreadModes.GrayScale);

    var matcher = StereoBM.Create();
    var disparityMap = new Mat();
    matcher.Compute(leftImage, rightImage, disparityMap);
}
// Smoke test: computing a BM disparity map on the Tsukuba pair must not throw.
// When a debugger is attached, additionally shows a normalized preview window.
public void SimpleCompute()
{
    var leftImage = Image("tsukuba_left.png", ImreadModes.Grayscale);
    var rightImage = Image("tsukuba_right.png", ImreadModes.Grayscale);

    var matcher = StereoBM.Create();
    var disparityMap = new Mat();
    matcher.Compute(leftImage, rightImage, disparityMap);

    if (Debugger.IsAttached)
    {
        Cv2.MinMaxLoc(disparityMap, out double minVal, out double maxVal);
        var preview = new Mat();
        // Linearly rescale [min, max] into [0, 255] for display.
        disparityMap.ConvertTo(preview, MatType.CV_8UC1, 255 / (maxVal - minVal), -255 * minVal / (maxVal - minVal));
        Window.ShowImages(preview);
    }
}
/// <summary>
/// Given a rectified stereo pair, compute the left disparity map and the 3D point
/// cloud (legacy Emgu CV 2.x API).
/// </summary>
/// <param name="left">The left image</param>
/// <param name="right">The right image</param>
/// <param name="disparityMap">The left disparity map</param>
/// <param name="points">The 3D point cloud within a [-0.5, 0.5] cube</param>
private static void Computer3DPointsFromStereoPair(Image <Gray, Byte> left, Image <Gray, Byte> right, out Image <Gray, short> disparityMap, out MCvPoint3D32f[] points)
{
    Size imageSize = left.Size;
    disparityMap = new Image <Gray, short>(imageSize);

    using (StereoBM matcher = new StereoBM(Emgu.CV.CvEnum.STEREO_BM_TYPE.BASIC, 0))
    {
        matcher.FindStereoCorrespondence(left, right, disparityMap);

        // Hand-built Q matrix; if you have the Q produced by cvStereoRectify,
        // use that instead.
        double[,] qData =
        {
            { 1.0, 0.0, 0.0, -imageSize.Width / 2 },  // shift the x origin to the image center
            { 0.0, 1.0, 0.0, -imageSize.Height / 2 }, // shift the y origin to the image center
            { 0.0, 0.0, 1.0, 0.0 },                   // keep z as-is
            { 0.0, 0.0, 0.0, 1.0 }
        };
        using (Matrix <double> q = new Matrix <double>(qData))
        {
            points = PointCollection.ReprojectImageTo3D(disparityMap, q);
        }
    }
}
// Unity initialization: compute a BM disparity map for the Tsukuba pair and
// display the normalized result as this object's main texture.
void Start()
{
    // Load the stereo pair from Resources as grayscale Mats.
    Texture2D texLeft = Resources.Load("tsukuba_l") as Texture2D;
    Texture2D texRight = Resources.Load("tsukuba_r") as Texture2D;

    Mat leftMat = new Mat(texLeft.height, texLeft.width, CvType.CV_8UC1);
    Mat rightMat = new Mat(texRight.height, texRight.width, CvType.CV_8UC1);
    Utils.texture2DToMat(texLeft, leftMat);
    Utils.texture2DToMat(texRight, rightMat);
    //or
    //Mat leftMat = Imgcodecs.imread (Utils.getFilePath ("tsukuba_l.png"), Imgcodecs.IMREAD_GRAYSCALE);
    //Mat rightMat = Imgcodecs.imread (Utils.getFilePath ("tsukuba_r.png"), Imgcodecs.IMREAD_GRAYSCALE);

    Mat disparity16S = new Mat(leftMat.rows(), leftMat.cols(), CvType.CV_16S);
    Mat disparity8U = new Mat(leftMat.rows(), leftMat.cols(), CvType.CV_8UC1);

    if (leftMat.empty() || rightMat.empty())
    {
        Debug.Log("Error reading images ");
    }

    StereoBM matcher = StereoBM.create(16, 15);
    matcher.compute(leftMat, rightMat, disparity16S);

    // Rescale the signed 16-bit disparity into the displayable 8-bit range.
    Core.normalize(disparity16S, disparity8U, 0, 255, Core.NORM_MINMAX, CvType.CV_8U);

    Texture2D outputTexture = new Texture2D(disparity8U.cols(), disparity8U.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(disparity8U, outputTexture);
    gameObject.GetComponent <Renderer> ().material.mainTexture = outputTexture;
}
// Native binding for OpenCV's legacy C API cvFindStereoCorrespondenceBM:
// computes the disparity map for a rectified stereo pair using the supplied
// block-matching state object, writing the result into 'disparity'.
// NOTE(review): the [DllImport] attribute for this extern is presumably declared
// adjacent to this line outside this view — confirm in the full file.
internal static extern void cvFindStereoCorrespondenceBM(Arr left, Arr right, Arr disparity, StereoBM state);