/// <summary>
/// Recomputes and displays the SGBM disparity map whenever any tuning control changes.
/// The matcher and the intermediate disparity Mat are now disposed (they leaked before),
/// and the production-code GC.Collect() call has been removed.
/// </summary>
/// <param name="sender">The control that changed.</param>
/// <param name="e">Unused event data.</param>
private void Any_ValueChanged(object sender, EventArgs e)
{
    try
    {
        // Block-size slider maps to 2v-1 so the window size is always odd,
        // as StereoSGBM requires.
        using (Mat disparityMap = new Mat())
        using (StereoSGBM sgbm = new StereoSGBM(
            tBminDisparity.Value,
            tBnumDisparities.Value,
            tBblockSize.Value * 2 - 1,
            tBP1.Value,
            tBP2.Value,
            tBdisp12MaxDiff.Value,
            tBpreFilterCap.Value,
            tBuniquenessRatio.Value,
            tBspecleWindowSize.Value,
            tBspecleRange.Value,
            (rBSBGM.Checked) ? StereoSGBM.Mode.SGBM : StereoSGBM.Mode.HH))
        {
            sgbm.Compute(leftIm, rightIm, disparityMap);

            // `show` must stay alive while the picture box displays it, so it is
            // a field and deliberately not disposed here.
            show = new Mat();
            disparityMap.ConvertTo(show, DepthType.Cv8U);
            iBdisparity.Image = show;
            lLog.Text = author;
        }
    }
    catch (Exception ex)
    {
        lLog.Text = "Log: " + ex.Message;
    }
}
/// <summary>
/// Given the left and right image, compute the disparity map and the 3D point cloud.
/// </summary>
/// <param name="left">The left image</param>
/// <param name="right">The right image</param>
/// <param name="disparityMap">The left disparity map</param>
/// <param name="points">The 3D point cloud within a [-0.5, 0.5] cube</param>
private void Computer3DPointsFromStereoPair(Image <Gray, Byte> left, Image <Gray, Byte> right, out Image <Gray, short> disparityMap, out MCvPoint3D32f[] points)
{
    Size size = left.Size;
    disparityMap = new Image <Gray, short>(size);

    // Smoothness penalties per the OpenCV stereo_match sample:
    // 8 * channels * SAD^2 and 32 * channels * SAD^2 (single-channel input here,
    // hence the literal 1).
    int P1 = 8 * 1 * Calibration.SAD * Calibration.SAD;   //GetSliderValue(P1_Slider);
    int P2 = 32 * 1 * Calibration.SAD * Calibration.SAD;  //GetSliderValue(P2_Slider);

    // All remaining parameters come from the shared Calibration settings object.
    using (StereoSGBM stereoSolver = new StereoSGBM(Calibration.MinDisparities, Calibration.NumDisparities, Calibration.SAD, P1, P2, Calibration.MaxDiff, Calibration.PrefilterCap, Calibration.UniquenessRatio, Calibration.Speckle, Calibration.SpeckleRange, Calibration.DisparityMode))
    //using (StereoBM stereoSolver = new StereoBM(Emgu.CV.CvEnum.STEREO_BM_TYPE.BASIC, 0))
    {
        stereoSolver.FindStereoCorrespondence(left, right, disparityMap); // Computes the disparity map using:
        /*GC: graph cut-based algorithm
         * BM: block matching algorithm
         * SGBM: modified H. Hirschmuller algorithm HH08*/
        points = PointCollection.ReprojectImageTo3D(disparityMap, Calibration.Q); // Reprojects disparity image to 3D space.
    }
}
public StereoCorrespondence()
{
    // cvFindStereoCorrespondenceBM + cvFindStereoCorrespondenceGC
    // Stereo matching with both the block-matching and graph-cut algorithms.

    // Load the input images (grayscale).
    using (IplImage imgLeft = new IplImage(Const.ImageTsukubaLeft, LoadMode.GrayScale))
    using (IplImage imgRight = new IplImage(Const.ImageTsukubaRight, LoadMode.GrayScale))
    {
        // Allocate the raw 16-bit disparity buffers and the 8-bit display images.
        using (IplImage dispBM = new IplImage(imgLeft.Size, BitDepth.S16, 1))
        using (IplImage dispLeft = new IplImage(imgLeft.Size, BitDepth.S16, 1))
        using (IplImage dispRight = new IplImage(imgLeft.Size, BitDepth.S16, 1))
        using (IplImage dstBM = new IplImage(imgLeft.Size, BitDepth.U8, 1))
        using (IplImage dstGC = new IplImage(imgLeft.Size, BitDepth.U8, 1))
        using (IplImage dstAux = new IplImage(imgLeft.Size, BitDepth.U8, 1))
        using (Mat dstSGBM = new Mat())
        {
            // Distance measurement and scaling.
            int sad = 3;
            using (CvStereoBMState stateBM = new CvStereoBMState(StereoBMPreset.Basic, 16))
            using (CvStereoGCState stateGC = new CvStereoGCState(16, 2))
            using (StereoSGBM sgbm = new StereoSGBM()
            {
                MinDisparity = 0,
                NumberOfDisparities = 32,
                PreFilterCap = 63,
                SADWindowSize = sad,
                // Recommended penalties: 8/32 * channels * SAD^2.
                P1 = 8 * imgLeft.NChannels * sad * sad,
                P2 = 32 * imgLeft.NChannels * sad * sad,
                UniquenessRatio = 10,
                SpeckleWindowSize = 100,
                SpeckleRange = 32,
                Disp12MaxDiff = 1,
                FullDP = false,
            })
            {
                Cv.FindStereoCorrespondenceBM(imgLeft, imgRight, dispBM, stateBM);
                // stateBM.FindStereoCorrespondence(imgLeft, imgRight, dispBM);
                Cv.FindStereoCorrespondenceGC(imgLeft, imgRight, dispLeft, dispRight, stateGC, false);
                // stateGC.FindStereoCorrespondence(imgLeft, imgRight, dispLeft, dispRight, false);
                Cv.FindStereoCorrespondence(imgLeft, imgRight, DisparityMode.Birchfield, dstAux, 50, 25, 5, 12, 15, 25);
                sgbm.FindCorrespondence(new Mat(imgLeft), new Mat(imgRight), dstSGBM);

                // Scale each raw result into a displayable 8-bit range.
                Cv.ConvertScale(dispBM, dstBM, 1);
                Cv.ConvertScale(dispLeft, dstGC, -16);
                Cv.ConvertScale(dstAux, dstAux, 16);
                dstSGBM.ConvertTo(dstSGBM, dstSGBM.Type, 32, 0);

                using (new CvWindow("Stereo Correspondence (BM)", dstBM))
                using (new CvWindow("Stereo Correspondence (GC)", dstGC))
                using (new CvWindow("Stereo Correspondence (cvaux)", dstAux))
                using (new CvWindow("Stereo Correspondence (SGBM)", dstSGBM.ToIplImage()))
                {
                    Cv.WaitKey();
                }
            }
        }
    }
}
// Runs BM, GC, cvaux and SGBM stereo matching on the Tsukuba pair and shows
// the four results side by side until a key is pressed.
public void Run()
{
    // Load left&right images
    using (var imgLeft = new IplImage(FilePath.Image.TsukubaLeft, LoadMode.GrayScale))
    using (var imgRight = new IplImage(FilePath.Image.TsukubaRight, LoadMode.GrayScale))
    {
        // output image buffers: raw 16-bit disparities and 8-bit display images
        using (var dispBM = new IplImage(imgLeft.Size, BitDepth.S16, 1))
        using (var dispLeft = new IplImage(imgLeft.Size, BitDepth.S16, 1))
        using (var dispRight = new IplImage(imgLeft.Size, BitDepth.S16, 1))
        using (var dstBM = new IplImage(imgLeft.Size, BitDepth.U8, 1))
        using (var dstGC = new IplImage(imgLeft.Size, BitDepth.U8, 1))
        using (var dstAux = new IplImage(imgLeft.Size, BitDepth.U8, 1))
        using (var dstSGBM = new Mat())
        {
            // measures distance and scales
            const int sad = 3;
            using (var stateBM = new CvStereoBMState(StereoBMPreset.Basic, 16))
            using (var stateGC = new CvStereoGCState(16, 2))
            using (var sgbm = new StereoSGBM() // C++
            {
                MinDisparity = 0,
                NumberOfDisparities = 32,
                PreFilterCap = 63,
                SADWindowSize = sad,
                // Recommended penalties: 8/32 * channels * SAD^2.
                P1 = 8 * imgLeft.NChannels * sad * sad,
                P2 = 32 * imgLeft.NChannels * sad * sad,
                UniquenessRatio = 10,
                SpeckleWindowSize = 100,
                SpeckleRange = 32,
                Disp12MaxDiff = 1,
                FullDP = false,
            })
            {
                Cv.FindStereoCorrespondenceBM(imgLeft, imgRight, dispBM, stateBM);
                Cv.FindStereoCorrespondenceGC(imgLeft, imgRight, dispLeft, dispRight, stateGC, false);
                Cv.FindStereoCorrespondence(imgLeft, imgRight, DisparityMode.Birchfield, dstAux, 50, 25, 5, 12, 15, 25); // cvaux
                sgbm.Compute(new Mat(imgLeft), new Mat(imgRight), dstSGBM);

                // Scale each raw result into a displayable 8-bit range.
                Cv.ConvertScale(dispBM, dstBM, 1);
                Cv.ConvertScale(dispLeft, dstGC, -16);
                Cv.ConvertScale(dstAux, dstAux, 16);
                dstSGBM.ConvertTo(dstSGBM, dstSGBM.Type(), 32, 0);

                using (new CvWindow("Stereo Correspondence (BM)", dstBM))
                using (new CvWindow("Stereo Correspondence (GC)", dstGC))
                using (new CvWindow("Stereo Correspondence (cvaux)", dstAux))
                using (new CvWindow("Stereo Correspondence (SGBM)", dstSGBM.ToIplImage()))
                {
                    Cv.WaitKey();
                }
            }
        }
    }
}
// Demonstrates four stereo matchers (BM, GC, cvaux, SGBM) on the Tsukuba pair
// and displays the scaled results until a key press.
public void Run()
{
    // Load the rectified left/right pair as single-channel images.
    using (var left = new IplImage(FilePath.TsukubaLeft, LoadMode.GrayScale))
    using (var right = new IplImage(FilePath.TsukubaRight, LoadMode.GrayScale))
    {
        // Raw 16-bit disparity buffers plus 8-bit views for display.
        using (var rawBM = new IplImage(left.Size, BitDepth.S16, 1))
        using (var rawGCLeft = new IplImage(left.Size, BitDepth.S16, 1))
        using (var rawGCRight = new IplImage(left.Size, BitDepth.S16, 1))
        using (var viewBM = new IplImage(left.Size, BitDepth.U8, 1))
        using (var viewGC = new IplImage(left.Size, BitDepth.U8, 1))
        using (var viewAux = new IplImage(left.Size, BitDepth.U8, 1))
        using (var viewSGBM = new Mat())
        {
            // SAD window size shared by the SGBM penalty terms.
            const int window = 3;
            using (var bmState = new CvStereoBMState(StereoBMPreset.Basic, 16))
            using (var gcState = new CvStereoGCState(16, 2))
            using (var sgbm = new StereoSGBM // C++
            {
                MinDisparity = 0,
                NumberOfDisparities = 32,
                PreFilterCap = 63,
                SADWindowSize = window,
                P1 = 8 * left.NChannels * window * window,
                P2 = 32 * left.NChannels * window * window,
                UniquenessRatio = 10,
                SpeckleWindowSize = 100,
                SpeckleRange = 32,
                Disp12MaxDiff = 1,
                FullDP = false,
            })
            {
                Cv.FindStereoCorrespondenceBM(left, right, rawBM, bmState);
                Cv.FindStereoCorrespondenceGC(left, right, rawGCLeft, rawGCRight, gcState, false);
                Cv.FindStereoCorrespondence(left, right, DisparityMode.Birchfield, viewAux, 50, 25, 5, 12, 15, 25); // cvaux
                sgbm.Compute(new Mat(left), new Mat(right), viewSGBM);

                // Bring every raw result into a displayable 8-bit range.
                Cv.ConvertScale(rawBM, viewBM, 1);
                Cv.ConvertScale(rawGCLeft, viewGC, -16);
                Cv.ConvertScale(viewAux, viewAux, 16);
                viewSGBM.ConvertTo(viewSGBM, viewSGBM.Type(), 32, 0);

                using (new CvWindow("Stereo Correspondence (BM)", viewBM))
                using (new CvWindow("Stereo Correspondence (GC)", viewGC))
                using (new CvWindow("Stereo Correspondence (cvaux)", viewAux))
                using (new CvWindow("Stereo Correspondence (SGBM)", viewSGBM.ToIplImage()))
                {
                    Cv.WaitKey();
                }
            }
        }
    }
}
// Builds a disparity map for a stereo pair using SGBM, with every matcher
// parameter taken live from the UI sliders.
private Image<Gray, short> FindDisparity1(Image<Bgr, Byte> image1, Image<Bgr, Byte> image2)
{
    // 16-bit output buffer; SGBM writes fixed-point disparities into it.
    Image<Gray, short> disparityMap = new Image<Gray, short>(image1.Size);

    using (var matcher = new StereoSGBM(
               -(int)minDispSlider_.Value,
               (int)numDispSlider_.Value,
               (int)sadWindowSizeSlider_.Value,
               (int)p1Slider_.Value,
               (int)p2Slider_.Value,
               (int)disp12MaxDiffSlider_.Value,
               (int)preFilterCapSlider_.Value,
               (int)uniquenessRatioSlider_.Value,
               (int)speckleWindowSizeSlider_.Value,
               (int)speckleRangeSlider_.Value,
               StereoSGBM.Mode.SGBM))
    {
        // The matcher expects single-channel input, so convert first.
        Image<Gray, Byte> gray1 = image1.Convert<Gray, Byte>();
        Image<Gray, Byte> gray2 = image2.Convert<Gray, Byte>();
        matcher.FindStereoCorrespondence(gray1, gray2, disparityMap);
    }

    return disparityMap;
}
/// <summary>
/// Computes the raw SGBM disparity map for a stereo pair.
/// </summary>
/// <param name="leftImage">Left BGR input image.</param>
/// <param name="rightImage">Right BGR input image.</param>
/// <returns>The 16-bit disparity map produced by StereoSGBM.Compute.</returns>
public override Mat ComputeDepthMap(Image<Bgr, byte> leftImage, Image<Bgr, byte> rightImage)
{
    // Dispose the matcher when done (it was leaked before).
    using (StereoSGBM stereoSGBM = CreateStereoSGBM())
    {
        // Presumably fills LeftGrayImage/RightGrayImage from the inputs — TODO confirm.
        ConvertImageToGray(leftImage, rightImage);

        Mat imageDisparity = new Mat();
        stereoSGBM.Compute(LeftGrayImage, RightGrayImage, imageDisparity);

        // NOTE(review): the old 8-bit ConvertTo copy ("imageToSave") was never
        // used or returned, so that dead conversion has been removed.
        return imageDisparity;
    }
}
/// <summary>
/// Wraps two pinned pixel buffers as images, downsamples them to quarter size,
/// runs SGBM stereo matching, and returns a lazy factory for the disparity
/// bitmap (lazy so the pixel data is not copied twice).
/// All intermediate images are now disposed — previously the wrapper images,
/// the resized images, and the byte-conversion temporaries all leaked.
/// </summary>
/// <param name="w">Source image width in pixels.</param>
/// <param name="h">Source image height in pixels.</param>
/// <param name="one">Left image pixel buffer.</param>
/// <param name="two">Right image pixel buffer.</param>
/// <returns>A factory producing the disparity bitmap on demand.</returns>
public static Func<Bitmap> Go(uint w, uint h, byte[] one, byte[] two)
{
    GCHandle gchOne = default(GCHandle), gchTwo = default(GCHandle);
    try
    {
        // Pin the managed buffers so native code can read them in place.
        gchOne = GCHandle.Alloc(one, GCHandleType.Pinned);
        gchTwo = GCHandle.Alloc(two, GCHandleType.Pinned);

        // Wrap the pinned data without copying, then downsample.
        // NOTE(review): the third ctor argument is the row stride; `1` looks
        // suspicious for a short-typed image of width w — confirm against callers.
        using (var fullLeft = new Image<Gray, short>((int)w, (int)h, 1, gchOne.AddrOfPinnedObject()))
        using (var fullRight = new Image<Gray, short>((int)w, (int)h, 1, gchTwo.AddrOfPinnedObject()))
        using (var left = fullLeft.Resize(0.25, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR))
        using (var right = fullRight.Resize(0.25, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR))
        using (var left8 = left.Convert<Gray, byte>())    // SGBM wants 8-bit input
        using (var right8 = right.Convert<Gray, byte>())
        using (var stereoSolver = new StereoSGBM(
                   Config.MinDisparity,
                   Config.NumDisparities,
                   Config.SADWindowSize,
                   Config.P1,
                   Config.P2,
                   Config.Disp12MaxDiff,
                   Config.PreFilterCap,
                   Config.UniquenessRatio,
                   Config.SpeckleWindowSize,
                   Config.SpeckleRange,
                   StereoSGBM.Mode.SGBM))
        {
            var disparity = new Image<Gray, short>(left.Size);
            stereoSolver.FindStereoCorrespondence(left8, right8, disparity);

            // Deliberately not disposed: ownership passes to the returned lazy
            // factory so the caller decides when (and whether) to materialize it.
            return () => disparity.Bitmap;
        }
    }
    finally
    {
        // Unpin even on failure; pinned handles otherwise fragment the GC heap.
        if (gchOne.IsAllocated)
        {
            gchOne.Free();
        }
        if (gchTwo.IsAllocated)
        {
            gchTwo.Free();
        }
    }
}
// Form initialization: creates the BM/SGBM matchers and mirrors the current
// parameter fields into the corresponding UI controls.
private void DisparityMeasure_Load(object sender, EventArgs e)
{
    // Create the BM and SGBM matcher objects.
    bmMatch = StereoBM.Create();
    sgbmMatch = StereoSGBM.Create(sgbm_minDisparity, sgbm_numofDisparities, sgbm_blockSize, sgbm_P1, sgbm_P2, sgbm_disp12MaxDiff, sgbm_preFilterCap, sgbm_uniquenessRatio, sgbm_speckleWindowSize, sgbm_speckleRange, sgbm_Mode);

    // Initialize the timer (created enabled, then immediately stopped so it
    // only runs once measurement starts).
    //dmTimer = new System.Threading.Timer(disparitymeTime, null, -1, 50);
    timer_disparityMeasure.Enabled = true;
    timer_disparityMeasure.Stop();

    // Compute the valid disparity region.

    // Populate the BM parameter controls.
    this.ucTrackBar_preFilterSize.Value = bm_preFilterSize;
    this.textBox_preFilterSize.Text = bm_preFilterSize.ToString();
    this.ucTrackBar_preFilterCap.Value = bm_preFilterCap;
    this.textBox_preFilterCap.Text = bm_preFilterCap.ToString();
    this.ucTrackBar_SADWinSize.Value = bm_SADWinSize;
    this.textBox_SADWinSize.Text = bm_SADWinSize.ToString();
    this.ucTrackBar_minDIsparity.Value = bm_minDisparity;
    this.textBox_minDisparity.Text = bm_minDisparity.ToString();
    // The track bar stores numDisparities as a multiple of 16.
    this.ucTrackBar_numOfDis.Value = bm_numOfDisparities / 16;
    this.textBox_numOfDis.Text = bm_numOfDisparities.ToString();
    this.ucTrackBar_uniquenessRatio.Value = bm_uniquenessRatio;
    this.textBox_uniquenessRatio.Text = bm_uniquenessRatio.ToString();
    this.ucTrackBar_textureThre.Value = bm_textureThreshold;
    this.textBox_textureThreshold.Text = bm_textureThreshold.ToString();
    this.ucTrackBar_speckleWinSize.Value = bm_speckleWinSize;
    this.textBox_speckleWinSize.Text = bm_speckleWinSize.ToString();
    this.ucTrackBar_speckleRange.Value = bm_speckleRange;
    this.textBox_speckleRange.Text = bm_speckleRange.ToString();
    this.ucTrackBar_disp12MaxDiff.Value = bm_disp12MaxDiff;
    this.textBox_disp12MaxDiff.Text = bm_disp12MaxDiff.ToString();

    // Populate the SGBM parameter controls.
    this.numericUpDown_preFilterCap.Value = sgbm_preFilterCap;
    this.numericUpDown_sadWinSize.Value = sgbm_blockSize;
    this.numericUpDown_minDisparity.Value = sgbm_minDisparity;
    this.numericUpDown_numOfDisparities.Value = sgbm_numofDisparities;
    this.numericUpDown_p1.Value = sgbm_P1;
    this.numericUpDown_p2.Value = sgbm_P2;
    this.numericUpDown_uniquenessRatio.Value = sgbm_uniquenessRatio;
    this.numericUpDown_disp12MaxDiff.Value = sgbm_disp12MaxDiff;
    this.numericUpDown_speckleWinSize.Value = sgbm_speckleWindowSize;
    this.numericUpDown_speckcleRange.Value = sgbm_speckleRange;
}
// Smoke test: runs SGBM on the Tsukuba pair with default-ish settings.
public void SimpleCompute()
{
    var left = Image("tsukuba_left.png", ImreadModes.GrayScale);
    var right = Image("tsukuba_right.png", ImreadModes.GrayScale);

    // minDisparity = 0, numDisparities = 32, blockSize = 5
    var matcher = StereoSGBM.Create(0, 32, 5);

    var disparityMap = new Mat();
    matcher.Compute(left, right, disparityMap);
}
// Smoke test: runs SGBM on the Tsukuba pair and, when a debugger is attached,
// shows a contrast-stretched preview of the disparity map.
public void SimpleCompute()
{
    var left = Image("tsukuba_left.png", ImreadModes.Grayscale);
    var right = Image("tsukuba_right.png", ImreadModes.Grayscale);

    // minDisparity = 0, numDisparities = 32, blockSize = 5
    var matcher = StereoSGBM.Create(0, 32, 5);
    var disparityMap = new Mat();
    matcher.Compute(left, right, disparityMap);

    // Only pop up a window during interactive debugging.
    if (!Debugger.IsAttached)
    {
        return;
    }

    Cv2.MinMaxLoc(disparityMap, out double minVal, out double maxVal);
    var preview = new Mat();
    // Stretch [min, max] onto [0, 255] so the map is visible as an 8-bit image.
    disparityMap.ConvertTo(preview, MatType.CV_8UC1, 255 / (maxVal - minVal), -255 * minVal / (maxVal - minVal));
    Window.ShowImages(preview);
}
// Runs SGBM stereo matching with all parameters supplied by the model and
// returns the disparity map narrowed to an 8-bit image.
public static Image<Gray, byte> Compute(StereoSgbmModel model)
{
    Image<Gray, short> rawDisparity = new Image<Gray, short>(model.Image1.Size);

    using (var matcher = new StereoSGBM(
               model.MinDisparity,
               model.NumDisparity,
               model.SadWindowSize,
               model.P1,
               model.P2,
               model.Disparity12MaxDiff,
               model.PreFilterCap,
               model.UniquenessRatio,
               model.SpeckleWindowSize,
               model.SpeckleRange,
               model.Mode))
    {
        matcher.FindStereoCorrespondence(model.Image1, model.Image2, rawDisparity);
    }

    // Convert the 16-bit fixed-point result to 8 bits for display/consumption.
    return rawDisparity.Convert<Gray, byte>();
}
// Computes an SGBM disparity map for a stereo pair; returns an empty Mat when
// the rig is not calibrated/stereo or the inputs have mismatched bit depths.
public Mat computeDisparity(Image<Bgr, byte> leftImg, Image<Bgr, byte> rightImg)
{
    // Short-circuit order matters: the depth comparison is only evaluated once
    // the calibration flags have passed.
    bool usable = _isCalibrated && _isStereo && leftImg.Mat.Depth == rightImg.Mat.Depth;
    if (!usable)
    {
        return new Mat();
    }

    var disparity = new Mat();
    // minDisparity = 5, numDisparities = 64, blockSize = 3
    using (var matcher = new StereoSGBM(5, 64, 3))
    {
        matcher.Compute(leftImg, rightImg, disparity);
    }
    return disparity;
}
/// <summary>
/// Given the left and right image, compute the disparity map and, when a Q
/// matrix is available, the reprojected 3D point cloud.
/// </summary>
/// <param name="left">The left image</param>
/// <param name="right">The right image</param>
/// <param name="cfg">SGBM settings; a default-constructed config is used when null</param>
public Computer3DPointsFromStereoPairOutput Computer3DPointsFromStereoPair(Image<Gray, Byte> left, Image<Gray, Byte> right, Compute3DFromStereoCfg cfg = null)
{
    // Fall back to the default settings when the caller passes nothing.
    cfg = cfg ?? new Compute3DFromStereoCfg();

    var res = new Computer3DPointsFromStereoPairOutput();
    res.disparityMap = new Image<Gray, short>(left.Size);

    // All tuning knobs come from the (thread-safe) config object. Note that
    // fullDP enables the full-scale 2-pass dynamic programming algorithm,
    // which consumes O(W*H*numDisparities) bytes.
    using (var solver = new StereoSGBM(cfg.minDispatities, cfg.numDisparities, cfg.SAD,
                                       cfg.P1, cfg.P2, cfg.disp12MaxDiff, cfg.PreFilterCap,
                                       cfg.UniquenessRatio, cfg.Speckle, cfg.SpeckleRange,
                                       cfg.fullDP))
    //using (StereoBM stereoSolver = new StereoBM(Emgu.CV.CvEnum.STEREO_BM_TYPE.BASIC, 0))
    {
        // Semi-global block matching (modified H. Hirschmuller HH08).
        solver.Compute(left, right, res.disparityMap);

        // Reproject the disparity image to 3D space when calibration supplied Q.
        if (Q != null)
        {
            res.points = PointCollection.ReprojectImageTo3D(res.disparityMap, Q);
        }
    }

    return res;
}
/// <summary>
/// Given the left and right image, compute the disparity map and the 3D point cloud.
/// </summary>
/// <param name="left">The left image</param>
/// <param name="right">The right image</param>
/// <param name="disparityMap">The left disparity map</param>
/// <param name="points">The 3D point cloud within a [-0.5, 0.5] cube</param>
private void Computer3DPointsFromStereoPair(Image <Gray, Byte> left, Image <Gray, Byte> right, out Image <Gray, short> disparityMap, out MCvPoint3D32f[] points)
{
    Size size = left.Size;
    disparityMap = new Image <Gray, short>(size);

    //thread safe calibration values

    /*This is maximum disparity minus minimum disparity. Always greater than 0.
      In the current implementation this parameter must be divisible by 16.*/
    int numDisparities = GetSliderValue(Num_Disparities);

    /*The minimum possible disparity value. Normally it is 0, but sometimes rectification
      algorithms can shift images, so this parameter needs to be adjusted accordingly*/
    int minDispatities = GetSliderValue(Min_Disparities);

    /*The matched block size. Must be an odd number >= 1.
      Normally, it should be somewhere in the 3..11 range*/
    int SAD = GetSliderValue(SAD_Window);

    /*P1, P2 - Parameters that control disparity smoothness. The larger the values, the smoother the disparity.
     * P1 is the penalty on the disparity change by plus or minus 1 between neighbor pixels.
     * P2 is the penalty on the disparity change by more than 1 between neighbor pixels.
     * The algorithm requires P2 > P1.
     * See the stereo_match.cpp sample where some reasonably good P1 and P2 values are shown
     * (like 8*number_of_image_channels*SADWindowSize*SADWindowSize and
     * 32*number_of_image_channels*SADWindowSize*SADWindowSize, respectively).*/
    int P1 = 8 * 1 * SAD * SAD;   //GetSliderValue(P1_Slider);
    int P2 = 32 * 1 * SAD * SAD;  //GetSliderValue(P2_Slider);

    /*Maximum allowed difference (in integer pixel units) in the left-right disparity check.
      Set it to a non-positive value to disable the check.*/
    int disp12MaxDiff = GetSliderValue(Disp12MaxDiff);

    /*Truncation value for the prefiltered image pixels.
     * The algorithm first computes the x-derivative at each pixel and clips its value
     * to the [-preFilterCap, preFilterCap] interval.
     * The result values are passed to the Birchfield-Tomasi pixel cost function.*/
    int PreFilterCap = GetSliderValue(pre_filter_cap);

    /*The margin in percent by which the best (minimum) computed cost function value should
     * "win" the second best value to consider the found match correct.
     * Normally, some value within the 5-15 range is good enough*/
    int UniquenessRatio = GetSliderValue(uniquenessRatio);

    /*Maximum size of smooth disparity regions to consider during speckle filtering.
     * If you do speckle filtering, set it to some positive value, multiple of 16.
     * Normally, 16 or 32 is good enough*/
    int Speckle = GetSliderValue(Speckle_Window);

    /*Maximum disparity variation within each connected component. If you do speckle filtering,
      set it to some positive value, multiple of 16. Normally, 16 or 32 is good enough.*/
    int SpeckleRange = GetSliderValue(specklerange);

    /*Set fullDP to true to run the full-scale 2-pass dynamic programming algorithm.
     * It will consume O(W*H*numDisparities) bytes, which is large for 640x480 stereo
     * and huge for HD-size pictures. By default this is usually false.
     * fullDP is set globally (a field) for ease of use.*/
    //bool fullDP = true;

    using (StereoSGBM stereoSolver = new StereoSGBM(minDispatities, numDisparities, SAD, P1, P2, disp12MaxDiff, PreFilterCap, UniquenessRatio, Speckle, SpeckleRange, fullDP))
    //using (StereoBM stereoSolver = new StereoBM(Emgu.CV.CvEnum.STEREO_BM_TYPE.BASIC, 0))
    {
        stereoSolver.FindStereoCorrespondence(left, right, disparityMap); // Computes the disparity map using:
        /*GC: graph cut-based algorithm
         * BM: block matching algorithm
         * SGBM: modified H. Hirschmuller algorithm HH08*/
        points = PointCollection.ReprojectImageTo3D(disparityMap, Q); // Reprojects disparity image to 3D space.
    }
}
/// <summary>
/// Initializes the SGBM matcher, the GFTT feature detector and the
/// measurement timer when the form loads.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void _3DMeasure_Load(object sender, EventArgs e)
{
    // SGBM settings: minDisparity 0, 64 disparities, block size 15, P1/P2 0,
    // disp12MaxDiff 1, preFilterCap 60, uniquenessRatio 10,
    // speckleWindowSize 100, speckleRange 32.
    sgbm = new StereoSGBM(0, 64, 15, 0, 0, 1, 60, 10, 100, 32);
    gFTT = new GFTTDetector(4, 0.01, 1, 3, false, 0.04); // initialize the GFTT corner detector
    // Create and start the timer: first callback immediately, then every 100 ms.
    ObjectPointsCal_Timer = new System.Threading.Timer(ObjectPointsCal, null, 0, 100);
}