Example #1
0
        /// <summary>
        /// Filters two parallel vectors, keeping only the entries whose status
        /// byte is non-zero, and replaces the callers' vectors with the result.
        /// </summary>
        /// <param name="f1">Tracked key points; replaced with the kept subset.</param>
        /// <param name="f2">Matching 3-D points; replaced with the kept subset.</param>
        /// <param name="status">Per-entry flag (non-zero = keep), e.g. from optical flow.</param>
        public static void KeepVectorsByStatus(ref VectorOfKeyPoint f1, ref VectorOfPoint3D32F f2, VectorOfByte status)
        {
            var keptKeyPoints = new VectorOfKeyPoint();
            var keptPoints3D  = new VectorOfPoint3D32F();

            int count = status.Size;
            for (int idx = 0; idx < count; idx++)
            {
                if (status[idx] == 0)
                {
                    continue;
                }
                keptKeyPoints.Push(new[] { f1[idx] });
                keptPoints3D.Push(new[] { f2[idx] });
            }

            // NOTE(review): the vectors previously referenced by f1/f2 are not
            // disposed here; their lifetime remains the caller's responsibility.
            f1 = keptKeyPoints;
            f2 = keptPoints3D;
        }
        /// <summary>
        /// Re-estimates the pose of every captured ChArUco view from fisheye-undistorted
        /// image points, then scores the supplied intrinsics by calling <c>Validate</c>
        /// over the views whose pose could be estimated.
        /// </summary>
        /// <param name="charucoIds">Flat list of corner ids across all frames.</param>
        /// <param name="charucoCorners">Flat list of detected corner positions, parallel to <paramref name="charucoIds"/>.</param>
        /// <param name="markerCounterPerFrame">Number of corners contributed by each frame; partitions the two flat lists.</param>
        /// <param name="fisheye">Forwarded to <c>Validate</c> to select the error model.</param>
        /// <param name="GetRemoteChessboardCorner">Callback used by <c>GetChessboardCorner</c> to resolve ideal board corners.</param>
        /// <returns>The validation score (reprojection error) reported by <c>Validate</c>.</returns>
        public static double ValidateCharuco(int squaresX, int squaresY, float squareLength, float markerLength, PredefinedDictionaryName dictionary, Size imageSize, VectorOfInt charucoIds, VectorOfPointF charucoCorners, VectorOfInt markerCounterPerFrame, bool fisheye, Func <byte[], byte[]> GetRemoteChessboardCorner, Mat cameraMatrix, Mat distCoeffs)
        {
            VectorOfVectorOfPoint3D32F processedObjectPoints = new VectorOfVectorOfPoint3D32F();
            VectorOfVectorOfPointF     processedImagePoints  = new VectorOfVectorOfPointF();
            VectorOfPoint3D32F         rvecs = new VectorOfPoint3D32F();
            VectorOfPoint3D32F         tvecs = new VectorOfPoint3D32F();

            // Running index into the flat charucoIds/charucoCorners lists.
            int k = 0;

            for (int i = 0; i < markerCounterPerFrame.Size; i++)
            {
                int                nMarkersInThisFrame       = markerCounterPerFrame[i];
                VectorOfPointF     currentImgPoints          = new VectorOfPointF();
                VectorOfPointF     currentImgPointsUndistort = new VectorOfPointF();
                VectorOfInt        currentIds       = new VectorOfInt();
                VectorOfPoint3D32F currentObjPoints = new VectorOfPoint3D32F();
                Mat                tvec             = new Mat();
                Mat                rvec             = new Mat();

                // Slice out this frame's corners/ids and pair each with its ideal
                // board-space corner.
                for (int j = 0; j < nMarkersInThisFrame; j++)
                {
                    currentImgPoints.Push(new PointF[] { charucoCorners[k] });
                    currentIds.Push(new int[] { charucoIds[k] });
                    currentObjPoints.Push(new MCvPoint3D32f[] { GetChessboardCorner(squaresX, squaresY, squareLength, markerLength, charucoIds[k], dictionary, GetRemoteChessboardCorner) });
                    k++;
                }

                // Zero distortion for the pose estimation: the points below are
                // already undistorted, so the board pose is fitted on an ideal lens.
                Mat distCoeffsNew = new Mat(1, 4, DepthType.Cv64F, 1);
                distCoeffsNew.SetValue(0, 0, 0);
                distCoeffsNew.SetValue(0, 1, 0);
                distCoeffsNew.SetValue(0, 2, 0);
                distCoeffsNew.SetValue(0, 3, 0);

                // Undistort with identity P/R so results stay in normalized-identity space.
                // ("UndistorPoints" is Emgu's actual API spelling.)
                Fisheye.UndistorPoints(currentImgPoints, currentImgPointsUndistort, cameraMatrix, distCoeffs, Mat.Eye(3, 3, DepthType.Cv64F, 1), Mat.Eye(3, 3, DepthType.Cv64F, 1));
                if (ArucoInvoke.EstimatePoseCharucoBoard(currentImgPointsUndistort, currentIds, CreateBoard(squaresX, squaresY, squareLength, markerLength, new Dictionary(dictionary)), Mat.Eye(3, 3, DepthType.Cv64F, 1), distCoeffsNew, rvec, tvec))
                {
                    // Keep the estimated extrinsics of this view as 3-vectors.
                    rvecs.Push(new MCvPoint3D32f[] { new MCvPoint3D32f((float)rvec.GetValue(0, 0), (float)rvec.GetValue(1, 0), (float)rvec.GetValue(2, 0)) });
                    tvecs.Push(new MCvPoint3D32f[] { new MCvPoint3D32f((float)tvec.GetValue(0, 0), (float)tvec.GetValue(1, 0), (float)tvec.GetValue(2, 0)) });

                    // NOTE(review): the ORIGINAL (distorted) image points are pushed
                    // here, not the undistorted ones used for pose estimation —
                    // presumably because Validate applies distCoeffs itself; confirm.
                    processedImagePoints.Push(currentImgPoints);
                    processedObjectPoints.Push(currentObjPoints);
                }
                // NOTE(review): the per-frame vectors and Mats created above are
                // never disposed; consider `using` if this runs over many frames.
            }

            return(Validate(processedObjectPoints, processedImagePoints, cameraMatrix, distCoeffs, rvecs, tvecs, fisheye));
        }
        /// <summary>
        /// Calibrates camera intrinsics from ChArUco detections (fisheye or pinhole
        /// model) and returns the matrix, distortion coefficients and the validation
        /// score computed by <c>Validate</c>.
        /// </summary>
        /// <param name="charucoIds">Flat list of corner ids across all frames.</param>
        /// <param name="charucoCorners">Flat list of detected corner positions, parallel to <paramref name="charucoIds"/>.</param>
        /// <param name="markerCounterPerFrame">Number of corners contributed by each frame; partitions the two flat lists.</param>
        /// <param name="fisheye">True = fisheye calibration model, false = standard pinhole model.</param>
        /// <param name="GetRemoteChessboardCorner">Callback used by <c>GetChessboardCorner</c> to resolve ideal board corners.</param>
        /// <returns>Tuple of (camera matrix, distortion coefficients, validation RMS).</returns>
        public static (Mat cameraMatrix, Mat distCoeffs, double rms) CalibrateCharuco(int squaresX, int squaresY, float squareLength, float markerLength, PredefinedDictionaryName dictionary, Size imageSize, VectorOfInt charucoIds, VectorOfPointF charucoCorners, VectorOfInt markerCounterPerFrame, bool fisheye, Func <byte[], byte[]> GetRemoteChessboardCorner)
        {
            Mat    cameraMatrix = new Mat(3, 3, Emgu.CV.CvEnum.DepthType.Cv64F, 1);
            Mat    distCoeffs   = new Mat(1, 4, Emgu.CV.CvEnum.DepthType.Cv64F, 1);
            double rms          = 0.0;

            VectorOfVectorOfPoint3D32F processedObjectPoints = new VectorOfVectorOfPoint3D32F();
            VectorOfVectorOfPointF     processedImagePoints  = new VectorOfVectorOfPointF();

            // Running index into the flat charucoIds/charucoCorners lists.
            int k = 0;

            // Regroup the flat corner lists into per-frame image/object point sets.
            for (int i = 0; i < markerCounterPerFrame.Size; i++)
            {
                int                nMarkersInThisFrame = markerCounterPerFrame[i];
                VectorOfPointF     currentImgPoints    = new VectorOfPointF();
                VectorOfPoint3D32F currentObjPoints    = new VectorOfPoint3D32F();

                for (int j = 0; j < nMarkersInThisFrame; j++)
                {
                    currentImgPoints.Push(new PointF[] { charucoCorners[k] });
                    currentObjPoints.Push(new MCvPoint3D32f[] { GetChessboardCorner(squaresX, squaresY, squareLength, markerLength, charucoIds[k], dictionary, GetRemoteChessboardCorner) });
                    k++;
                }

                processedImagePoints.Push(currentImgPoints);
                processedObjectPoints.Push(currentObjPoints);
            }

            VectorOfPoint3D32F rvecs = new VectorOfPoint3D32F();
            VectorOfPoint3D32F tvecs = new VectorOfPoint3D32F();

            if (fisheye)
            {
                // Fisheye calibration fills rvecs/tvecs with per-view extrinsics.
                Fisheye.Calibrate(processedObjectPoints, processedImagePoints, imageSize, cameraMatrix, distCoeffs, rvecs, tvecs, Fisheye.CalibrationFlag.FixSkew | Fisheye.CalibrationFlag.RecomputeExtrinsic, new MCvTermCriteria(400, double.Epsilon));
            }
            else
            {
                // NOTE(review): the per-view rotation/translation outputs are
                // discarded (fresh Mats), so rvecs/tvecs stay EMPTY in this branch
                // when passed to Validate below — confirm Validate recomputes
                // extrinsics itself for the non-fisheye case.
                CvInvoke.CalibrateCamera(processedObjectPoints, processedImagePoints, imageSize, cameraMatrix, distCoeffs, new Mat(), new Mat(), CalibType.FixK3, new MCvTermCriteria(30, 1e-4));
            }

            rms = Validate(processedObjectPoints, processedImagePoints, cameraMatrix, distCoeffs, rvecs, tvecs, fisheye);

            return(cameraMatrix, distCoeffs, rms);
        }
Example #4
0
        /// <summary>
        /// Loads a fixed test image, draws a hand-picked 4x3 grid of corner points
        /// on a copy, runs a single-view camera calibration over those points, and
        /// displays the source and annotated images side by side.
        /// </summary>
        public void MainStuff()
        {
            SRC_Img       = new Image <Gray, byte>(@"C:\Users\Админ\Downloads\image63341262,2002.png");
            Corrected_Img = SRC_Img.Clone();

            // Manually measured image-space positions of a 4-column x 3-row grid.
            PointF[] corners = new PointF[] { new PointF(100, 196), new PointF(261, 190), new PointF(417, 192), new PointF(584, 201),
                                              new PointF(111, 277), new PointF(284, 287), new PointF(458, 291), new PointF(580, 284),
                                              new PointF(130, 368), new PointF(276, 395), new PointF(429, 391), new PointF(563, 365) };
            VectorOfPointF vec = new VectorOfPointF();
            vec.Push(corners);

            // Ideal planar (Z = 0) object-space grid: X at thirds of the width,
            // Y at halves of the height.
            MCvPoint3D32f[] objCorners = new MCvPoint3D32f[] { new MCvPoint3D32f(0, 0, 0.0f), new MCvPoint3D32f(SRC_Img.Width / 3 - 1, 0, 0.0f), new MCvPoint3D32f(2 * SRC_Img.Width / 3 - 1, 0, 0.0f), new MCvPoint3D32f(SRC_Img.Width - 1, 0, 0.0f),
                                                               new MCvPoint3D32f(0, SRC_Img.Height / 2 - 1, 0.0f), new MCvPoint3D32f(SRC_Img.Width / 3 - 1, SRC_Img.Height / 2 - 1, 0.0f), new MCvPoint3D32f(2 * SRC_Img.Width / 3 - 1, SRC_Img.Height / 2 - 1, 0.0f), new MCvPoint3D32f(SRC_Img.Width - 1, SRC_Img.Height / 2 - 1, 0.0f),
                                                               new MCvPoint3D32f(0, SRC_Img.Height - 1, 0.0f), new MCvPoint3D32f(SRC_Img.Width / 3 - 1, SRC_Img.Height - 1, 0.0f), new MCvPoint3D32f(2 * SRC_Img.Width / 3 - 1, SRC_Img.Height - 1, 0.0f), new MCvPoint3D32f(SRC_Img.Width - 1, SRC_Img.Height - 1, 0.0f) };

            // Initial intrinsic guess: unit focal lengths, principal point near
            // the image centre.
            Matrix <double> CameraMatrix = new Matrix <double>(3, 3, 1);
            CameraMatrix[0, 0] = 1;
            CameraMatrix[1, 1] = 1;
            CameraMatrix[2, 2] = 1;
            CameraMatrix[0, 2] = 349.417;
            CameraMatrix[1, 2] = 286.417;

            Mat newCameraMatrix = CvInvoke.GetDefaultNewCameraMatrix(CameraMatrix);

            Mat             distCoeffs    = new Mat(1, 5, DepthType.Cv32F, 1);
            Mat             rotCoeffs     = new Mat();
            Mat             translVectors = new Mat();
            MCvTermCriteria TermCriteria  = new MCvTermCriteria(30, 0.1);

            // Annotate a fresh copy of the source with the hand-picked corners.
            Corrected_Img = SRC_Img.Clone();
            CvInvoke.DrawChessboardCorners(Corrected_Img, new System.Drawing.Size(4, 3), vec, true);

            VectorOfPoint3D32F obj3dvec = new VectorOfPoint3D32F();
            obj3dvec.Push(objCorners);

            try
            {
                // Single-view calibration; refines CameraMatrix/distCoeffs in place.
                // The returned reprojection error is not used.
                CvInvoke.CalibrateCamera(obj3dvec,
                                         vec,
                                         SRC_Img.Size,
                                         CameraMatrix,
                                         distCoeffs,
                                         rotCoeffs,
                                         translVectors,
                                         CalibType.Default,
                                         TermCriteria);
            }
            catch (Exception)
            {
                // Best-effort: a failed calibration still lets the annotated image
                // be shown below. TODO(review): log the exception instead.
            }

            // NOTE(review): the undistortion results below are computed but never
            // displayed — presumably leftover experimentation; confirm intent.
            IntrinsicCameraParameters IC = new IntrinsicCameraParameters(8);
            Matrix <float>            Map1, Map2;
            IC.InitUndistortMap(SRC_Img.Width, SRC_Img.Height, out Map1, out Map2);
            Image <Gray, Byte> stuff = Undistort(SRC_Img);

            imageBox1.Image = SRC_Img.Resize(imageBox1.Width, imageBox1.Height, Inter.Linear);
            imageBox2.Image = Corrected_Img.Resize(imageBox1.Width, imageBox1.Height, Inter.Linear);
        }
Example #5
0
        /// <summary>
        /// Loads the fixed test image, refines a hand-picked grid of corners to
        /// sub-pixel accuracy, then applies a fisheye undistortion with hard-coded
        /// intrinsics into Corrected_Img and builds a (currently unused)
        /// side-by-side composite.
        /// </summary>
        public void SuperR()
        {
            SRC_Img       = new Image <Gray, byte>(@"C:\Users\Админ\Downloads\image63341262,2002.png");
            Corrected_Img = SRC_Img.Clone();

            // Manually measured image-space positions of a 4-column x 3-row grid.
            PointF[] corners = new PointF[] { new PointF(100, 196), new PointF(261, 190), new PointF(417, 192), new PointF(584, 201),
                                              new PointF(111, 277), new PointF(284, 287), new PointF(458, 291), new PointF(580, 284),
                                              new PointF(130, 368), new PointF(276, 395), new PointF(429, 391), new PointF(563, 365) };

            // Ideal planar grid: X at 0/159/329/479, Y at 0/104/209 (Z = 0).
            MCvPoint3D32f[] objCorners = new MCvPoint3D32f[] { new MCvPoint3D32f(0, 0, 0.0f), new MCvPoint3D32f(159, 0, 0.0f), new MCvPoint3D32f(329, 0, 0.0f), new MCvPoint3D32f(479, 0, 0.0f),
                                                               new MCvPoint3D32f(0, 104, 0.0f), new MCvPoint3D32f(159, 104, 0.0f), new MCvPoint3D32f(329, 104, 0.0f), new MCvPoint3D32f(479, 104, 0.0f),
                                                               new MCvPoint3D32f(0, 209, 0.0f), new MCvPoint3D32f(159, 209, 0.0f), new MCvPoint3D32f(329, 209, 0.0f), new MCvPoint3D32f(479, 209, 0.0f) };

            VectorOfPointF veccorners = new VectorOfPointF();
            veccorners.Push(corners);

            VectorOfPoint3D32F vecobjcorners = new VectorOfPoint3D32F();
            vecobjcorners.Push(objCorners);

            MCvTermCriteria TermCriteria = new MCvTermCriteria(30, 0.1);

            // Snap the manual corner guesses to the nearest true corners.
            CvInvoke.CornerSubPix(SRC_Img, veccorners, new Size(2, 2), new Size(-1, -1), TermCriteria);

            IntrinsicCameraParameters intrisic = new IntrinsicCameraParameters();
            intrisic.IntrinsicMatrix = new Matrix <double>(new double[, ] {
                { 1, 0, 349.417 }, { 0, 1, 286.417 }, { 0, 0, 1 }
            });
            try
            {
                Matrix <float> matrix = new Matrix <float>(new float[, ] {
                    { 1, 0, 349 }, { 0, 1, 286 }, { 0, 0, 1 }
                });
                // Undistort with fixed fisheye coefficients; writes Corrected_Img.
                // ("UndistorImage" is Emgu's actual API spelling.)
                Fisheye.UndistorImage(SRC_Img, Corrected_Img, matrix, new VectorOfFloat(new float[] { 3500, 3500, 0, 0 }));

                // Side-by-side composite of original and corrected images.
                // NOTE(review): Res_Img is built but never displayed — confirm intent.
                Image <Gray, Byte> Res_Img = new Image <Gray, byte>(2 * SRC_Img.Width, SRC_Img.Height);
                CvInvoke.HConcat(SRC_Img, Corrected_Img, Res_Img);
            }
            catch (Exception)
            {
                // Best-effort: undistortion failures are silently ignored.
                // TODO(review): log the exception instead.
            }
        }
 /// <summary>
 /// Periodic worker: (1) computes a BM disparity map from the current stereo
 /// pair, (2) extracts contours and corner features from the left image, and
 /// (3) reprojects the detected corners to 3-D camera coordinates via the Q
 /// matrix, then pushes the results to the UI.
 /// </summary>
 /// <param name="state">Timer/thread-pool state object (unused).</param>
 private void ObjectPointsCal(object state)
 {
     if (BM_CAL_FLAG)                                // BM disparity-map computation
     {
         // Grab the current stereo pair.
         Data.leftImg.CopyTo(bm_lsrc);
         Data.rightImg.CopyTo(bm_rsrc);
         // The BM matcher only accepts grayscale input.
         CvInvoke.CvtColor(bm_lsrc, bm_lsrc, ColorConversion.Bgr2Gray);
         CvInvoke.CvtColor(bm_rsrc, bm_rsrc, ColorConversion.Bgr2Gray);
         bm.Compute(bm_lsrc, bm_rsrc, bm_distImg);
         // Divide by 16 to obtain the true disparity; this data is reused below.
         bm_distImg.ConvertTo(bm_distImg, DepthType.Cv32F, 1.0 / 16);
         bm_distImg8U = new Mat(bm_distImg.Size, DepthType.Cv8U, 1);
         // Normalized 8-bit copy purely for display.
         CvInvoke.Normalize(bm_distImg, bm_distImg8U, 0, 255, NormType.MinMax, DepthType.Cv8U);
         this.imageBox1.Image = bm_distImg8U;
     }
     if (SHOWCONTOURS_FLAG)                          // contour and corner extraction
     {
         contourImg.SetTo(new MCvScalar(0, 0, 0));
         Data.leftImg.CopyTo(contourSrc);
         CvInvoke.CvtColor(contourSrc, grayImg, ColorConversion.Bgr2Gray);
         CvInvoke.Canny(grayImg, cannyImg, 100, 200, 3, false);                            // binary edge image
         CvInvoke.FindContours(cannyImg, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxNone);
         CvInvoke.DrawContours(contourImg, contours, -1, new MCvScalar(255, 255, 255), 2); // draw all contours
         cornerPoints = gFTT.Detect(grayImg);                                              // detect corner features
         for (int i = 0; i < cornerPoints.Length; i++)
         {
             Point pt = new Point();
             pt.X = (int)cornerPoints[i].Point.X;
             pt.Y = (int)cornerPoints[i].Point.Y;
             CvInvoke.Circle(contourImg, pt, 3, new MCvScalar(0, 0, 255), -1); // mark each corner
         }
         this.imageBox2.Image = contourImg;                                    // show the result
     }
     if (OBJPOINTS_CAL_FLAG)                         // 3-D coordinates of the corners, shown in the TextBox
     {
         Image <Gray, Single> bm_distImg_C = bm_distImg.ToImage <Gray, Single>();
         try
         {
             for (int i = 0; i < cornerPoints.Length; i++)
             {
                 MCvPoint3D32f[] ptf = new MCvPoint3D32f[1];
                 ptf[0].X = cornerPoints[i].Point.X;
                 ptf[0].Y = cornerPoints[i].Point.Y;
                 // Clamp to the last valid index. FIX: was `>`, which let an
                 // index exactly equal to Width/Height slip through and read
                 // out of bounds (valid indices are 0..size-1).
                 if (ptf[0].X >= bm_distImg_C.Width)
                 {
                     ptf[0].X = bm_distImg_C.Width - 1;
                 }
                 if (ptf[0].Y >= bm_distImg_C.Height)
                 {
                     ptf[0].Y = bm_distImg_C.Height - 1;
                 }
                 // Disparity value (in pixels) at the corner.
                 // NOTE(review): Emgu's Image.Data is indexed [row, column,
                 // channel], i.e. [Y, X, 0]; this reads [X, Y, 0] — verify the
                 // axes are not swapped here.
                 ptf[0].Z = bm_distImg_C.Data[(int)ptf[0].X, (int)ptf[0].Y, 0];
                 cornerPoints_vec.Push(ptf);
             }
             // Perspective-transform the sparse feature points through Q to get
             // their coordinates in the camera frame.
             CvInvoke.PerspectiveTransform(cornerPoints_vec, objXYZ, Data.Q);
             // UI updates must be marshalled onto the UI thread via Invoke.
             this.Invoke(new UpdateTextBox(UpdateTextBoxFunc), new object[] { });
         }
         catch (Exception e)
         {
             Data.LogString = "[error]  " + e.Message;
         }
     }
     cornerPoints_vec.Clear();                       // the vector accumulates across calls; reset each pass
 }
Example #7
0
        /// <summary>
        /// Computes the intrinsic parameters of the left and right cameras from
        /// the chessboard images listed in calibrationImg.txt under <c>root</c>.
        /// Results are written to the left/right camera matrix and distortion
        /// fields; status is reported through Data.LogString.
        /// </summary>
        void CameraIntrinsicCalculate()
        {
            string[] allimgfiles = File.ReadAllLines(root + @"\calibrationImg.txt");    // every captured image name
            string   imgfile     = root + @"\" + allimgfiles[0];                        // must be a full path

            try
            {
                // Extract chessboard corners from every left/right image.
                for (int i = 0; i < allimgfiles.Length; i++)
                {
                    imgfile = root + @"\" + allimgfiles[i];
                    if (imgfile.Contains("leftImg"))                 // left-camera image
                    {
                        srcImg = CvInvoke.Imread(imgfile, ImreadModes.Color);
                        CvInvoke.CvtColor(srcImg, grayImg, ColorConversion.Bgr2Gray);
                        if (false == CvInvoke.FindChessboardCorners(grayImg, patternSize, corners))
                        {
                            throw new Exception("左相机采集图片未识别到所有角点~,请重新采集~");
                        }
                        CvInvoke.Find4QuadCornerSubpix(grayImg, corners, new Size(11, 11)); // sub-pixel refinement
                        left_corners_set.Push(corners);                                     // store the extracted corners
                    }
                    else if (imgfile.Contains("rightImg"))           // right-camera image
                    {
                        srcImg = CvInvoke.Imread(imgfile, ImreadModes.Color);
                        CvInvoke.CvtColor(srcImg, grayImg, ColorConversion.Bgr2Gray);
                        if (false == CvInvoke.FindChessboardCorners(grayImg, patternSize, corners))
                        {
                            throw new Exception("右相机采集图片未识别到所有角点~,请重新采集~");
                        }
                        CvInvoke.Find4QuadCornerSubpix(grayImg, corners, new Size(11, 11)); // sub-pixel refinement
                        right_corners_set.Push(corners);                                    // store the extracted corners
                    }
                    CvInvoke.WaitKey(10);   // let the UI/event loop breathe between images
                }

                // The ideal board grid is identical for every view, so build it
                // once (previously it was rebuilt per captured view) and push one
                // copy per view; the scratch vector is disposed when done.
                using (VectorOfPoint3D32F boardTemplate = new VectorOfPoint3D32F())
                {
                    for (int i = 0; i < patternSize.Height; i++)
                    {
                        for (int j = 0; j < patternSize.Width; j++)
                        {
                            boardTemplate.Push(new MCvPoint3D32f[] { new MCvPoint3D32f(j * SquareSize, i * SquareSize, 0) });
                        }
                    }
                    // NOTE(review): assumes the left and right sets contain the
                    // same number of views — confirm captures are always paired.
                    for (int k = 0; k < left_corners_set.Size; k++)
                    {
                        objectpoints.Push(boardTemplate);
                    }
                }

                // Left-camera intrinsic calibration.
                CvInvoke.CalibrateCamera(objectpoints, left_corners_set, new Size(320, 240),
                                         leftCamMatrix, leftDistCoeffs, leftRvecs, leftTvecs, CalibType.Default, new MCvTermCriteria(30, 0.00001));
                // Right-camera intrinsic calibration.
                CvInvoke.CalibrateCamera(objectpoints, right_corners_set, new Size(320, 240),
                                         rightCamMatrix, rightDistCoeffs, rightRvecs, rightTvecs, CalibType.Default, new MCvTermCriteria(30, 0.00001));
                Data.LogString = "相机内参标定完成,请保存数据~~";
            }
            catch (Exception e)
            {
                Data.LogString = e.Message;
            }
        }