// Called when data for any output pin is requested.
// Converts a camera/projection transform plus a sensor size into OpenCV-style
// intrinsics (focal lengths + principal point), one output slice per input slice.
        public void Evaluate(int SpreadMax)
        {
            //if (!FPinInIntrinsics.IsChanged)
            //    return;

            // No transform connected: publish an empty spread.
            if (FPinInCameraTransform[0] == null)
            {
                FPinOutIntrinsics.SliceCount = 0;
            }
            else
            {
                FPinOutIntrinsics.SliceCount = SpreadMax;

                for (int i = 0; i < SpreadMax; i++)
                {
                    IntrinsicCameraParameters cp = new IntrinsicCameraParameters();
                    // Inspiration / details from http://strawlab.org/2011/11/05/augmented-reality-with-OpenGL/ - See Intrinsics.cs too
                    // fx, fy: the projection's diagonal scale terms mapped from
                    // normalized device coordinates to pixels (half sensor size).
                    cp.IntrinsicMatrix[0, 0] = FPinInCameraTransform[i][0, 0] * FPinInSensorSize[i].x / 2;
                    cp.IntrinsicMatrix[1, 1] = FPinInCameraTransform[i][1, 1] * FPinInSensorSize[i].y / 2;
                    // cx, cy: the NDC offset terms (row 2 — presumably a row-vector
                    // transform convention, TODO confirm) shifted from [-1,1] to [0,2]
                    // then scaled to pixels.
                    cp.IntrinsicMatrix[0, 2] = (FPinInCameraTransform[i][2, 0] + 1) * FPinInSensorSize[i].x / 2;
                    cp.IntrinsicMatrix[1, 2] = (FPinInCameraTransform[i][2, 1] + 1) * FPinInSensorSize[i].y / 2;
                    cp.IntrinsicMatrix[2, 2] = 1;
                    FPinOutIntrinsics[i]     = new Intrinsics(cp, new Size((int)FPinInSensorSize[i].x, (int)FPinInSensorSize[i].y));
                }
            }
        }
Esempio n. 2
0
        /// <summary>
        /// Reads the chessboard parameters from the UI controls, runs the
        /// camera calibration, and writes the resulting intrinsic values back
        /// into the form's text boxes.
        /// </summary>
        private void startCalibrate()
        {
            if (validControls())
            {
                // Pattern geometry from the UI: inner-corner counts per axis,
                // number of images to capture, and physical square size.
                patternSize = new Size(Convert.ToInt16(Corners_Nx.Text), Convert.ToInt16(Corners_Ny.Text));
                nPoints     = patternSize.Height * patternSize.Width;
                nImage      = Convert.ToInt16(Image_Count.Text);
                square      = (float)Convert.ToDouble(Square_Size.Text);

                isCalibrating = true;

                intrinsicParam = cameraCalibration();

                // Focal lengths (fx, fy) and principal point (cx, cy).
                SetText(textBox1, intrinsicParam.IntrinsicMatrix[0, 0].ToString());
                SetText(textBox2, intrinsicParam.IntrinsicMatrix[1, 1].ToString());
                SetText(textBox3, intrinsicParam.IntrinsicMatrix[0, 2].ToString());
                SetText(textBox4, intrinsicParam.IntrinsicMatrix[1, 2].ToString());

                // Distortion coefficients, shown as k1, k2, k3, p1, p2
                // (OpenCV stores them in [k1, k2, p1, p2, k3] order).
                SetText(textBox5, intrinsicParam.DistortionCoeffs[0, 0].ToString());
                SetText(textBox6, intrinsicParam.DistortionCoeffs[1, 0].ToString());
                SetText(textBox7, intrinsicParam.DistortionCoeffs[4, 0].ToString());
                SetText(textBox8, intrinsicParam.DistortionCoeffs[2, 0].ToString());
                SetText(textBox9, intrinsicParam.DistortionCoeffs[3, 0].ToString());

                isCalibrating = false;
            }
            else
            {
                MessageBox.Show("Please Input ChessBoard Params");
            }
        }
Esempio n. 3
0
    /// <summary>
    /// Builds an initial guess for the camera intrinsics from a fixed
    /// horizontal/vertical field of view and the current image size.
    /// </summary>
    /// <returns>Intrinsics with FOV-derived focal lengths and the principal
    /// point at the image centre.</returns>
    private IntrinsicCameraParameters CreateIntrinsicGuess()
    {
        // FOV values taken from http://www.neilmendoza.com/projector-field-view-calculator/
        // (see also https://docs.google.com/spreadsheet/ccc?key=0AuC4NW61c3-cdDFhb1JxWUFIVWpEdXhabFNjdDJLZXc#gid=0)
        const float horizontalFov = 91.2705674249382f;
        const float verticalFov   = 59.8076333281726f;

        double imageHeight = (double)_size.Height;
        double imageWidth  = (double)_size.Width;

        // Pinhole model: f = (size / 2) / tan(fov / 2).
        double focalX = (double)((float)imageWidth / (2.0f * Mathf.Tan(0.5f * horizontalFov * Mathf.Deg2Rad)));
        double focalY = (double)((float)imageHeight / (2.0f * Mathf.Tan(0.5f * verticalFov * Mathf.Deg2Rad)));

        var guess = new IntrinsicCameraParameters();

        guess.IntrinsicMatrix[0, 0] = focalX;
        guess.IntrinsicMatrix[1, 1] = focalY;
        // Principal point assumed at the image centre.
        guess.IntrinsicMatrix[0, 2] = imageWidth / 2.0;
        guess.IntrinsicMatrix[1, 2] = imageHeight / 2.0;
        guess.IntrinsicMatrix[2, 2] = 1;

        return guess;
    }
Esempio n. 4
0
 /// <summary>
 /// Stores the distortion parameters and image size and marks this
 /// instance as ready for use.
 /// </summary>
 public void Initialize(DistortionParameters parameters, Size imageSize)
 {
     this.imageSize = imageSize;
     this.parameters = parameters;
     // Cache the wrapped Emgu intrinsics object for later calls.
     icp = parameters.IntrinsicCameraParameters;
     initialized = true;
 }
Esempio n. 5
0
        /// <summary>
        /// Creates an empty calibration result: default intrinsics plus a
        /// 3x3 rectification matrix (R) and a 3x4 projection matrix (P).
        /// </summary>
        public CalibrationCameraResult()
        {
            R = new Matrix<double>(3, 3);
            P = new Matrix<double>(3, 4);

            IntrinsicParameters = new IntrinsicCameraParameters();
        }
        /// <summary>
        /// One-time initialization when the form is first shown: populates the
        /// port/resolution combos, resets the camera intrinsics to identity
        /// with zero distortion, clears the device handles, and starts the
        /// polling timer.
        /// </summary>
        private void Form1_Shown(object sender, EventArgs e)
        {
            updateCommPortCombos();

            // NOTE(review): SelectedIndex is set to 0 here and overwritten
            // with 2 below. Both assignments are kept because each may fire a
            // SelectedIndexChanged handler; if that handler is side-effect
            // free, this first assignment can be removed.
            comboResolution.SelectedIndex = 0;

            LatestCamIntrinsics = new IntrinsicCameraParameters();
            // initializing the intrinsics matrix to the identity
            LatestCamIntrinsics.IntrinsicMatrix.SetIdentity();

            //LatestCamIntrinsics.DistortionCoeffs = new Matrix<double>(4, 1);
            LatestCamIntrinsics.DistortionCoeffs.SetZero();

            // No devices attached yet; they are created elsewhere on demand.
            gps = null;
            calibrator = null;
            imu = null;

            comboResolution.SelectedIndex = 2;

            // start the timer
            timer1.Start();
        }
Esempio n. 7
0
        /// <summary>
        /// Builds distortion parameters from explicit coefficient and
        /// intrinsic values, and materializes the matching Emgu
        /// IntrinsicCameraParameters object.
        /// </summary>
        public DistortionParameters(double k1, double k2, double k3, double p1, double p2, double fx, double fy, double cx, double cy)
        {
            K1 = k1;
            K2 = k2;
            K3 = k3;
            P1 = p1;
            P2 = p2;
            Fx = fx;
            Fy = fy;
            Cx = cx;
            Cy = cy;

            var icp = new IntrinsicCameraParameters();

            // 3x3 camera matrix: focal lengths on the diagonal, principal
            // point in the last column.
            icp.IntrinsicMatrix[0, 0] = fx;
            icp.IntrinsicMatrix[1, 1] = fy;
            icp.IntrinsicMatrix[0, 2] = cx;
            icp.IntrinsicMatrix[1, 2] = cy;
            icp.IntrinsicMatrix[2, 2] = 1;

            // OpenCV coefficient order: [k1, k2, p1, p2, k3].
            icp.DistortionCoeffs[0, 0] = k1;
            icp.DistortionCoeffs[1, 0] = k2;
            icp.DistortionCoeffs[2, 0] = p1;
            icp.DistortionCoeffs[3, 0] = p2;
            icp.DistortionCoeffs[4, 0] = k3;

            IntrinsicCameraParameters = icp;
        }
Esempio n. 8
0
    /// <summary>
    /// Creates a calibration helper bound to the given camera, with empty
    /// correspondence buffers and Mapamok-style default calibration flags.
    /// </summary>
    /// <param name="cam">Camera the calibration result will be applied to.</param>
    public CalibrationUtil(Camera cam)
    {
        this.cam = cam;

        // Start with a single image slot holding zero correspondences.
        objectPoints    = new MCvPoint3D32f[1][];
        objectPoints[0] = new MCvPoint3D32f[0];

        imagePoints    = new PointF[1][];
        imagePoints[0] = new PointF[0];

        intrinsics = new IntrinsicCameraParameters();
        imageSize  = new Size(Screen.width, Screen.height);

        //Settings based on Mapamok default settings
        // Use the current intrinsics as the starting guess, keep the
        // principal point and aspect ratio fixed, and disable every
        // distortion term (all k's fixed, no tangential distortion).
        calibrationType = CALIB_TYPE.CV_CALIB_USE_INTRINSIC_GUESS
                          | CALIB_TYPE.CV_CALIB_FIX_PRINCIPAL_POINT //required to work properly !!
                          | CALIB_TYPE.CV_CALIB_FIX_ASPECT_RATIO
                          | CALIB_TYPE.CV_CALIB_FIX_K1
                          | CALIB_TYPE.CV_CALIB_FIX_K2
                          | CALIB_TYPE.CV_CALIB_FIX_K3
                          | CALIB_TYPE.CV_CALIB_FIX_K4
                          | CALIB_TYPE.CV_CALIB_FIX_K5
                          | CALIB_TYPE.CV_CALIB_ZERO_TANGENT_DIST
        ;

        // Default (zero-valued) termination criteria for the solver.
        termCriteria = new MCvTermCriteria();
    }
Esempio n. 9
0
        //called when data for any output pin is requested
        /// <summary>
        /// Decomposes an intrinsics object into three outputs: the five
        /// distortion coefficients, the flattened 3x3 camera matrix, and a
        /// 4x4 transform suitable for use as a projection matrix.
        /// </summary>
        public void Evaluate(int SpreadMax)
        {
            IntrinsicCameraParameters intrinsics = FPinInIntinsics[0];

            // With no input intrinsics all outputs are left untouched.
            if (intrinsics != null)
            {
                // OpenCV distortion coefficients in [k1, k2, p1, p2, k3] order.
                FPinOutDistiortonCoefficients.SliceCount = 5;
                for (int i = 0; i < 5; i++)
                {
                    FPinOutDistiortonCoefficients[i] = intrinsics.DistortionCoeffs[i, 0];
                }

                FPinOutCameraMatrix.SliceCount = 9;

                // Flatten the 3x3 camera matrix; the j + i * 3 index emits it
                // transposed relative to Emgu's row order.
                for (int j = 0; j < 3; j++)
                {
                    for (int i = 0; i < 3; i++)
                    {
                        FPinOutCameraMatrix[j + i * 3] = intrinsics.IntrinsicMatrix[i, j];
                    }
                }

                // 4x4 transform: focal lengths on the diagonal, principal
                // point placed in row 2 — the transpose of the usual
                // projection layout; presumably matches the host's row-vector
                // Matrix4x4 convention (TODO confirm).
                FPinOutCamreaTransform.SliceCount = 1;
                Matrix4x4 m = new Matrix4x4();
                m[0, 0] = intrinsics.IntrinsicMatrix[0, 0];
                m[1, 1] = intrinsics.IntrinsicMatrix[1, 1];
                m[2, 0] = intrinsics.IntrinsicMatrix[0, 2];
                m[2, 1] = intrinsics.IntrinsicMatrix[1, 2];
                m[2, 2] = 1;
                m[2, 3] = 1;
                m[3, 3] = 0;

                FPinOutCamreaTransform[0] = m;
            }
        }
Esempio n. 10
0
        /// <summary>
        /// Runs OpenCV camera calibration over all accepted chessboard images
        /// and stores the resulting intrinsic parameters.
        /// </summary>
        public void Calibrate()
        {
            MCvPoint3D32f[][] cornerPointsOfImages = new MCvPoint3D32f[this.AcceptedImagesCount][];
            for (int i = 0; i < this.AcceptedImagesCount; i++)
            {
                // for each captured image the physical coordinates of the corner points of the chess board are the same
                // in the coordinate system of the chess board.
                cornerPointsOfImages[i] = this.ChessBoard.CornerPoints;
            }

            IntrinsicCameraParameters intrinsicCameraParameters = new IntrinsicCameraParameters();

            // We initialize the intrinsic matrix such that the two focal lengths have a ratio of 1.0
            intrinsicCameraParameters.IntrinsicMatrix[0, 0] = 1.0;
            intrinsicCameraParameters.IntrinsicMatrix[1, 1] = 1.0;

            ExtrinsicCameraParameters[] extrinsicCameraParametersForImages;             // will hold the rotation and translation for each captured chess board
            // Fix the aspect ratio and k3 and assume zero tangential
            // distortion; intrinsicCameraParameters is refined in place.
            CameraCalibration.CalibrateCamera(
                cornerPointsOfImages,
                m_AcceptedImageCorners.ToArray(),
                this.OriginalImage.Size,
                intrinsicCameraParameters,
                Emgu.CV.CvEnum.CALIB_TYPE.CV_CALIB_FIX_ASPECT_RATIO | Emgu.CV.CvEnum.CALIB_TYPE.CV_CALIB_FIX_K3 | Emgu.CV.CvEnum.CALIB_TYPE.CV_CALIB_ZERO_TANGENT_DIST,
                out extrinsicCameraParametersForImages);

            m_IntrinsicCameraParameters = intrinsicCameraParameters;
            this.State = CalibratorState.Calibrated;
        }
Esempio n. 11
0
        /// <summary>
        /// Creates the model with pre-computed camera intrinsics, constructs
        /// the capture source, and runs the common initialization.
        /// </summary>
        public TestFormModel(IntrinsicCameraParameters intrinsicCameraParameters)
        {
            m_Capture = new Capture();
            m_IntrinsicCameraParameters = intrinsicCameraParameters;

            Initialize();
        }
Esempio n. 12
0
        /// <summary>
        /// End-to-end chessboard calibration test: detects and sub-pixel
        /// refines the corners of a 9x6 board, calibrates the camera, and
        /// undistorts the image using the result.
        /// </summary>
        public void TestChessboardCalibration()
        {
            Size patternSize = new Size(9, 6);

            Image <Gray, Byte> chessboardImage = EmguAssert.LoadImage <Gray, byte>("left01.jpg");

            Util.VectorOfPointF corners = new Util.VectorOfPointF();
            bool patternWasFound        = CvInvoke.FindChessboardCorners(chessboardImage, patternSize, corners);

            // Refine the detected corner locations to sub-pixel accuracy.
            chessboardImage.FindCornerSubPix(
                new PointF[][] { corners.ToArray() },
                new Size(10, 10),
                new Size(-1, -1),
                new MCvTermCriteria(0.05));

            // Unit-sized board squares; intrinsics created with 8 distortion coefficients.
            MCvPoint3D32f[]           objectPts = CalcChessboardCorners(patternSize, 1.0f);
            IntrinsicCameraParameters intrisic  = new IntrinsicCameraParameters(8);

            ExtrinsicCameraParameters[] extrinsic;
            double error = CameraCalibration.CalibrateCamera(new MCvPoint3D32f[][] { objectPts }, new PointF[][] { corners.ToArray() },
                                                             chessboardImage.Size, intrisic, CvEnum.CalibType.Default, new MCvTermCriteria(30, 1.0e-10), out extrinsic);

            CvInvoke.DrawChessboardCorners(chessboardImage, patternSize, corners, patternWasFound);
            //CameraCalibration.DrawChessboardCorners(chessboardImage, patternSize, corners);
            Image <Gray, Byte> undistorted = intrisic.Undistort(chessboardImage);
            //UI.ImageViewer.Show(undistorted, String.Format("Reprojection error: {0}", error));
        }
Esempio n. 13
0
    /// <summary>
    /// Applies a calibration result to the Unity main camera: builds the
    /// projection matrix from the intrinsics and positions the camera from
    /// the inverted extrinsic pose.
    /// </summary>
    private void ApplyCalibrationToUnityCamera(IntrinsicCameraParameters intrinsic, ExtrinsicCameraParameters extrinsics)
    {
        Matrix <double> rotationInverse = flipZAxis(extrinsics.RotationVector.RotationMatrix).Transpose(); // transpose is same as inverse for rotation matrix
        // Camera position = -R^-1 * t (world-space inverse of the extrinsic transform).
        Matrix <double> transFinal      = (rotationInverse * -1) * extrinsics.TranslationVector;

        // fx, fy, cx, cy taken from the 3x3 camera matrix.
        _mainCamera.projectionMatrix = LoadProjectionMatrix((float)intrinsic.IntrinsicMatrix[0, 0], (float)intrinsic.IntrinsicMatrix[1, 1], (float)intrinsic.IntrinsicMatrix[0, 2], (float)intrinsic.IntrinsicMatrix[1, 2]);
        ApplyTranslationAndRotationToCamera(transFinal, RotationConversion.RotationMatrixToEulerZXY(rotationInverse));
    }
 /// <summary>
 /// Writes the intrinsic matrix and the distortion coefficients of
 /// <paramref name="cameraParameters"/> to a text file, separated by a
 /// blank line.
 /// </summary>
 /// <param name="cameraParameters">Parameters to persist.</param>
 /// <param name="filePath">Destination file path (overwritten).</param>
 public static void Save(IntrinsicCameraParameters cameraParameters, string filePath)
 {
     using (TextWriter output = new StreamWriter(filePath))
     {
         WriteIntrinsics(cameraParameters.IntrinsicMatrix, output);
         output.WriteLine();
         WriteDistortionCoeffs(cameraParameters.DistortionCoeffs, output);
     }
 }
Esempio n. 15
0
 /// <summary>
 /// single camera calibration
 /// </summary>
 /// <param name="objectPoints">corners value in world coordinate</param>
 /// <param name="imagePoints">corners value in image coordinate</param>
 /// <param name="imageSize">image size</param>
 /// <param name="intrinsicParam">camera intrinsic; refined in place by the calibration</param>
 /// <param name="extrinsicParams">camera extrinsic</param>
 /// <returns>reprojection error</returns>
 // NOTE(review): extrinsicParams is passed by value, so the array produced by
 // CalibrateCamera's `out` argument is assigned only to this method's local
 // copy and is lost to the caller. If callers need the extrinsics, the
 // parameter should be declared `out` — a breaking signature change, so it is
 // only flagged here.
 private double singleCameraCalibration(MCvPoint3D32f[][] objectPoints,
                                        PointF[][] imagePoints,
                                        Size imageSize,
                                        IntrinsicCameraParameters intrinsicParam,
                                        ExtrinsicCameraParameters[] extrinsicParams
                                        )
 {
     return(CameraCalibration.CalibrateCamera(objectPoints, imagePoints, imageSize, intrinsicParam,
                                              CALIB_TYPE.CV_CALIB_FIX_K3, criteria, out extrinsicParams));
 }
Esempio n. 16
0
 /// <summary>
 /// Initializes a new instance of the CameraCalibrationModel class with the
 /// default buffer size and mode and empty calibration containers.
 /// </summary>
 public CameraCalibrationModel()
 {
     _frameBufferSize     = 100;
     _currentMode         = CameraCalibrationMode.SavingFrames;
     _intrinsicParameters = new IntrinsicCameraParameters();
     _extrinsicParameters = new ExtrinsicCameraParameters[5];
     _translationToWorld  = new Matrix<double>(3, 1);
     _rotationToWorld     = new Matrix<double>(3, 3);
     _correspondingPoints = new List<PointF>();
 }
Esempio n. 17
0
        /// <summary>
        /// Application entry point: loads previously saved camera intrinsics
        /// from disk and runs the test form with them.
        /// </summary>
        static void Main()
        {
            // NOTE(review): hard-coded absolute path; consider making this
            // configurable or relative to the executable directory.
            IntrinsicCameraParameters intrinsicCameraParameters = IntrinsicParametersSupport.Load(@"C:\svnDev\Rainer\Robot\CameraCalibration\CameraCalibrator\bin\Debug\CameraParameters.txt");

            using (TestFormModel testFormModel = new TestFormModel(intrinsicCameraParameters))
            {
                Application.EnableVisualStyles();
                Application.SetCompatibleTextRenderingDefault(false);
                Application.Run(new TestForm(testFormModel));
            }
        }
Esempio n. 18
0
    /// <summary>
    /// Calibrates the camera from 2D/3D point correspondences, applies the
    /// result to the Unity camera, and returns the reprojection error.
    /// </summary>
    /// <param name="imagePoints">Detected 2D points across all images.</param>
    /// <param name="worldPoints">Matching 3D world points.</param>
    /// <returns>Reprojection error reported by the calibration.</returns>
    public double calibrateFromCorrespondences(List <Vector3> imagePoints, List <Vector3> worldPoints)
    {
        PointF[][]                imagePointsCvMat = PutImagePointsIntoArray(imagePoints, imagePoints.Count);
        MCvPoint3D32f[][]         worldPointsCvMat = PutObjectPointsIntoArray(worldPoints, imagePoints.Count, _numImages);
        IntrinsicCameraParameters intrinsic        = CreateIntrinsicGuess();

        ExtrinsicCameraParameters[] extrinsics;
        var calibrationError = CameraCalibration.CalibrateCamera(worldPointsCvMat, imagePointsCvMat, _size, intrinsic, _flags, _termCriteria, out extrinsics);

        // Only the first view's extrinsics are used to place the camera.
        ApplyCalibrationToUnityCamera(intrinsic, extrinsics[0]);
        return(calibrationError);
    }
Esempio n. 19
0
      /// <summary>
      /// Estimates intrinsic camera parameters and extrinsic parameters for each of the views
      /// </summary>
      /// <param name="objectPoints">The 3D location of the object points. The first index is the index of image, second index is the index of the point</param>
      /// <param name="imagePoints">The 2D image location of the points. The first index is the index of the image, second index is the index of the point</param>
      /// <param name="imageSize">The size of the image, used only to initialize intrinsic camera matrix</param>
      /// <param name="intrinsicParam">The intrisinc parameters, might contains some initial values. The values will be modified by this function.</param>
      /// <param name="flags">Flags</param>
      /// <param name="extrinsicParams">The output array of extrinsic parameters.</param>
      /// <returns>The final reprojection error</returns>
      public static double CalibrateCamera(
         MCvPoint3D32f[][] objectPoints,
         PointF[][] imagePoints,
         Size imageSize,
         IntrinsicCameraParameters intrinsicParam,
         CvEnum.CALIB_TYPE flags,
         out ExtrinsicCameraParameters[] extrinsicParams)
      {
         Debug.Assert(objectPoints.Length == imagePoints.Length, "The number of images for objects points should be equal to the number of images for image points");
         int imageCount = objectPoints.Length;

         #region get the array that represent the point counts
         int[] pointCounts = new int[objectPoints.Length];
         for (int i = 0; i < objectPoints.Length; i++)
         {
            Debug.Assert(objectPoints[i].Length == imagePoints[i].Length, String.Format("Number of 3D points and image points should be equal in the {0}th image", i));
            pointCounts[i] = objectPoints[i].Length;
         }
         #endregion

         double reprojectionError = -1;
         using (Matrix<float> objectPointMatrix = ToMatrix(objectPoints))
         using (Matrix<float> imagePointMatrix = ToMatrix(imagePoints))
         using (Matrix<int> pointCountsMatrix = new Matrix<int>(pointCounts))
         using (Matrix<double> rotationVectors = new Matrix<double>(imageCount, 3))
         using (Matrix<double> translationVectors = new Matrix<double>(imageCount, 3))
         {
            reprojectionError = CvInvoke.cvCalibrateCamera2(
                objectPointMatrix.Ptr,
                imagePointMatrix.Ptr,
                pointCountsMatrix.Ptr,
                imageSize,
                intrinsicParam.IntrinsicMatrix,
                intrinsicParam.DistortionCoeffs,
                rotationVectors,
                translationVectors,
                flags);

            extrinsicParams = new ExtrinsicCameraParameters[imageCount];
            // Temporary native matrix header reused for each row view below.
            IntPtr matPtr = Marshal.AllocHGlobal(StructSize.MCvMat);
            try
            {
               for (int i = 0; i < imageCount; i++)
               {
                  // Transpose the i-th row (1x3) of the result matrices into the
                  // 3x1 rotation/translation vectors of the extrinsics object.
                  ExtrinsicCameraParameters p = new ExtrinsicCameraParameters();
                  CvInvoke.cvGetRow(rotationVectors.Ptr, matPtr, i);
                  CvInvoke.cvTranspose(matPtr, p.RotationVector.Ptr);
                  CvInvoke.cvGetRow(translationVectors.Ptr, matPtr, i);
                  CvInvoke.cvTranspose(matPtr, p.TranslationVector.Ptr);
                  extrinsicParams[i] = p;
               }
            }
            finally
            {
               // BUGFIX: previously the buffer leaked if any cv call above threw;
               // always release the unmanaged allocation.
               Marshal.FreeHGlobal(matPtr);
            }
         }
         return reprojectionError;
      }
Esempio n. 20
0
        /// <summary>
        /// Initializes the internal stereo-calibration state for the given
        /// image size (object points, image-point buffers, rectification
        /// matrices and solver settings).
        /// </summary>
        /// <param name="imgsize">Resolution of the calibration images.</param>
        private void Initializer(Size imgsize)
        {
            _nPointsPerImage = _ChessBoard.BoardRows * _ChessBoard.BoardColumns; // corner count per chessboard image
            _nPoints         = _nPointsPerImage * _ChessBoard.NImages;           // total corner count over all images
            _imageSize       = imgsize;                                          // image resolution
            _objectPoints    = new List <MCvPoint3D32f[]>(_ChessBoard.NImages);  // 3D world coordinates of the board corners
            for (int i = 0; i < _ChessBoard.NImages; i++)
            {
                _objectPoints.Add(new MCvPoint3D32f[_nPointsPerImage]);
            }
            int currentImage;

            // The board lies in the Z = 0 plane; corners are spaced by the
            // physical square width in both X and Y.
            for (currentImage = 0; currentImage < _ChessBoard.NImages; currentImage++)
            {
                int currentRow;
                for (currentRow = 0; currentRow < _ChessBoard.BoardRows; currentRow++)
                {
                    int currentCol;
                    for (currentCol = 0; currentCol < _ChessBoard.BoardColumns; currentCol++)
                    {
                        int nPoint = currentRow * _ChessBoard.BoardColumns + currentCol;
                        _objectPoints[currentImage][nPoint].X = (float)currentCol * _ChessBoard.SquareWidth;
                        _objectPoints[currentImage][nPoint].Y = (float)currentRow * _ChessBoard.SquareWidth;
                        _objectPoints[currentImage][nPoint].Z = (float)0.0f;
                    }
                }
            }


            _imagePointsL       = new List <PointF[]>();     // pixel coordinates of the corners in the left views
            _imagePointsR       = new List <PointF[]>();     // pixel coordinates of the corners in the right views
            _q                  = new Matrix <double>(4, 4); // Q matrix used to compute the 3D point cloud
            _roi1               = new Rectangle();           // valid-pixel rectangle of the left view
            _roi2               = new Rectangle();           // valid-pixel rectangle of the right view
            _r1                 = new Matrix <double>(3, 3);
            _r2                 = new Matrix <double>(3, 3);
            _p1                 = new Matrix <double>(3, 4);
            _p2                 = new Matrix <double>(3, 4);
            _mx1                = new Matrix <float>(_imageSize);
            _my1                = new Matrix <float>(_imageSize);
            _mx2                = new Matrix <float>(_imageSize);
            _my2                = new Matrix <float>(_imageSize);
            _extrParamsS        = new ExtrinsicCameraParameters(); // stereo-rig extrinsic parameters
            _intrParamL         = new IntrinsicCameraParameters(); // left camera intrinsics
            _intrParamR         = new IntrinsicCameraParameters(); // right camera intrinsics
            _extrParamsL        = new ExtrinsicCameraParameters[_ChessBoard.NImages];
            _extrParamsR        = new ExtrinsicCameraParameters[_ChessBoard.NImages];
            _termCriteria       = new MCvTermCriteria(30, 0.05); // solver termination criteria
            _calibType          = CalibType.FixK3;
            _DoubleCapture.ImgL = new Mat();
            _DoubleCapture.ImgR = new Mat();
        }
Esempio n. 21
0
        /// <summary>
        /// Divides each point's coordinates by the camera focal lengths
        /// (fx, fy) taken from the intrinsic matrix.
        /// </summary>
        /// <param name="points">Pixel-space points to normalize.</param>
        /// <param name="icp">Camera intrinsics supplying fx and fy.</param>
        /// <returns>A new list with the scaled points, in input order.</returns>
        public static IList <PointF> NormalizePoints(IList <PointF> points, IntrinsicCameraParameters icp)
        {
            float focalX = (float)icp.IntrinsicMatrix[0, 0];
            float focalY = (float)icp.IntrinsicMatrix[1, 1];

            IList <PointF> normalized = new List <PointF>();
            foreach (PointF p in points)
            {
                normalized.Add(new PointF(p.X / focalX, p.Y / focalY));
            }
            return normalized;
        }
        //called when data for any output pin is requested
        /// <summary>
        /// Runs stereo calibration between two cameras when the Do pin is
        /// set, outputting the inter-camera extrinsics and transform, plus a
        /// success flag and a status message.
        /// </summary>
        public void Evaluate(int SpreadMax)
        {
            if (FPinInDo[0])
            {
                int nPointsPerImage = FPinInObject.SliceCount;
                if (nPointsPerImage == 0)
                {
                    FStatus[0] = "Insufficient points";
                    return;
                }
                // Number of image pairs implied by the larger of the two
                // image-point spreads.
                int nImages = Math.Max(FPinInImage1.SliceCount, FPinInImage2.SliceCount) / nPointsPerImage;

                CALIB_TYPE        flags        = CALIB_TYPE.DEFAULT;
                MCvTermCriteria   termCrit     = new MCvTermCriteria(100, 0.001);
                MCvPoint3D32f[][] objectPoints = new MCvPoint3D32f[nImages][];
                PointF[][]        imagePoints1 = new PointF[nImages][];
                PointF[][]        imagePoints2 = new PointF[nImages][];
                Size imageSize = new Size((int)FPinInSensorSize[0].x, (int)FPinInSensorSize[0].y);
                ExtrinsicCameraParameters interCameraExtrinsics;
                Matrix <double>           foundamentalMatrix;
                Matrix <double>           essentialMatrix;
                IntrinsicCameraParameters intrinsics1 = FPinInIntrinsics1[0].intrinsics;
                IntrinsicCameraParameters intrinsics2 = FPinInIntrinsics2[0].intrinsics;

                imagePoints1 = MatrixUtils.ImagePoints(FPinInImage1, nPointsPerImage);
                imagePoints2 = MatrixUtils.ImagePoints(FPinInImage2, nPointsPerImage);

                // The same object-space board points are reused for every image pair.
                for (int i = 0; i < nImages; i++)
                {
                    objectPoints[i] = MatrixUtils.ObjectPoints(FPinInObject, true);
                }

                try
                {
                    CameraCalibration.StereoCalibrate(objectPoints, imagePoints1, imagePoints2, intrinsics1, intrinsics2, imageSize, flags, termCrit, out interCameraExtrinsics, out foundamentalMatrix, out essentialMatrix);

                    Extrinsics extrinsics = new Extrinsics(interCameraExtrinsics);
                    FPinOutExtrinsics[0] = extrinsics;
                    FPinOutTransform[0]  = extrinsics.Matrix;

                    FPinOutSuccess[0] = true;
                    FStatus[0]        = "OK";
                }
                catch (Exception e)  {
                    // Report the failure on the status pin instead of crashing the host.
                    FPinOutSuccess[0] = false;
                    FStatus[0]        = e.Message;
                }
            }
        }
Esempio n. 23
0
        /// <summary>
        /// Extracts the individual intrinsic and distortion values from an
        /// existing Emgu IntrinsicCameraParameters object.
        /// </summary>
        public DistortionParameters(IntrinsicCameraParameters icp)
        {
            this.IntrinsicCameraParameters = icp;

            // Focal lengths and principal point from the 3x3 camera matrix.
            this.Fx = icp.IntrinsicMatrix[0, 0];
            this.Fy = icp.IntrinsicMatrix[1, 1];
            this.Cx = icp.IntrinsicMatrix[0, 2];
            this.Cy = icp.IntrinsicMatrix[1, 2];

            // OpenCV coefficient order: [k1, k2, p1, p2, k3].
            this.K1 = icp.DistortionCoeffs[0, 0];
            this.K2 = icp.DistortionCoeffs[1, 0];
            this.P1 = icp.DistortionCoeffs[2, 0];
            this.P2 = icp.DistortionCoeffs[3, 0];
            this.K3 = icp.DistortionCoeffs[4, 0];
        }
        /// <summary>
        /// Stores the distortion parameters (falling back to size-derived
        /// defaults when the principal point is unset) and marks this
        /// instance initialized.
        /// </summary>
        public void Initialize(DistortionParameters parameters, Size imageSize)
        {
            // A (0, 0) principal point indicates parameters that were never
            // fitted; substitute defaults derived from the image size.
            bool unfitted = parameters.Cx == 0 && parameters.Cy == 0;
            this.parameters = unfitted ? new DistortionParameters(imageSize) : parameters;

            this.imageSize = imageSize;
            icp            = this.parameters.IntrinsicCameraParameters;
            initialized    = true;
        }
Esempio n. 25
0
        /// <summary>
        /// Searches the image for chessboard corners and, when they are
        /// found, solves for the board pose (PnP) using the given camera
        /// intrinsics.
        /// </summary>
        /// <param name="image">Input image containing the pattern.</param>
        /// <param name="objpoints">3D model points of the pattern corners.</param>
        /// <param name="camIntrinsic">Calibrated camera intrinsics.</param>
        /// <param name="pattern">Chessboard layout definition.</param>
        /// <returns>Result carrying a success flag and the extrinsics (null
        /// extrinsics when the pattern was not found).</returns>
        public CalibrationResult SolvePnP(Bitmap image, MCvPoint3D32f[] objpoints, IntrinsicCameraParameters camIntrinsic, CheckBoardDefinition pattern)
        {
            //cvCvtColor(image, cv2.COLOR_BGR2GRAY)
            Image <Gray, Byte> gray = new Image <Gray, byte>(image);

            // the fast check flag reduces significantly the computation time if the pattern is out of sight
            PointF[] corners = CameraCalibration.FindChessboardCorners(gray, pattern.Pattern, Emgu.CV.CvEnum.CalibCbType.FastCheck);
            ExtrinsicCameraParameters ret = null;

            if (corners != null)
            {
                ret = CameraCalibration.SolvePnP(objpoints, corners, camIntrinsic);
            }
            return(new CalibrationResult(ret != null, ret));
        }
Esempio n. 26
0
        /// <summary>
        /// Constructor used when the parameters are fit internally.
        /// </summary>
        public DistortionParameters(IntrinsicCameraParameters icp, Size imageSize)
        {
            this.IntrinsicCameraParameters = icp;

            fx = icp.IntrinsicMatrix[0, 0];
            fy = icp.IntrinsicMatrix[1, 1];
            cx = icp.IntrinsicMatrix[0, 2];
            cy = icp.IntrinsicMatrix[1, 2];

            k1 = icp.DistortionCoeffs[0, 0];
            k2 = icp.DistortionCoeffs[1, 0];
            k3 = icp.DistortionCoeffs[4, 0];
            p1 = icp.DistortionCoeffs[2, 0];
            p2 = icp.DistortionCoeffs[3, 0];

            PixelsPerMillimeter = imageSize.Width / defaultSensorWidth;
        }
Esempio n. 27
0
        /// <summary>
        /// Build the object actually used for distortion compensation.
        /// </summary>
        private void Build()
        {
            IntrinsicCameraParameters icp = new IntrinsicCameraParameters();

            icp.DistortionCoeffs[0, 0] = K1;
            icp.DistortionCoeffs[1, 0] = K2;
            icp.DistortionCoeffs[4, 0] = K3;
            icp.DistortionCoeffs[2, 0] = P1;
            icp.DistortionCoeffs[3, 0] = P2;

            icp.IntrinsicMatrix[0, 0] = Fx;
            icp.IntrinsicMatrix[1, 1] = Fy;
            icp.IntrinsicMatrix[0, 2] = Cx;
            icp.IntrinsicMatrix[1, 2] = Cy;
            icp.IntrinsicMatrix[2, 2] = 1;

            this.IntrinsicCameraParameters = icp;
        }
Esempio n. 28
0
        /// <summary>
        /// Chessboard calibration test exercising both the legacy
        /// CameraCalibration wrapper and the newer CvInvoke.CalibrateCamera
        /// API, plus InitCameraMatrix2D.
        /// </summary>
        public void TestChessboardCalibration()
        {
            Size patternSize = new Size(9, 6);

            Image <Gray, Byte> chessboardImage = EmguAssert.LoadImage <Gray, byte>("left01.jpg");

            Util.VectorOfPointF corners = new Util.VectorOfPointF();
            bool patternWasFound        = CvInvoke.FindChessboardCorners(chessboardImage, patternSize, corners);

            // Refine the detected corner locations to sub-pixel accuracy.
            chessboardImage.FindCornerSubPix(
                new PointF[][] { corners.ToArray() },
                new Size(10, 10),
                new Size(-1, -1),
                new MCvTermCriteria(0.05));

            // Unit-sized board squares; intrinsics created with 8 distortion coefficients.
            MCvPoint3D32f[]           objectPts = CalcChessboardCorners(patternSize, 1.0f);
            IntrinsicCameraParameters intrisic  = new IntrinsicCameraParameters(8);

            ExtrinsicCameraParameters[] extrinsic;

            // NOTE(review): calMatF is computed here but never used afterwards —
            // this exercises InitCameraMatrix2D for coverage only.
            using (VectorOfVectorOfPoint3D32F ptsVec = new VectorOfVectorOfPoint3D32F(new MCvPoint3D32f[][] { objectPts }))
                using (VectorOfVectorOfPointF imgPtsVec = new VectorOfVectorOfPointF(corners))
                {
                    Mat             calMat  = CvInvoke.InitCameraMatrix2D(ptsVec, imgPtsVec, chessboardImage.Size, 0);
                    Matrix <double> calMatF = new Matrix <double>(calMat.Rows, calMat.Cols, calMat.NumberOfChannels);
                    calMat.CopyTo(calMatF);
                }

            double error = CameraCalibration.CalibrateCamera(new MCvPoint3D32f[][] { objectPts }, new PointF[][] { corners.ToArray() },
                                                             chessboardImage.Size, intrisic, CvEnum.CalibType.Default, new MCvTermCriteria(30, 1.0e-10), out extrinsic);

            CvInvoke.DrawChessboardCorners(chessboardImage, patternSize, corners, patternWasFound);
            //CameraCalibration.DrawChessboardCorners(chessboardImage, patternSize, corners);
            Image <Gray, Byte> undistorted = intrisic.Undistort(chessboardImage);

            //UI.ImageViewer.Show(undistorted, String.Format("Reprojection error: {0}", error));

            // Same calibration through the newer CvInvoke API, which returns
            // per-view rotation/translation vectors directly.
            Mat[] rotationVectors, translationVectors;
            CvInvoke.CalibrateCamera(new MCvPoint3D32f[][] { objectPts }, new PointF[][] { corners.ToArray() },
                                     chessboardImage.Size, intrisic.IntrinsicMatrix, intrisic.DistortionCoeffs, CalibType.Default,
                                     new MCvTermCriteria(30, 1.0e-10),
                                     out rotationVectors, out translationVectors);
        }
Esempio n. 29
0
        /// <summary>
        /// Computes the rectification transforms (R1, R2), projection
        /// matrices (P1, P2) and disparity-to-depth mapping (Q) for a
        /// calibrated stereo pair by delegating to OpenCV's cvStereoRectify.
        /// </summary>
        /// <param name="intrinsicParam1">Intrinsics of the first camera.</param>
        /// <param name="intrinsicParam2">Intrinsics of the second camera.</param>
        /// <param name="imageSize">Size of the calibration images.</param>
        /// <param name="extrinsicParams">Rotation/translation between the two cameras.</param>
        /// <param name="R1">Output 3x3 rectification transform for camera 1.</param>
        /// <param name="R2">Output 3x3 rectification transform for camera 2.</param>
        /// <param name="P1">Output 3x4 projection matrix for camera 1.</param>
        /// <param name="P2">Output 3x4 projection matrix for camera 2.</param>
        /// <param name="Q">Output 4x4 disparity-to-depth mapping matrix.</param>
        /// <param name="flags">Rectification flags forwarded to OpenCV.</param>
        /// <param name="alpha">Free scaling parameter (0..1, or -1 for default).</param>
        /// <param name="newImageSize">Image size after rectification.</param>
        /// <param name="validPixROI1">Receives the valid-pixel ROI of camera 1.</param>
        /// <param name="validPixROI2">Receives the valid-pixel ROI of camera 2.</param>
        public void StereoRectify
            (IntrinsicCameraParameters intrinsicParam1,
            IntrinsicCameraParameters intrinsicParam2,
            Size imageSize,
            ExtrinsicCameraParameters extrinsicParams,
            out Matrix <double> R1,
            out Matrix <double> R2,
            out Matrix <double> P1,
            out Matrix <double> P2,
            out Matrix <double> Q,
            STEREO_RECTIFY_TYPE flags,
            double alpha,
            Size newImageSize,
            ref Rectangle validPixROI1,
            ref Rectangle validPixROI2
            )
        {
            R1 = new Matrix <double>(3, 3);
            R2 = new Matrix <double>(3, 3);
            P1 = new Matrix <double>(3, 4);
            P2 = new Matrix <double>(3, 4);
            Q  = new Matrix <double>(4, 4);

            CvInvoke.cvStereoRectify(
                intrinsicParam1.IntrinsicMatrix.Ptr,
                intrinsicParam2.IntrinsicMatrix.Ptr,
                intrinsicParam1.DistortionCoeffs.Ptr,
                intrinsicParam2.DistortionCoeffs.Ptr,
                imageSize,
                extrinsicParams.RotationVector.Ptr,
                extrinsicParams.TranslationVector.Ptr,
                R1.Ptr,
                R2.Ptr,
                P1.Ptr,
                P2.Ptr,
                Q.Ptr,
                flags,            // BUGFIX: was hard-coded to STEREO_RECTIFY_TYPE.DEFAULT, ignoring the parameter
                alpha,
                newImageSize,
                ref validPixROI1,
                ref validPixROI2); // BUGFIX: validPixROI1 was previously passed for both ROI arguments
        }
Esempio n. 30
0
        /// <summary>
        /// Builds the initial intrinsic-parameter guess for calibration: both focal lengths
        /// set to 1.0 (ratio fx/fy = 1.0) and the principal point placed at the centre of the
        /// first calibration image. Also records the image size in m_ImageSize.
        /// </summary>
        /// <exception cref="InvalidOperationException">Thrown when no calibration images are available.</exception>
        private void InitializeCameraParameters()
        {
            this.IntrinsicCameraParameters = new IntrinsicCameraParameters();

            // We initialize the intrinsic matrix such that the two focal lengths have a ratio of 1.0
            this.IntrinsicCameraParameters.IntrinsicMatrix[0, 0] = 1.0;
            this.IntrinsicCameraParameters.IntrinsicMatrix[1, 1] = 1.0;

            FileInfo[] calibrationImageFiles = GetCalibrationImageFiles();
            if (calibrationImageFiles.Length == 0)
            {
                // BUGFIX: previously indexed [0] unconditionally, throwing an opaque IndexOutOfRangeException.
                throw new InvalidOperationException("No calibration images found; cannot initialize camera parameters.");
            }

            // BUGFIX: the Image<Bgr,Byte> was never disposed, leaking its native buffer;
            // we only need it here to read the dimensions, so scope it with "using".
            using (Image <Bgr, Byte> calibrationImage = new Image <Bgr, Byte>(calibrationImageFiles[0].FullName))
            {
                m_ImageSize = calibrationImage.Size;

                // Start with the assumption that the principal point is at the image centre.
                double cx = calibrationImage.Width / 2.0;
                double cy = calibrationImage.Height / 2.0;

                this.IntrinsicCameraParameters.IntrinsicMatrix[0, 2] = cx;
                this.IntrinsicCameraParameters.IntrinsicMatrix[1, 2] = cy;
            }
        }
        /// <summary>
        /// Runs camera calibration over the collected object/image point correspondences,
        /// storing the result in Intrinsics and Extrinsics, then clears the preview image
        /// and raises the CalibrationDone flag.
        /// </summary>
        public void calibrate()
        {
            Intrinsics = new IntrinsicCameraParameters();

            // Frame dimensions depend on the capture mode: modes 0 and 1 run at
            // 1600x1200, every other mode at 800x600.
            bool isHighRes = (VidMode == 0) || (VidMode == 1);
            Size frameSize = isHighRes ? new Size(1600, 1200) : new Size(800, 600);

            CameraCalibration.CalibrateCamera(ObjectPoints, Points, frameSize, Intrinsics,
                                              Emgu.CV.CvEnum.CALIB_TYPE.DEFAULT, out Extrinsics);

            // Clear the on-screen preview and signal that calibration has finished.
            DisplayBox.Image = null;
            CalibrationDone  = true;
        }
Esempio n. 32
0
      /// <summary>
      /// Estimates extrinsic camera parameters (rotation and translation) for a single view,
      /// given known intrinsic parameters and a set of 3D object points with their 2D image
      /// projections. Minimizes the back-projection error via cvFindExtrinsicCameraParams2.
      /// </summary>
      /// <param name="objectPoints">The array of object points</param>
      /// <param name="imagePoints">The array of corresponding image points</param>
      /// <param name="intrin">The intrinsic parameters</param>
      /// <returns>The extrinsic parameters</returns>
      public static ExtrinsicCameraParameters FindExtrinsicCameraParams2(
          MCvPoint3D32f[] objectPoints,
          PointF[] imagePoints,
          IntrinsicCameraParameters intrin)
      {
         ExtrinsicCameraParameters p = new ExtrinsicCameraParameters();

         // Pin the managed arrays so the native call can read them in place.
         GCHandle handle1 = GCHandle.Alloc(objectPoints, GCHandleType.Pinned);
         GCHandle handle2 = GCHandle.Alloc(imagePoints, GCHandleType.Pinned);
         try
         {
            // Wrap the pinned memory as Nx3 / Nx2 matrices (no copy).
            using (Matrix<float> objectPointMatrix = new Matrix<float>(objectPoints.Length, 3, handle1.AddrOfPinnedObject()))
            using (Matrix<float> imagePointMatrix = new Matrix<float>(imagePoints.Length, 2, handle2.AddrOfPinnedObject()))
               CvInvoke.cvFindExtrinsicCameraParams2(objectPointMatrix, imagePointMatrix, intrin.IntrinsicMatrix.Ptr, intrin.DistortionCoeffs.Ptr, p.RotationVector.Ptr, p.TranslationVector.Ptr, 0);
         }
         finally
         {
            // BUGFIX: Free() was previously outside any finally, so the pinned handles
            // leaked (blocking GC compaction) if the matrix ctor or native call threw.
            handle1.Free();
            handle2.Free();
         }

         return p;
      }
Esempio n. 33
0
      /// <summary>
      /// Computes projections of 3D points to the image plane given intrinsic and extrinsic camera parameters. 
      /// Optionally, the function computes jacobians - matrices of partial derivatives of image points as functions of all the input parameters w.r.t. the particular parameters, intrinsic and/or extrinsic. 
      /// The jacobians are used during the global optimization in cvCalibrateCamera2 and cvFindExtrinsicCameraParams2. 
      /// The function itself is also used to compute back-projection error for with current intrinsic and extrinsic parameters. 
      /// </summary>
      /// <remarks>Note, that with intrinsic and/or extrinsic parameters set to special values, the function can be used to compute just extrinsic transformation or just intrinsic transformation (i.e. distortion of a sparse set of points) </remarks>
      /// <param name="objectPoints">The array of object points.</param>
      /// <param name="extrin">Extrinsic parameters</param>
      /// <param name="intrin">Intrinsic parameters</param>
      /// <param name="mats">Optional matrix supplied in the following order: dpdrot, dpdt, dpdf, dpdc, dpddist</param>
      /// <returns>The array of image points which is the projection of <paramref name="objectPoints"/></returns>
      public static PointF[] ProjectPoints(
          MCvPoint3D32f[] objectPoints,
          ExtrinsicCameraParameters extrin,
          IntrinsicCameraParameters intrin,
          params Matrix<float>[] mats)
      {
         PointF[] imagePoints = new PointF[objectPoints.Length];

         int matsLength = mats.Length;

         // Pin both arrays so the native call can read/write them in place.
         GCHandle handle1 = GCHandle.Alloc(objectPoints, GCHandleType.Pinned);
         GCHandle handle2 = GCHandle.Alloc(imagePoints, GCHandleType.Pinned);
         try
         {
            // View the pinned buffers as Nx1 matrices with 3 / 2 channels (no copy).
            using (Matrix<float> pointMatrix = new Matrix<float>(objectPoints.Length, 1, 3, handle1.AddrOfPinnedObject(), 3 * sizeof(float)))
            using (Matrix<float> imagePointMatrix = new Matrix<float>(imagePoints.Length, 1, 2, handle2.AddrOfPinnedObject(), 2 * sizeof(float)))
               CvInvoke.cvProjectPoints2(
                     pointMatrix,
                     extrin.RotationVector.Ptr,
                     extrin.TranslationVector.Ptr,
                     intrin.IntrinsicMatrix.Ptr,
                     intrin.DistortionCoeffs.Ptr,
                     imagePointMatrix,
                     matsLength > 0 ? mats[0] : IntPtr.Zero,
                     matsLength > 1 ? mats[1] : IntPtr.Zero,
                     matsLength > 2 ? mats[2] : IntPtr.Zero,
                     matsLength > 3 ? mats[3] : IntPtr.Zero,
                     matsLength > 4 ? mats[4] : IntPtr.Zero, 
                     0.0);
         }
         finally
         {
            // BUGFIX: Free() was previously outside any finally, so the pinned handles
            // leaked (blocking GC compaction) if the matrix ctor or native call threw.
            handle1.Free();
            handle2.Free();
         }

         return imagePoints;
      }
Esempio n. 34
0
      /// <summary>
      /// Estimates the transformation between the two cameras of a fixed stereo rig.
      /// Given per-view poses (R1, T1) and (R2, T2) of the same object relative to each camera
      /// (as produced by cvFindExtrinsicCameraParams2), this computes the single (R, T) with
      /// R2 = R*R1 and T2 = R*T1 + T, i.e. the pose of the second camera relative to the first,
      /// along with the essential and fundamental matrices of the pair.
      /// </summary>
      /// <param name="objectPoints">The 3D location of the object points. The first index is the index of image, second index is the index of the point</param>
      /// <param name="imagePoints1">The 2D image location of the points for camera 1. The first index is the index of the image, second index is the index of the point</param>
      /// <param name="imagePoints2">The 2D image location of the points for camera 2. The first index is the index of the image, second index is the index of the point</param>
      /// <param name="intrinsicParam1">The intrinsic parameters for camera 1; may contain initial values, which this function refines in place</param>
      /// <param name="intrinsicParam2">The intrinsic parameters for camera 2; may contain initial values, which this function refines in place</param>
      /// <param name="imageSize">Size of the image, used only to initialize the intrinsic camera matrix</param>
      /// <param name="flags">Calibration flags controlling which parameters are fixed or refined</param>
      /// <param name="termCrit">Termination criteria for the iterative optimization algorithm</param>
      /// <param name="extrinsicParams">Receives R (rotation between the two camera coordinate systems) and T (translation between them)</param>
      /// <param name="foundamentalMatrix">Receives the 3x3 fundamental matrix</param>
      /// <param name="essentialMatrix">Receives the 3x3 essential matrix</param>
      public static void StereoCalibrate(
         MCvPoint3D32f[][] objectPoints,
         PointF[][] imagePoints1,
         PointF[][] imagePoints2,
         IntrinsicCameraParameters intrinsicParam1,
         IntrinsicCameraParameters intrinsicParam2,
         Size imageSize,
         CvEnum.CALIB_TYPE flags,
         MCvTermCriteria termCrit,
         out ExtrinsicCameraParameters extrinsicParams,
         out Matrix<double> foundamentalMatrix,
         out Matrix<double> essentialMatrix)
      {
         int viewCount = objectPoints.Length;
         Debug.Assert(viewCount == imagePoints1.Length && viewCount == imagePoints2.Length, "The number of images for objects points should be equal to the number of images for image points");

         // Build the Nx1 matrix of per-view correspondence counts expected by the native API,
         // verifying along the way that every view has matching 3D/2D point counts.
         int[,] pointCounts = new int[viewCount, 1];
         for (int view = 0; view < viewCount; view++)
         {
            Debug.Assert(objectPoints[view].Length == imagePoints1[view].Length && objectPoints[view].Length == imagePoints2[view].Length, String.Format("Number of 3D points and image points should be equal in the {0}th image", view));
            pointCounts[view, 0] = objectPoints[view].Length;
         }

         // Allocate the outputs before handing their pointers to OpenCV.
         extrinsicParams    = new ExtrinsicCameraParameters();
         essentialMatrix    = new Matrix<double>(3, 3);
         foundamentalMatrix = new Matrix<double>(3, 3);

         using (Matrix<float> objectPointMatrix = ToMatrix(objectPoints))
         using (Matrix<float> imagePointMatrix1 = ToMatrix(imagePoints1))
         using (Matrix<float> imagePointMatrix2 = ToMatrix(imagePoints2))
         using (Matrix<int> pointCountsMatrix = new Matrix<int>(pointCounts))
         {
            CvInvoke.cvStereoCalibrate(
               objectPointMatrix.Ptr,
               imagePointMatrix1.Ptr,
               imagePointMatrix2.Ptr,
               pointCountsMatrix.Ptr,
               intrinsicParam1.IntrinsicMatrix,
               intrinsicParam1.DistortionCoeffs,
               intrinsicParam2.IntrinsicMatrix,
               intrinsicParam2.DistortionCoeffs,
               imageSize,
               extrinsicParams.RotationVector,
               extrinsicParams.TranslationVector,
               essentialMatrix.Ptr,
               foundamentalMatrix.Ptr,
               termCrit,
               flags);
         }
      }