Exemplo n.º 1
0
        // EMGU's calibrate camera method has a bug.
        // Refer this case: https://stackoverflow.com/questions/33127581/how-do-i-access-the-rotation-and-translation-vectors-after-camera-calibration-in
        /// <summary>
        /// Wrapper around <see cref="CvInvoke.CalibrateCamera"/> that marshals the managed
        /// point arrays into native vectors and deep-copies the per-image rotation and
        /// translation vectors into caller-owned <see cref="Mat"/> arrays.
        /// </summary>
        /// <param name="objectPoints">Per-image 3D model points (one array per calibration image).</param>
        /// <param name="imagePoints">Per-image detected 2D points, parallel to <paramref name="objectPoints"/>.</param>
        /// <param name="imageSize">Size of the calibration images.</param>
        /// <param name="cameraMatrix">Receives (or seeds, depending on flags) the 3x3 intrinsic matrix.</param>
        /// <param name="distortionCoeffs">Receives the distortion coefficients.</param>
        /// <param name="calibrationType">Calibration flags passed straight through.</param>
        /// <param name="termCriteria">Iteration termination criteria.</param>
        /// <param name="rotationVectors">Receives one rotation vector per image.</param>
        /// <param name="translationVectors">Receives one translation vector per image.</param>
        /// <returns>The reprojection error reported by the calibration.</returns>
        public static double CalibrateCamera(MCvPoint3D32f[][] objectPoints, PointF[][] imagePoints, Size imageSize, IInputOutputArray cameraMatrix, IInputOutputArray distortionCoeffs, CalibType calibrationType, MCvTermCriteria termCriteria, out Mat[] rotationVectors, out Mat[] translationVectors)
        {
            System.Diagnostics.Debug.Assert(objectPoints.Length == imagePoints.Length, "The number of images for objects points should be equal to the number of images for image points");
            int frameCount = objectPoints.Length;

            rotationVectors    = new Mat[frameCount];
            translationVectors = new Mat[frameCount];
            double reprojectionError;

            using (VectorOfVectorOfPoint3D32F objPtsVec = new VectorOfVectorOfPoint3D32F(objectPoints))
            using (VectorOfVectorOfPointF imgPtsVec = new VectorOfVectorOfPointF(imagePoints))
            using (VectorOfMat rotationMats = new VectorOfMat())
            using (VectorOfMat translationMats = new VectorOfMat())
            {
                reprojectionError = CvInvoke.CalibrateCamera(objPtsVec, imgPtsVec, imageSize, cameraMatrix, distortionCoeffs, rotationMats, translationMats, calibrationType, termCriteria);

                // Copy each result out of the native vectors so the returned Mats
                // remain valid after the VectorOfMat containers are disposed.
                for (int frame = 0; frame < frameCount; frame++)
                {
                    Mat rotation = new Mat();
                    using (Mat nativeRotation = rotationMats[frame])
                        nativeRotation.CopyTo(rotation);
                    rotationVectors[frame] = rotation;

                    Mat translation = new Mat();
                    using (Mat nativeTranslation = translationMats[frame])
                        nativeTranslation.CopyTo(translation);
                    translationVectors[frame] = translation;
                }
            }

            return(reprojectionError);
        }
        /// <summary>
        /// Runs camera calibration over the previously collected object/image point
        /// correspondences, caches each view's rotation/translation vectors as images,
        /// and writes the 3x3 intrinsic matrix plus the first five distortion
        /// coefficients to CameraParameters.txt in the application folder.
        /// </summary>
        private void startCalibrate()
        {
            CvInvoke.CalibrateCamera(objectPoints, cameraPoints, calibrateImages[0].Size, cameraMatrix, distof, Emgu.CV.CvEnum.CalibType.Default, new MCvTermCriteria(3), out rotateVec, out tVec);
            Image <Gray, float> cameraIn = cameraMatrix.ToImage <Gray, float>();
            Image <Gray, float> dof      = distof.ToImage <Gray, float>();

            // Keep the per-view extrinsic vectors around for later inspection.
            for (int i = 0; i < tVec.Length; i++)
            {
                tVectors.Add(tVec[i].ToImage <Gray, float>());
                rotateVectors.Add(rotateVec[i].ToImage <Gray, float>());
            }

            // using guarantees the file handle is flushed and released even if a
            // write throws (the original leaked the StreamWriter on exceptions).
            string outputPath = Path.Combine(Application.StartupPath, "CameraParameters.txt");
            using (StreamWriter writer = new StreamWriter(outputPath))
            {
                // 3x3 intrinsic matrix, one comma-separated row per line.
                writer.WriteLine(cameraIn.Data[0, 0, 0] + "," + cameraIn.Data[0, 1, 0] + "," + cameraIn.Data[0, 2, 0]);
                writer.WriteLine(cameraIn.Data[1, 0, 0] + "," + cameraIn.Data[1, 1, 0] + "," + cameraIn.Data[1, 2, 0]);
                writer.WriteLine(cameraIn.Data[2, 0, 0] + "," + cameraIn.Data[2, 1, 0] + "," + cameraIn.Data[2, 2, 0]);
                // First five distortion coefficients, one per line
                // (k1, k2, p1, p2, k3 in OpenCV's documented order).
                for (int i = 0; i < 5; i++)
                {
                    writer.WriteLine(dof.Data[0, i, 0]);
                }
            }
            MessageBox.Show("计算畸变以及相机内参完毕,储存完毕");
        }
Exemplo n.º 3
0
        /// <summary>
        /// Exercises the full chessboard calibration pipeline on "left01.jpg":
        /// detects the 9x6 inner-corner pattern, refines corners, estimates an
        /// initial camera matrix, calibrates, solves pose via RANSAC PnP, and
        /// finally undistorts the image.
        /// </summary>
        public void TestChessboardCalibration()
        {
            // 9x6 inner corners — matches the standard OpenCV sample chessboard.
            Size patternSize = new Size(9, 6);

            Image <Gray, Byte> chessboardImage = EmguAssert.LoadImage <Gray, byte>("left01.jpg");

            Util.VectorOfPointF corners = new Util.VectorOfPointF();
            bool patternWasFound        = CvInvoke.FindChessboardCorners(chessboardImage, patternSize, corners);

            // NOTE(review): the refinement is applied to the ToArray() copy, so the
            // sub-pixel result is never written back into 'corners' — verify this
            // is intended before relying on refined coordinates downstream.
            chessboardImage.FindCornerSubPix(
                new PointF[][] { corners.ToArray() },
                new Size(10, 10),
                new Size(-1, -1),
                new MCvTermCriteria(0.05));

            // Ideal board coordinates with a unit square size.
            MCvPoint3D32f[] objectPts = CalcChessboardCorners(patternSize, 1.0f);

            using (VectorOfVectorOfPoint3D32F ptsVec = new VectorOfVectorOfPoint3D32F(new MCvPoint3D32f[][] { objectPts }))
                using (VectorOfVectorOfPointF imgPtsVec = new VectorOfVectorOfPointF(corners))
                    using (Mat cameraMatrix = new Mat())
                        using (Mat distortionCoeff = new Mat())
                            using (VectorOfMat rotations = new VectorOfMat())
                                using (VectorOfMat translations = new VectorOfMat())
                                {
                                    // Initial intrinsic estimate from the 2D/3D correspondences;
                                    // copied into a Matrix<double> for inspection only.
                                    Mat             calMat  = CvInvoke.InitCameraMatrix2D(ptsVec, imgPtsVec, chessboardImage.Size, 0);
                                    Matrix <double> calMatF = new Matrix <double>(calMat.Rows, calMat.Cols, calMat.NumberOfChannels);
                                    calMat.CopyTo(calMatF);
                                    // Single-view calibration; 'error' is the reprojection error.
                                    double error = CvInvoke.CalibrateCamera(ptsVec, imgPtsVec, chessboardImage.Size, cameraMatrix,
                                                                            distortionCoeff,
                                                                            rotations, translations, CalibType.Default, new MCvTermCriteria(30, 1.0e-10));
                                    // Robust pose estimate using the calibrated intrinsics.
                                    using (Mat rotation = new Mat())
                                        using (Mat translation = new Mat())
                                            using (VectorOfPoint3D32F vpObject = new VectorOfPoint3D32F(objectPts))
                                            {
                                                CvInvoke.SolvePnPRansac(
                                                    vpObject,
                                                    corners,
                                                    cameraMatrix,
                                                    distortionCoeff,
                                                    rotation,
                                                    translation,
                                                    true);
                                            }

                                    CvInvoke.DrawChessboardCorners(chessboardImage, patternSize, corners, patternWasFound);
                                    using (Mat undistorted = new Mat())
                                    {
                                        CvInvoke.Undistort(chessboardImage, undistorted, cameraMatrix, distortionCoeff);
                                        String title = String.Format("Reprojection error: {0}", error);
                                        //CvInvoke.NamedWindow(title);
                                        //CvInvoke.Imshow(title, undistorted);
                                        //CvInvoke.WaitKey();
                                        //UI.ImageViewer.Show(undistorted, String.Format("Reprojection error: {0}", error));
                                    }
                                }
        }
        /// <summary>
        /// Calibrates the camera from the detected pattern corners, pairing each
        /// captured view with one copy of the real-world corner coordinates.
        /// </summary>
        /// <param name="patterns">Detected 2D corner sets, one array per captured view.</param>
        /// <param name="realCorners">Observable producing the real-world 3D corner set.</param>
        /// <param name="size">Size of the calibration images.</param>
        /// <returns>A <c>Calibration</c> holding intrinsics, distortion, and reprojection error.</returns>
        private Calibration Calibrate(PointF[][] patterns, IObservable <MCvPoint3D32f[]> realCorners, ImageSize size)
        {
            // Repeat the model points once per view, then block for the materialized array.
            var objectPoints = realCorners.Repeat(patterns.Length).ToArray().Wait();

            var calibration = new Calibration();

            // Per-view extrinsics are computed but intentionally discarded.
            Mat[] rotationVecs;
            Mat[] translationVecs;
            calibration.Error = CvInvoke.CalibrateCamera(
                objectPoints,
                patterns,
                size,
                calibration.CameraMatrix,
                calibration.DistortionCoefficients,
                CalibType.RationalModel,
                new MCvTermCriteria(30, 0.1),
                out rotationVecs,
                out translationVecs);

            return calibration;
        }
Exemplo n.º 5
0
        /// <summary>
        /// Calibrates a camera from ChArUco corner detections accumulated over multiple
        /// frames. The flat <paramref name="charucoCorners"/>/<paramref name="charucoIds"/>
        /// lists are split into per-frame groups using <paramref name="markerCounterPerFrame"/>.
        /// </summary>
        /// <param name="fisheye">When true, uses the fisheye calibration model.</param>
        /// <param name="GetRemoteChessboardCorner">Callback used by GetChessboardCorner to resolve board geometry.</param>
        /// <returns>The intrinsic matrix, distortion coefficients, and the RMS error from Validate.</returns>
        public static (Mat cameraMatrix, Mat distCoeffs, double rms) CalibrateCharuco(int squaresX, int squaresY, float squareLength, float markerLength, PredefinedDictionaryName dictionary, Size imageSize, VectorOfInt charucoIds, VectorOfPointF charucoCorners, VectorOfInt markerCounterPerFrame, bool fisheye, Func <byte[], byte[]> GetRemoteChessboardCorner)
        {
            Mat cameraMatrix = new Mat(3, 3, Emgu.CV.CvEnum.DepthType.Cv64F, 1);
            Mat distCoeffs   = new Mat(1, 4, Emgu.CV.CvEnum.DepthType.Cv64F, 1);

            VectorOfVectorOfPoint3D32F processedObjectPoints = new VectorOfVectorOfPoint3D32F();
            VectorOfVectorOfPointF     processedImagePoints  = new VectorOfVectorOfPointF();

            // k walks the flat corner/id lists frame by frame.
            int k = 0;

            for (int i = 0; i < markerCounterPerFrame.Size; i++)
            {
                int nMarkersInThisFrame = markerCounterPerFrame[i];

                // Push copies the data into the outer vectors, so the per-frame
                // vectors can be disposed immediately (the original leaked them).
                using (VectorOfPointF currentImgPoints = new VectorOfPointF())
                using (VectorOfPoint3D32F currentObjPoints = new VectorOfPoint3D32F())
                {
                    for (int j = 0; j < nMarkersInThisFrame; j++)
                    {
                        currentImgPoints.Push(new PointF[] { charucoCorners[k] });
                        currentObjPoints.Push(new MCvPoint3D32f[] { GetChessboardCorner(squaresX, squaresY, squareLength, markerLength, charucoIds[k], dictionary, GetRemoteChessboardCorner) });
                        k++;
                    }

                    processedImagePoints.Push(currentImgPoints);
                    processedObjectPoints.Push(currentObjPoints);
                }
            }

            VectorOfPoint3D32F rvecs = new VectorOfPoint3D32F();
            VectorOfPoint3D32F tvecs = new VectorOfPoint3D32F();

            if (fisheye)
            {
                Fisheye.Calibrate(processedObjectPoints, processedImagePoints, imageSize, cameraMatrix, distCoeffs, rvecs, tvecs, Fisheye.CalibrationFlag.FixSkew | Fisheye.CalibrationFlag.RecomputeExtrinsic, new MCvTermCriteria(400, double.Epsilon));
            }
            else
            {
                // BUG FIX: the extrinsics were previously written into throwaway
                // 'new Mat()' instances, leaving rvecs/tvecs empty when Validate
                // consumed them below. Pass the real output vectors instead.
                CvInvoke.CalibrateCamera(processedObjectPoints, processedImagePoints, imageSize, cameraMatrix, distCoeffs, rvecs, tvecs, CalibType.FixK3, new MCvTermCriteria(30, 1e-4));
            }

            double rms = Validate(processedObjectPoints, processedImagePoints, cameraMatrix, distCoeffs, rvecs, tvecs, fisheye);

            return(cameraMatrix, distCoeffs, rms);
        }
Exemplo n.º 6
0
        /// <summary>
        /// Chessboard calibration test using the legacy CameraCalibration /
        /// IntrinsicCameraParameters API, followed by the newer
        /// CvInvoke.CalibrateCamera overload on the same data.
        /// </summary>
        public void TestChessboardCalibration()
        {
            // 9x6 inner corners — the standard OpenCV sample chessboard.
            Size patternSize = new Size(9, 6);

            Image <Gray, Byte> chessboardImage = EmguAssert.LoadImage <Gray, byte>("left01.jpg");

            Util.VectorOfPointF corners = new Util.VectorOfPointF();
            bool patternWasFound        = CvInvoke.FindChessboardCorners(chessboardImage, patternSize, corners);

            // NOTE(review): refinement is applied to the ToArray() copy and never
            // written back into 'corners' — confirm this is intended.
            chessboardImage.FindCornerSubPix(
                new PointF[][] { corners.ToArray() },
                new Size(10, 10),
                new Size(-1, -1),
                new MCvTermCriteria(0.05));

            // Ideal board coordinates with unit square size; 8 distortion coefficients requested.
            MCvPoint3D32f[]           objectPts = CalcChessboardCorners(patternSize, 1.0f);
            IntrinsicCameraParameters intrisic  = new IntrinsicCameraParameters(8);

            ExtrinsicCameraParameters[] extrinsic;

            // Initial intrinsic estimate; calMatF is populated but only inspected locally.
            using (VectorOfVectorOfPoint3D32F ptsVec = new VectorOfVectorOfPoint3D32F(new MCvPoint3D32f[][] { objectPts }))
                using (VectorOfVectorOfPointF imgPtsVec = new VectorOfVectorOfPointF(corners))
                {
                    Mat             calMat  = CvInvoke.InitCameraMatrix2D(ptsVec, imgPtsVec, chessboardImage.Size, 0);
                    Matrix <double> calMatF = new Matrix <double>(calMat.Rows, calMat.Cols, calMat.NumberOfChannels);
                    calMat.CopyTo(calMatF);
                }

            // Legacy-API calibration: fills 'intrisic' and one extrinsic set per view.
            double error = CameraCalibration.CalibrateCamera(new MCvPoint3D32f[][] { objectPts }, new PointF[][] { corners.ToArray() },
                                                             chessboardImage.Size, intrisic, CvEnum.CalibType.Default, new MCvTermCriteria(30, 1.0e-10), out extrinsic);

            CvInvoke.DrawChessboardCorners(chessboardImage, patternSize, corners, patternWasFound);
            //CameraCalibration.DrawChessboardCorners(chessboardImage, patternSize, corners);
            Image <Gray, Byte> undistorted = intrisic.Undistort(chessboardImage);

            //UI.ImageViewer.Show(undistorted, String.Format("Reprojection error: {0}", error));

            // Re-run with the modern overload, seeding it with the legacy result's
            // intrinsic matrix and distortion coefficients.
            Mat[] rotationVectors, translationVectors;
            CvInvoke.CalibrateCamera(new MCvPoint3D32f[][] { objectPts }, new PointF[][] { corners.ToArray() },
                                     chessboardImage.Size, intrisic.IntrinsicMatrix, intrisic.DistortionCoeffs, CalibType.Default,
                                     new MCvTermCriteria(30, 1.0e-10),
                                     out rotationVectors, out translationVectors);
        }
        /// <summary>
        /// Detects chessboard corners in the given images and calibrates the camera,
        /// storing the result in the cameraParam list and recording the image size.
        /// </summary>
        /// <param name="squareEdge">Edge length of one chessboard square.</param>
        /// <param name="patternSize">Inner-corner pattern size of the chessboard.</param>
        /// <param name="images">Paths of the calibration images.</param>
        /// <returns>False if no chessboard was found; true once calibration succeeds.</returns>
        public bool calibrate(float squareEdge, Size patternSize, string[] images)
        {
            VectorOfVectorOfPointF corners = findCorners(squareEdge, patternSize, images);

            if (corners.Size == 0)
            {
                Console.WriteLine("Cannot find chessboard!");
                return(false);
            }

            // Every view observes the same physical board, so push one copy of the
            // model points per detected view.
            VectorOfPoint3D32F         chessboard   = getChessboardCorners(squareEdge, patternSize);
            VectorOfVectorOfPoint3D32F objectPoints = new VectorOfVectorOfPoint3D32F();

            for (int i = 0; i < corners.Size; i++)
            {
                objectPoints.Push(chessboard);
            }

            CameraParam param = new CameraParam();

            // using disposes the extrinsic output Mats and the probe image
            // (the original leaked all three native resources).
            using (Mat rotationMat = new Mat())
            using (Mat translationMat = new Mat())
            using (Image <Gray, Byte> image = new Image <Gray, Byte>(images[0]))
            {
                imgSize = image.Size;

                CvInvoke.CalibrateCamera(
                    objectPoints,
                    corners,
                    image.Size,
                    param.cameraMatrix.Mat,
                    param.distortionCoeffs.Mat,
                    rotationMat,
                    translationMat,
                    CalibType.Default,
                    new MCvTermCriteria(30, 0.1));
            }

            cameraParam.Clear();
            cameraParam.Add(param);
            return(_isCalibrated = true);
        }
Exemplo n.º 8
0
        /// <summary>
        /// Calibrates the RGB camera from every *.jpg chessboard image in the
        /// "rgb image" folder; results are written into the static
        /// _cameraMatrix / _distCoeffs fields.
        /// </summary>
        /// <param name="squareSize">Edge length of one chessboard square in world units
        /// (defaults to the previously hard-coded 20).</param>
        /// <returns>The reprojection (intrinsic calculation) error.</returns>
        public static double CalibrateRGBCAM(float squareSize = 20.0F)
        {
            Size patternSize = new Size(width, height);

            string[] fileEntries = Directory.GetFiles(@"..\..\..\..\rgb image", "*.jpg");

            MCvPoint3D32f[][] corners_object_list = new MCvPoint3D32f[fileEntries.Length][];
            PointF[][]        corners_points_list = new PointF[fileEntries.Length][];
            Mat[] _rvecs, _tvecs;
            Size  imageSize = Size.Empty;

            for (int k = 0; k < fileEntries.Length; k++)
            {
                // Dispose each frame and corner vector as soon as its points are
                // extracted (the original kept every native image alive and never
                // disposed anything).
                using (Image <Gray, byte> frame = new Image <Gray, byte>(fileEntries[k]))
                using (VectorOfPointF cornersVec = new VectorOfPointF())
                {
                    if (k == 0)
                    {
                        imageSize = frame.Size;
                    }

                    CvInvoke.FindChessboardCorners(frame, patternSize, cornersVec, CalibCbType.AdaptiveThresh | CalibCbType.FilterQuads);
                    //for accuracy
                    CvInvoke.CornerSubPix(frame, cornersVec, new Size(11, 11), new Size(-1, -1), new MCvTermCriteria(30, 0.1));

                    //Fill our objects list with the real world mesurments for the intrinsic calculations
                    // (planar grid on the board plane, Z = 0).
                    List <MCvPoint3D32f> object_list = new List <MCvPoint3D32f>();
                    for (int i = 0; i < height; i++)
                    {
                        for (int j = 0; j < width; j++)
                        {
                            object_list.Add(new MCvPoint3D32f(j * squareSize, i * squareSize, 0.0F));
                        }
                    }
                    corners_object_list[k] = object_list.ToArray();
                    corners_points_list[k] = cornersVec.ToArray();
                }
            }

            double error = CvInvoke.CalibrateCamera(corners_object_list, corners_points_list, imageSize,
                                                    _cameraMatrix, _distCoeffs, CalibType.RationalModel, new MCvTermCriteria(30, 0.1), out _rvecs, out _tvecs);

            //If Emgu.CV.CvEnum.CALIB_TYPE == CV_CALIB_USE_INTRINSIC_GUESS and/or CV_CALIB_FIX_ASPECT_RATIO are specified, some or all of fx, fy, cx, cy must be initialized before calling the function
            //if you use FIX_ASPECT_RATIO and FIX_FOCAL_LEGNTH options, these values needs to be set in the intrinsic parameters before the CalibrateCamera function is called. Otherwise 0 values are used as default.
            Console.WriteLine("Intrinsci Calculation Error: " + error.ToString(), "Results"); //display the results to the user
            return(error);
        }
Exemplo n.º 9
0
        /// <summary>
        /// Camera calibration: computes the intrinsic matrix, distortion coefficients,
        /// and per-view extrinsics from the accumulated object/image point lists.
        /// </summary>
        /// <param name="imageSize">Size of the calibration images.</param>
        /// <returns>The re-projection error reported by the calibration.</returns>
        private double Calibrate(Size imageSize)
        {
            // Any cached undistortion maps are stale once the intrinsics change.
            this.MustInitUndistort = true;

            // Snapshot the accumulated correspondences as jagged arrays.
            MCvPoint3D32f[][] objectPoints = this.ObjectPoints.ToArray();
            PointF[][]        imagePoints  = this.ImagePoints.ToArray();

            // Fresh output holders: 3x3 intrinsic matrix, 8x1 distortion coefficients.
            cameraMatrix     = new Mat(3, 3, DepthType.Cv64F, 1);
            distortionCoeffs = new Mat(8, 1, DepthType.Cv64F, 1);

            // Compute intrinsics and extrinsics; the return value is the reprojection error.
            return CvInvoke.CalibrateCamera(objectPoints, imagePoints, imageSize, cameraMatrix, distortionCoeffs, CalibType.Default, new MCvTermCriteria(30, 0.1), out rotationVectors, out translationVectors);
        }
Exemplo n.º 10
0
        /// <summary>
        /// Smoke-tests CvInvoke.CalibrateCamera with a tiny synthetic data set:
        /// a single view containing four identical object/image point pairs and an
        /// initial intrinsic guess.
        /// </summary>
        static void Main(string[] args)
        {
            MCvPoint3D32f objectp_1 = new MCvPoint3D32f(1f, 1f, 1f);

            // One view of four (degenerate, identical) 3D points.
            MCvPoint3D32f[][] objectPoints = new MCvPoint3D32f[][] { new MCvPoint3D32f[]
                                                                     { objectp_1, objectp_1, objectp_1, objectp_1 } };

            PointF imagep_1 = new PointF(1f, 1f);

            // Matching 2D observations for the single view.
            PointF[][] imagePoints = new PointF[][] { new PointF[] { imagep_1, imagep_1, imagep_1, imagep_1 } };

            Size imageSize = new Size(500, 500);

            // Initial intrinsic guess written cell-by-cell into a raw Mat
            // (fx=302, fy=411, cx=101, cy=106).
            Mat cameraMat = new Mat(new Size(3, 3), DepthType.Cv32F, 1);

            cameraMat.SetValue(0, 0, 302);
            cameraMat.SetValue(0, 1, 0);
            cameraMat.SetValue(0, 2, 101);
            cameraMat.SetValue(1, 0, 0);
            cameraMat.SetValue(1, 1, 411);
            cameraMat.SetValue(1, 2, 106);
            cameraMat.SetValue(2, 0, 0);
            cameraMat.SetValue(2, 1, 0);
            cameraMat.SetValue(2, 2, 1); // BUG FIX: was SetValue(2, 1, 1), which overwrote (2,1) and left (2,2) unset

            // The same guess as a typed Matrix<double>; this is what the
            // calibration call below actually consumes.
            Matrix <double> cameraMatrix = new Matrix <double>(new double[, ] {
                { 302, 0, 101 }, { 0, 411, 106 }, { 0, 0, 1 }
            });

            cameraMat.ToImage <Gray, byte>().Save("test.jpg");

            // UseIntrinsicGuess: refine the supplied camera matrix instead of
            // estimating one from scratch.
            Emgu.CV.CvEnum.CalibType calibrationType = Emgu.CV.CvEnum.CalibType.UseIntrinsicGuess;

            Emgu.CV.Structure.MCvTermCriteria termCriteria = new Emgu.CV.Structure.MCvTermCriteria(50);

            Mat _distortionCoeffs = new Mat(new Size(1, 5), DepthType.Cv32F, 1);

            Mat[] rotation;
            Mat[] translation;

            var    result = CvInvoke.CalibrateCamera(objectPoints, imagePoints, imageSize, cameraMatrix, _distortionCoeffs, calibrationType, termCriteria, out rotation, out translation);
            double t      = rotation[0].GetValue(0, 0);
            double t2     = rotation[0].GetValue(2, 0);
        }
Exemplo n.º 11
0
        bool patternRightFound;       // True if chessboard found in image
        #endregion

        /**********************************************************
        * Calculates calibration transformations and saves them,
        * or loads previously saved rectification maps from disk.
        **********************************************************/
        public void Calibration()
        {
            string imagesPath = calibrationPath.Text;

            // Load mode: read previously computed rectification maps and the two
            // valid-pixel rectangles from the XML file given in the UI.
            if (imageCalibration != true)
            {
                try
                {
                    FileStorage fs = new FileStorage(calibrationPath.Text, FileStorage.Mode.Read);
                    fs["rmapx1"].ReadMat(rmapx1);
                    fs["rmapy1"].ReadMat(rmapy1);
                    fs["rmapx2"].ReadMat(rmapx2);
                    fs["rmapy2"].ReadMat(rmapy2);

                    // Rec1/Rec2 were stored via Rectangle.ToString(), i.e.
                    // "{X=..,Y=..,Width=..,Height=..}"; walk '=' and ',' markers to
                    // pull the four integers back out.
                    string rec1Str = fs["Rec1"].ReadString();
                    string rec2Str = fs["Rec2"].ReadString();

                    int idx1 = rec1Str.IndexOf('=');
                    int idx2 = rec1Str.IndexOf(',');
                    int x    = Convert.ToInt32(rec1Str.Substring(idx1 + 1, idx2 - idx1 - 1));
                    idx1 = rec1Str.IndexOf('=', idx2);
                    idx2 = rec1Str.IndexOf(',', idx1);
                    int y = Convert.ToInt32(rec1Str.Substring(idx1 + 1, idx2 - idx1 - 1));
                    idx1 = rec1Str.IndexOf('=', idx2);
                    idx2 = rec1Str.IndexOf(',', idx1);
                    int w = Convert.ToInt32(rec1Str.Substring(idx1 + 1, idx2 - idx1 - 1));
                    idx1 = rec1Str.IndexOf('=', idx2);
                    idx2 = rec1Str.Length;
                    // NOTE(review): the Height value includes the trailing '}' unless
                    // ToString() omitted it — verify the parse against a saved file.
                    int h = Convert.ToInt32(rec1Str.Substring(idx1 + 1, idx2 - idx1 - 1));

                    Rec1 = new Rectangle(x, y, w, h);

                    idx1 = rec2Str.IndexOf('=');
                    idx2 = rec2Str.IndexOf(',');
                    x    = Convert.ToInt32(rec2Str.Substring(idx1 + 1, idx2 - idx1 - 1));
                    idx1 = rec2Str.IndexOf('=', idx2);
                    idx2 = rec2Str.IndexOf(',', idx1);
                    y    = Convert.ToInt32(rec2Str.Substring(idx1 + 1, idx2 - idx1 - 1));
                    idx1 = rec2Str.IndexOf('=', idx2);
                    idx2 = rec2Str.IndexOf(',', idx1);
                    w    = Convert.ToInt32(rec2Str.Substring(idx1 + 1, idx2 - idx1 - 1));
                    idx1 = rec2Str.IndexOf('=', idx2);
                    idx2 = rec2Str.Length;
                    h    = Convert.ToInt32(rec2Str.Substring(idx1 + 1, idx2 - idx1 - 1));

                    Rec2 = new Rectangle(x, y, w, h);

                    MessageBox.Show("Transformation maps loaded successfully");
                }
                catch (Exception)
                {
                    MessageBox.Show("Error: Problem loading Transformation maps");
                    Environment.Exit(1);
                }
            }
            // Calibration mode: detect chessboards in paired left/right images,
            // calibrate each camera, stereo-calibrate, rectify, and save the maps.
            if (imageCalibration == true)
            {
                // Scan up to 2x bufferLength image pairs; stop once bufferLength
                // pairs with a chessboard visible in BOTH views were collected.
                for (int i = 0; i < bufferLength * 2; i++)
                {
                    chessFrameL = CvInvoke.Imread(imagesPath + "\\camera1\\image_" + i.ToString() + ".jpg");
                    chessFrameR = CvInvoke.Imread(imagesPath + "\\camera2\\image_" + i.ToString() + ".jpg");

                    patternLeftFound  = CvInvoke.FindChessboardCorners(chessFrameL, patternSize, cornersVecLeft, CalibCbType.NormalizeImage | CalibCbType.AdaptiveThresh);
                    patternRightFound = CvInvoke.FindChessboardCorners(chessFrameR, patternSize, cornersVecRight, CalibCbType.NormalizeImage | CalibCbType.AdaptiveThresh);

                    if (patternLeftFound && patternRightFound)
                    {
                        CvInvoke.CvtColor(chessFrameL, grayLeft, ColorConversion.Bgr2Gray);
                        CvInvoke.CvtColor(chessFrameR, grayRight, ColorConversion.Bgr2Gray);

                        CvInvoke.DrawChessboardCorners(chessFrameL, patternSize, cornersVecLeft, patternLeftFound);
                        CvInvoke.DrawChessboardCorners(chessFrameR, patternSize, cornersVecRight, patternRightFound);

                        CvInvoke.Imshow("Calibration image left", chessFrameL);
                        CvInvoke.Imshow("Calibration image right", chessFrameR);
                        //chessFrameL.Save(desktop + "\\Left_" + i + ".jpg");
                        //chessFrameR.Save(desktop + "\\Right" + i + ".jpg");

                        CvInvoke.WaitKey(10);

                        imagePoints1[bufferSavepoint] = cornersVecLeft.ToArray();
                        imagePoints2[bufferSavepoint] = cornersVecRight.ToArray();
                        bufferSavepoint++;
                        if (bufferSavepoint == bufferLength)
                        {
                            break;
                        }
                    }
                }
                CvInvoke.DestroyAllWindows();
                //fill the MCvPoint3D32f with correct mesurments
                for (int k = 0; k < bufferLength; k++)
                {
                    //Fill our objects list with the real world mesurments for the intrinsic calculations
                    // (planar grid on the board plane, Z = 0, 20-unit squares).
                    List <MCvPoint3D32f> object_list = new List <MCvPoint3D32f>();
                    for (int i = 0; i < height; i++)
                    {
                        for (int j = 0; j < width; j++)
                        {
                            object_list.Add(new MCvPoint3D32f(j * 20.0F, i * 20.0F, 0.0F));
                        }
                    }
                    cornersObjectPoints[k] = object_list.ToArray();
                }

                // Per-camera intrinsics. NOTE(review): both calls write the same
                // rvecs/tvecs fields (second overwrites first) and both use
                // chessFrameL.Size — assumes both cameras share one resolution; confirm.
                CvInvoke.CalibrateCamera(cornersObjectPoints, imagePoints1, chessFrameL.Size, camMat1, dist1, CalibType.Default, new MCvTermCriteria(200, 1e-5), out rvecs, out tvecs);
                CvInvoke.CalibrateCamera(cornersObjectPoints, imagePoints2, chessFrameL.Size, camMat2, dist2, CalibType.Default, new MCvTermCriteria(200, 1e-5), out rvecs, out tvecs);

                // Stereo pose (R, T) plus essential/fundamental matrices, keeping
                // the intrinsics just estimated (UseIntrinsicGuess).
                CvInvoke.StereoCalibrate(cornersObjectPoints, imagePoints1, imagePoints2, camMat1, dist1, camMat2, dist2, chessFrameL.Size,
                                         R, T, essential, fundamental, CalibType.FixAspectRatio | CalibType.ZeroTangentDist | CalibType.SameFocalLength | CalibType.RationalModel | CalibType.UseIntrinsicGuess | CalibType.FixK3 | CalibType.FixK4 | CalibType.FixK5, new MCvTermCriteria(100, 1e-5));

                // Rectification transforms (R1/R2), projections (P1/P2), disparity-to-depth Q,
                // and the valid-pixel rectangles Rec1/Rec2.
                CvInvoke.StereoRectify(camMat1, dist1, camMat2, dist2, chessFrameL.Size, R, T, R1, R2, P1, P2, Q, StereoRectifyType.CalibZeroDisparity, 0,
                                       chessFrameL.Size, ref Rec1, ref Rec2);

                // Create transformation maps
                CvInvoke.InitUndistortRectifyMap(camMat1, dist1, R1, P1, chessFrameL.Size, DepthType.Cv32F, rmapx1, rmapy1);
                CvInvoke.InitUndistortRectifyMap(camMat2, dist2, R2, P2, chessFrameL.Size, DepthType.Cv32F, rmapx2, rmapy2);
                MessageBox.Show("Calibration has ended\n Connect to TELLO access point");
                // Persist the maps and rectangles for the load path above.
                try
                {
                    FileStorage fs = new FileStorage(calibrationPath.Text + "\\calibMaps.xml", FileStorage.Mode.Write);
                    fs.Write(rmapx1, "rmapx1");
                    fs.Write(rmapy1, "rmapy1");
                    fs.Write(rmapx2, "rmapx2");
                    fs.Write(rmapy2, "rmapy2");
                    fs.Write(Rec1.ToString(), "Rec1");
                    fs.Write(Rec2.ToString(), "Rec2");
                    MessageBox.Show("Transformation maps saved successfully");
                }
                catch (Exception)
                {
                    MessageBox.Show("Error: Problem saving Transformation maps");
                    Environment.Exit(1);
                }
            }
        }
Exemplo n.º 12
0
        private void CalibrateCamera()
        {
            const int width       = 5;                       //5 //width of chessboard no. squares in width - 1
            const int height      = 5;                       //5 // heght of chess board no. squares in heigth - 1
            Size      patternSize = new Size(width, height); //size of chess board to be detected

            MCvPoint3D32f[][] corners_object_list = new MCvPoint3D32f[6][];
            PointF[][]        corners_points_list = new PointF[6][];


            for (int k = 0; k < 6; k++)
            {
                corners_object_list[k] = new MCvPoint3D32f[width * height];
                for (int i = 0; i < 5; i++)
                {
                    for (int j = 0; j < 5; j++)
                    {
                        corners_object_list[k][5 * i + j] = new MCvPoint3D32f((4 - i) * 29, (4 - j) * 29, 8);
                    }
                }
            }



            var  output         = new Emgu.CV.Util.VectorOfPointF();
            Size smallerPicSize = new Size(816, 612);

            for (int k = 1; k <= 6; k++)
            {
                Mat imgCam     = new Mat(k + ".jpg", LoadImageType.Unchanged);//load picture of chessboard
                Mat smallerPic = new Mat();

                Size PicSize = new Size(3264, 2448);
                CvInvoke.Resize(imgCam, smallerPic, smallerPicSize);

                if (k == 1)
                {
                    smallerPic.Save("small1.jpg");
                }

                //CvInvoke.Imshow("small", smallerPic);

                bool found = CvInvoke.FindChessboardCorners(smallerPic, patternSize, output);//find chessboard
                Console.WriteLine("found:" + found);
                corners_points_list[k - 1] = output.ToArray();
            }

            for (int i = 0; i < output.Size; i++)
            {
                Console.WriteLine(corners_points_list[0].GetValue(i));
            }

            Mat cameraMat  = new Mat();
            Mat distorCoef = new Mat();

            Mat[] rotationVec = new Mat[6];

            Mat[] translationVec = new Mat[6];
            for (int k = 0; k < 6; k++)
            {
                translationVec[k] = new Mat();
                rotationVec[k]    = new Mat();
            }

            MCvTermCriteria criteria = new MCvTermCriteria();

            double rms = CvInvoke.CalibrateCamera(corners_object_list, corners_points_list, smallerPicSize, cameraMat, distorCoef, CalibType.RationalModel, criteria, out rotationVec, out translationVec);


            cameraPar = new float[9];
            double[] cameraParDouble = new Double[9];
            cameraMat.CopyTo(cameraParDouble);
            for (int i = 0; i < 9; i++)
            {
                cameraPar[i] = (float)cameraParDouble[i];
            }


            //1 by 14 array of distortion coeff, only first 8 important
            double[] distortArr = new double[14];
            distorCoef.CopyTo(distortArr);

            //1 by 3 array of rotate Matrix
            rotateArr = new float[9];
            Mat rotationMatrix = new Mat();

            //need to flip stuff
            //double[] rv = new double[3];
            //rotationVec[0].CopyTo(rv);
            //rv[1] = -1.0f * rv[1]; rv[2] = -1.0f * rv[2];
            //rotationVec[0].SetTo(rv);
            CvInvoke.Rodrigues(rotationVec[0], rotationMatrix);
            double[] rotateArrDouble = new double[9];
            rotationMatrix.CopyTo(rotateArrDouble);
            for (int i = 0; i < 9; i++)
            {
                rotateArr[i] = (float)rotateArrDouble[i];
            }


            //1 by 3 array of translate Matrix
            translateArr = new float[3];
            double[] translateArrDouble = new double[3];
            translationVec[0].CopyTo(translateArrDouble);
            for (int i = 0; i < 3; i++)
            {
                translateArr[i] = (float)translateArrDouble[i];
            }


            for (int i = 0; i < 3; i++)
            {
                Console.WriteLine(rotateArr[i]);
            }

            for (int i = 0; i < 3; i++)
            {
                Console.WriteLine(translateArr[i]);
            }

            //CvInvoke.Imshow("chessboard", imgCam);

            Console.WriteLine(rms);

            FileStorage fs = new FileStorage("cameraMat.txt", FileStorage.Mode.Write);

            fs.Write(cameraMat);
            fs.ReleaseAndGetString();
            fs = new FileStorage("distort.txt", FileStorage.Mode.Write);
            fs.Write(distorCoef);
            fs.ReleaseAndGetString();
        }
Exemplo n.º 13
0
        /// <summary>
        /// Frame callback for the capture device. Depending on the current mode it
        /// collects chessboard frames, computes the camera intrinsics once the
        /// buffer is full, or displays the undistorted stream.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Event arguments (unused).</param>
        void ProcessFrame(object sender, EventArgs e)
        {
            _capture.Retrieve(_frame);
            CvInvoke.CvtColor(_frame, _grayFrame, ColorConversion.Bgr2Gray);

            //apply chess board detection
            if (_currentMode == Mode.SavingFrames)
            {
                _find = CvInvoke.FindChessboardCorners(_grayFrame, _patternSize, _corners, CalibCbType.AdaptiveThresh | CalibCbType.FastCheck | CalibCbType.NormalizeImage);
                //we use this loop so we can show a colour image rather than a gray:
                if (_find) //chess board found
                {
                    //make measurements more accurate by using FindCornerSubPixel
                    CvInvoke.CornerSubPix(_grayFrame, _corners, new Size(11, 11), new Size(-1, -1),
                                          new MCvTermCriteria(30, 0.1));

                    //if go button has been pressed start acquiring frames else we will just display the points
                    if (_startFlag)
                    {
                        // BUG FIX: _grayFrame is overwritten by CvtColor on every
                        // callback, so storing the reference itself made every
                        // buffered frame alias the most recent capture. Clone to
                        // snapshot the current frame.
                        _frameArrayBuffer[_frameBufferSavepoint] = _grayFrame.Clone();
                        _frameBufferSavepoint++; //increase buffer position

                        //check the state of buffer
                        if (_frameBufferSavepoint == _frameArrayBuffer.Length)
                        {
                            _currentMode = Mode.CalculatingIntrinsics; //buffer full
                        }
                    }

                    //draw the results
                    CvInvoke.DrawChessboardCorners(_frame, _patternSize, _corners, _find);
                    string msg = string.Format("{0}/{1}", _frameBufferSavepoint + 1, _frameArrayBuffer.Length);

                    int baseLine   = 0;
                    var textOrigin = new Point(_frame.Cols - 2 * 120 - 10, _frame.Rows - 2 * baseLine - 10);
                    CvInvoke.PutText(_frame, msg, textOrigin, FontFace.HersheyPlain, 3, new MCvScalar(0, 0, 255), 2);

                    //crude pacing: if the buffer is small you want a big delay, if big a small delay;
                    //this gives the user time to move the board to a different position
                    Thread.Sleep(100);
                }
                _corners = new VectorOfPointF();
                _find    = false;
            }
            if (_currentMode == Mode.CalculatingIntrinsics)
            {
                for (int k = 0; k < _frameArrayBuffer.Length; k++)
                {
                    _cornersPointsVec[k] = new VectorOfPointF();
                    CvInvoke.FindChessboardCorners(_frameArrayBuffer[k], _patternSize, _cornersPointsVec[k], CalibCbType.AdaptiveThresh
                                                   | CalibCbType.FastCheck | CalibCbType.NormalizeImage);
                    // BUG FIX: refine against the buffered frame the corners were
                    // detected in, not the live _grayFrame.
                    CvInvoke.CornerSubPix(_frameArrayBuffer[k], _cornersPointsVec[k], new Size(11, 11), new Size(-1, -1),
                                          new MCvTermCriteria(30, 0.1));

                    //Fill our objects list with the real world measurements for the intrinsic calculations
                    var objectList = new List <MCvPoint3D32f>();
                    for (int i = 0; i < _height; i++)
                    {
                        for (int j = 0; j < _width; j++)
                        {
                            objectList.Add(new MCvPoint3D32f(j * _squareSize, i * _squareSize, 0.0F));
                        }
                    }

                    _cornersObjectList[k] = objectList.ToArray();
                    _cornersPointsList[k] = _cornersPointsVec[k].ToArray();
                }

                //the reprojection error should be as close to 0 as possible
                double error = CvInvoke.CalibrateCamera(_cornersObjectList, _cornersPointsList, _grayFrame.Size,
                                                        _cameraMatrix, _distCoeffs, CalibType.RationalModel, new MCvTermCriteria(30, 0.1), out _rvecs, out _tvecs);

                // release the cloned snapshots now that calibration is complete
                for (int k = 0; k < _frameArrayBuffer.Length; k++)
                {
                    _frameArrayBuffer[k].Dispose();
                }

                MessageBox.Show(@"Intrinsic Calculation Error: " + error.ToString(CultureInfo.InvariantCulture), @"Results", MessageBoxButtons.OK, MessageBoxIcon.Information); //display the results to the user
                _currentMode = Mode.Calibrated;
            }
            if (_currentMode == Mode.Calibrated)
            {
                Sub_PicturBox.Image = _frame;
                Mat outFrame = _frame.Clone();
                CvInvoke.Undistort(_frame, outFrame, _cameraMatrix, _distCoeffs);
                _frame = outFrame.Clone();
            }

            Main_Picturebox.Image = _frame;
        }
Exemplo n.º 14
0
        /// <summary>
        /// Loads a fixed reference image, runs a single-view camera calibration from
        /// a hand-picked 4x3 grid of correspondences, and displays the original and
        /// corner-annotated images in the two image boxes.
        /// </summary>
        public void MainStuff()
        {
            SRC_Img       = new Image <Gray, byte>(@"C:\Users\Админ\Downloads\image63341262,2002.png");
            Corrected_Img = SRC_Img.Clone();

            // Hand-measured image-space locations of a 4x3 grid of points.
            PointF[] corners = new PointF[] { new PointF(100, 196), new PointF(261, 190), new PointF(417, 192), new PointF(584, 201),
                                              new PointF(111, 277), new PointF(284, 287), new PointF(458, 291), new PointF(580, 284),
                                              new PointF(130, 368), new PointF(276, 395), new PointF(429, 391), new PointF(563, 365) };
            VectorOfPointF vec = new VectorOfPointF();
            vec.Push(corners);

            // Corresponding object-space (world) points on the Z = 0 plane:
            // X spans thirds of the image width, Y spans halves of the image height.
            MCvPoint3D32f[] objCorners = new MCvPoint3D32f[] { new MCvPoint3D32f(0, 0, 0.0f), new MCvPoint3D32f(SRC_Img.Width / 3 - 1, 0, 0.0f), new MCvPoint3D32f(2 * SRC_Img.Width / 3 - 1, 0, 0.0f), new MCvPoint3D32f(SRC_Img.Width - 1, 0, 0.0f),
                                                               new MCvPoint3D32f(0, SRC_Img.Height / 2 - 1, 0.0f), new MCvPoint3D32f(SRC_Img.Width / 3 - 1, SRC_Img.Height / 2 - 1, 0.0f), new MCvPoint3D32f(2 * SRC_Img.Width / 3 - 1, SRC_Img.Height / 2 - 1, 0.0f), new MCvPoint3D32f(SRC_Img.Width - 1, SRC_Img.Height / 2 - 1, 0.0f),
                                                               new MCvPoint3D32f(0, SRC_Img.Height - 1, 0.0f), new MCvPoint3D32f(SRC_Img.Width / 3 - 1, SRC_Img.Height - 1, 0.0f), new MCvPoint3D32f(2 * SRC_Img.Width / 3 - 1, SRC_Img.Height - 1, 0.0f), new MCvPoint3D32f(SRC_Img.Width - 1, SRC_Img.Height - 1, 0.0f) };

            // Initial camera-matrix guess: unit focal lengths, principal point near
            // the image centre.
            Matrix <double> CameraMatrix = new Matrix <double>(3, 3, 1);
            CameraMatrix[0, 0] = 1;
            CameraMatrix[1, 1] = 1;
            CameraMatrix[2, 2] = 1;
            CameraMatrix[0, 2] = 349.417;
            CameraMatrix[1, 2] = 286.417;

            Mat newCameraMatrix = CvInvoke.GetDefaultNewCameraMatrix(CameraMatrix);

            Mat             distCoeffs    = new Mat(1, 5, DepthType.Cv32F, 1);
            Mat             rotCoeffs     = new Mat();
            Mat             translVectors = new Mat();
            MCvTermCriteria TermCriteria  = new MCvTermCriteria(30, 0.1);
            Corrected_Img = SRC_Img.Clone();
            CvInvoke.DrawChessboardCorners(Corrected_Img, new System.Drawing.Size(4, 3), vec, true);

            VectorOfPoint3D32F obj3dvec = new VectorOfPoint3D32F();
            obj3dvec.Push(objCorners);

            try
            {
                double r = CvInvoke.CalibrateCamera(obj3dvec,
                                                    vec,
                                                    SRC_Img.Size,
                                                    CameraMatrix,
                                                    distCoeffs,
                                                    rotCoeffs,
                                                    translVectors,
                                                    CalibType.Default,
                                                    TermCriteria);
                System.Diagnostics.Debug.WriteLine("Calibration RMS error: " + r);
            }
            catch (Exception ex)
            {
                // BUG FIX: the exception was silently swallowed with an unused
                // variable; keep the best-effort behaviour but record the failure.
                System.Diagnostics.Debug.WriteLine("CalibrateCamera failed: " + ex.Message);
            }

            IntrinsicCameraParameters IC = new IntrinsicCameraParameters(8);
            Matrix <float>            Map1, Map2;
            IC.InitUndistortMap(SRC_Img.Width, SRC_Img.Height, out Map1, out Map2);
            Image <Gray, Byte> stuff = Undistort(SRC_Img);

            imageBox1.Image = SRC_Img.Resize(imageBox1.Width, imageBox1.Height, Inter.Linear);
            imageBox2.Image = Corrected_Img.Resize(imageBox1.Width, imageBox1.Height, Inter.Linear);
        }
Exemplo n.º 15
0
        /// <summary>
        /// Performs a single calibration execution: accumulates one snap's detected
        /// corners into the image/object point vectors, and once enough snaps have
        /// been collected runs CalibrateCamera, enables undistortion and saves the
        /// resulting parameters.
        /// </summary>
        /// <param name="image">The image to search.</param>
        /// <param name="annotatedImage">The image with the corners/circles annotated.</param>
        private void SnapCalibrate(Image <Bgr, byte> image, ref Image <Bgr, byte> annotatedImage)
        {
            try
            {
                // initialize variables if first snap
                if (_imageIndex == 0)
                {
                    _cameraMatrix = new Mat(3, 3, DepthType.Cv64F, 1);
                    _distCoeffs   = new Mat(8, 1, DepthType.Cv64F, 1);
                    _objectPoints = new VectorOfVectorOfPoint3D32F();
                    _imagePoints  = new VectorOfVectorOfPointF();
                    _rvecs        = new Mat();
                    _tvecs        = new Mat();
                    Find          = false;
                    Undistort     = false;
                }

                // find corners/circles; bail out of this snap if none were found
                var corners = new VectorOfPointF();
                var found   = FindCorners(image, ref annotatedImage, corners);
                if (!found)
                {
                    return;
                }

                // flash (invert) the output image as visual feedback for the snap
                annotatedImage = annotatedImage.Not();

                // add corners to image points vector
                _imagePoints.Push(corners);

                // construct object points: a planar grid on Z = 0 spaced by the
                // physical object width/height per corner
                var objectList = new List <MCvPoint3D32f>();
                for (var col = 0; col < _cornersPerCol; col++)
                {
                    for (var row = 0; row < _cornersPerRow; row++)
                    {
                        objectList.Add(new MCvPoint3D32f(row * _objWidth, col * _objHeight, 0.0F));
                    }
                }

                // add constructed object points to object points vector
                _objectPoints.Push(new VectorOfPoint3D32F(objectList.ToArray()));

                // increment image index
                ImageIndex++;

                // exit if we haven't reached the required number of images yet
                if (_imageIndex < _numberOfImages)
                {
                    return;
                }

                // estimate intrinsic/extrinsic parameters
                ProjectionError = CvInvoke.CalibrateCamera(
                    _objectPoints,
                    _imagePoints,
                    image.Size,
                    _cameraMatrix,
                    _distCoeffs,
                    _rvecs,
                    _tvecs,
                    _calibType,
                    new MCvTermCriteria(30, 0.1));

                // latch undistort; reset image index
                Undistort  = true;
                ImageIndex = 0;

                // save parameters to file to be used by undistort image processor
                SaveParameters();
            }
            finally
            {
                Snap = false;
            }
        }
Exemplo n.º 16
0
        /// <summary>
        /// Calibrates the camera from chessboard photographs found under
        /// /home/dietpi/ and persists the camera matrix (K) and distortion
        /// coefficients (D) via DataUtil.
        /// </summary>
        public static void ProcessFrames()
        {
            var   cornersObjectList = new List <MCvPoint3D32f[]>();
            var   cornersPointsList = new List <PointF[]>();
            var   width             = 8;                       //width of chessboard no. squares in width - 1
            var   height            = 6;                       //height of chess board no. squares in height - 1
            // NOTE(review): squareSize = width * height (= 48) looks suspicious — the
            // square size is normally the physical edge length of one chessboard
            // square. Left unchanged because it only scales the extrinsic units.
            float squareSize        = width * height;
            var   patternSize       = new Size(width, height); //size of chess board to be detected
            var   corners           = new VectorOfPointF();    //corners found from chessboard

            Mat[] _rvecs, _tvecs;

            var frameArrayBuffer = new List <Mat>();

            var cameraMatrix = new Mat(3, 3, DepthType.Cv64F, 1);
            var distCoeffs   = new Mat(8, 1, DepthType.Cv64F, 1);

            // Glob our frames from the static dir, loop for them
            string[] filePaths = Directory.GetFiles(@"/home/dietpi/", "*.jpg");
            var      frames    = filePaths.Select(path => CvInvoke.Imread(path)).ToList();

            LogUtil.Write("We have " + frames.Count + " frames.");
            var fc = 0;

            foreach (var frame in frames)
            {
                var grayFrame = new Mat();

                // Convert to grayscale
                CvInvoke.CvtColor(frame, grayFrame, ColorConversion.Bgr2Gray);

                //apply chess board detection
                var boardFound = CvInvoke.FindChessboardCorners(grayFrame, patternSize, corners,
                                                                CalibCbType.AdaptiveThresh | CalibCbType.FastCheck | CalibCbType.NormalizeImage);
                if (boardFound)
                {
                    LogUtil.Write("Found board in frame " + fc);
                    //make measurements more accurate by using FindCornerSubPixel
                    CvInvoke.CornerSubPix(grayFrame, corners, new Size(11, 11), new Size(-1, -1),
                                          new MCvTermCriteria(30, 0.1));
                    frameArrayBuffer.Add(grayFrame);
                }
                else
                {
                    // BUG FIX: gray frames without a detected board were leaked;
                    // release the native buffer as soon as we know it is unused.
                    grayFrame.Dispose();
                }

                fc++;
                // BUG FIX: dispose the stale vector before replacing it (native memory).
                corners.Dispose();
                corners = new VectorOfPointF();
            }

            LogUtil.Write("We have " + frameArrayBuffer.Count + " frames to use for mapping.");
            // Loop through frames where board was detected
            foreach (var frame in frameArrayBuffer)
            {
                var frameVect = new VectorOfPointF();
                CvInvoke.FindChessboardCorners(frame, patternSize, frameVect,
                                               CalibCbType.AdaptiveThresh
                                               | CalibCbType.FastCheck | CalibCbType.NormalizeImage);
                //for accuracy
                CvInvoke.CornerSubPix(frame, frameVect, new Size(11, 11), new Size(-1, -1),
                                      new MCvTermCriteria(30, 0.1));

                //Fill our objects list with the real world measurements for the intrinsic calculations
                var objectList = new List <MCvPoint3D32f>();
                for (int i = 0; i < height; i++)
                {
                    for (int j = 0; j < width; j++)
                    {
                        objectList.Add(new MCvPoint3D32f(j * squareSize, i * squareSize, 0.0F));
                    }
                }

                cornersObjectList.Add(objectList.ToArray());
                cornersPointsList.Add(frameVect.ToArray());
                frameVect.Dispose();
            }

            //the reprojection error should be as close to 0 as possible
            double error = CvInvoke.CalibrateCamera(cornersObjectList.ToArray(), cornersPointsList.ToArray(),
                                                    frames[0].Size,
                                                    cameraMatrix, distCoeffs, CalibType.RationalModel, new MCvTermCriteria(30, 0.1), out _rvecs,
                                                    out _tvecs);

            LogUtil.Write("Correction error: " + error);
            var sk = JsonConvert.SerializeObject(cameraMatrix);
            var sd = JsonConvert.SerializeObject(distCoeffs);

            LogUtil.Write("Camera matrix: " + sk);
            LogUtil.Write("Dist coefficient: " + sd);
            DataUtil.SetItem("K", sk);
            DataUtil.SetItem("D", sd);

            // BUG FIX: release the native Mat memory held by the loaded and buffered
            // frames now that calibration results have been persisted.
            foreach (var buffered in frameArrayBuffer)
            {
                buffered.Dispose();
            }
            foreach (var frame in frames)
            {
                frame.Dispose();
            }
        }
Exemplo n.º 17
0
        /// <summary>
        /// Click handler: scans a user-selected folder of chessboard images,
        /// extracts corner points, calibrates the camera, and writes the camera
        /// matrix and distortion coefficients to text files.
        /// </summary>
        private void button1_Click(object sender, EventArgs e) // generate the calibration matrix files
        {
            board_size.Width  = Convert.ToInt16(textBox4.Text);
            board_size.Height = Convert.ToInt16(textBox5.Text);
            pic_n             = Convert.ToInt16(textBox3.Text);
            pic_h             = Convert.ToInt16(textBox7.Text);
            pic_w             = Convert.ToInt16(textBox6.Text);
            board_N           = board_size.Width * board_size.Height;
            count_time        = Convert.ToInt16(textBox8.Text);
            accuracy          = Convert.ToDouble(textBox9.Text);

            FolderBrowserDialog dialog = new FolderBrowserDialog();

            dialog.Description = "请选择文件路径";
            // dialog.SelectedPath = path;
            if (dialog.ShowDialog() == DialogResult.OK)
            {
                textBox2.Text = dialog.SelectedPath;
            }
            BDPicture_path = textBox2.Text.ToString();
            DirectoryInfo folder = new DirectoryInfo(BDPicture_path);

            image_count = 0;                                                 // number of images processed so far

            Mat[]                rotateMat     = new Mat[pic_n];             // rotation vectors, one per image
            Mat[]                transMat      = new Mat[pic_n];             // translation vectors, one per image
            MCvPoint3D32f[][]    object_points = new MCvPoint3D32f[pic_n][]; // object-space (world) corner sets
            List <MCvPoint3D32f> object_list   = new List <MCvPoint3D32f>();

            PointF[][] corner_count = new PointF[pic_n][];  // detected image-space corners per image
            Mat        view_gray    = new Mat();

            // NOTE(review): corner_count/object_points are sized for pic_n images but
            // this loop runs over every *.jpg in the folder — confirm the folder holds
            // exactly pic_n usable calibration images, or the indexing overflows.
            foreach (FileInfo file in folder.GetFiles("*.jpg").OrderBy(p => p.CreationTime)) // scan and process each calibration-board image
            {
                try
                {
                    image_count++;
                    // image = new Bitmap(file.FullName);
                    // load the image as a Bgr byte image
                    // Image<Bgr, byte> imageInput = new Image<Bgr, byte>(new Bitmap(Image.FromFile(filename)));
                    Image <Bgr, byte> imageInput = new Image <Bgr, byte>(new Bitmap(Image.FromFile(file.FullName)));
                    if (image_count == 1)
                    {
                        image_size.Width  = imageInput.Cols;
                        image_size.Height = imageInput.Rows;
                        textBox1.AppendText(image_size.Width.ToString() + "\r\n" + image_size.Height.ToString() + "\r\n");
                    }
                    CvInvoke.CvtColor(imageInput, view_gray, ColorConversion.Rgb2Gray);
                    CvInvoke.FindChessboardCorners(view_gray, board_size, Npointsl, CalibCbType.AdaptiveThresh);
                    corner_count[image_count - 1] = new PointF[board_N];
                    for (int S = 0; S < board_N; S++)
                    {
                        corner_count[image_count - 1][S] = Npointsl.ToArray()[S];
                    }
                    imageInput.Dispose();
                }
                catch
                {
                    // image quality too poor — report the file and continue
                    // MessageBox.Show("图片质量不佳:" + file.FullName);
                    textBox1.AppendText("图片质量不佳:" + file.Name + "\r\n");
                }
                textBox1.AppendText(image_count.ToString() + "\r\n");
            }


            for (T = 0; T < pic_n; T++) // build the object-space corner array for each image
            {
                object_list.Clear();
                for (i = 0; i < board_size.Height; i++)
                {
                    for (j = 0; j < board_size.Width; j++)
                    {
                        temple_points.X = j * pic_w;
                        temple_points.Y = i * pic_h;
                        temple_points.Z = 0;
                        object_list.Add(temple_points);
                    }
                }
                object_points[T] = object_list.ToArray();
            }

            CvInvoke.CalibrateCamera(object_points, corner_count, image_size, cameraMatrix, distCoeffs,
                                     CalibType.RationalModel, new MCvTermCriteria(count_time, accuracy), out rotateMat, out transMat);
            if (!ready)
            {
                MessageBox.Show("请选择矩阵输出路径!");
            }
            else
            {
                try
                {
                    FileStorage storage_came = new FileStorage(@matrix_path + "//" + "cameraMatrix.txt", FileStorage.Mode.Write); // NOTE: the path must not contain Chinese characters
                    storage_came.Write(cameraMatrix);
                    storage_came.ReleaseAndGetString();
                    FileStorage storage_dist = new FileStorage(@matrix_path + "//" + "distCoeffs.txt", FileStorage.Mode.Write);
                    storage_dist.Write(distCoeffs); // distCoeffs: the camera's distortion coefficients — 4, 5, 8, 12 or 14 elements; an empty vector means zero distortion
                    storage_dist.ReleaseAndGetString();
                    textBox1.AppendText("矩阵输出:" + matrix_path + "\r\n" + "-----------------------------------------------------" + "\r\n");
                    // cameraMatrix(1,1) =
                    MessageBox.Show("标定成功!");
                    storage_came.Dispose();
                    storage_dist.Dispose();
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.Message);
                }
            }
            GC.Collect();
        }
Exemplo n.º 18
0
        /// <summary>
        /// Single-image chessboard calibration demo: detects the board in the given
        /// photograph, calibrates with a rational distortion model, then shows the
        /// annotated source and the undistorted result.
        /// </summary>
        /// <param name="filename">Path to the chessboard photograph.</param>
        static void CameraCalibrate(string filename)
        {
            const int   imageCount = 1;
            const int   cornersX   = 9;
            const int   cornersY   = 6;
            const float squareSize = 20.0f;

            var patternSize = new Size(cornersX, cornersY);
            var colorImage  = new Image <Bgr, byte>(filename);
            var grayImage   = colorImage.Convert <Gray, byte>();

            // Object-space corner coordinates: a planar grid (Z = 0) with a fixed
            // offset, one entry per board corner, in row-major order.
            var boardPoints = new MCvPoint3D32f[cornersX * cornersY];
            var idx = 0;
            for (int row = 0; row < cornersY; ++row)
            {
                for (int col = 0; col < cornersX; ++col)
                {
                    boardPoints[idx++] = new MCvPoint3D32f(
                        10.0f + squareSize * (col + 1),
                        5.0f + squareSize * (row + 1),
                        0.0f);
                }
            }
            var objectCorners = new MCvPoint3D32f[imageCount][];
            objectCorners[0] = boardPoints;

            // Image-space corner coordinates detected in the photograph, refined to
            // sub-pixel accuracy in place.
            var detectedCorners = new VectorOfPointF();
            CvInvoke.FindChessboardCorners(grayImage, patternSize, detectedCorners);
            var imageCorners = new PointF[imageCount][];
            imageCorners[0] = detectedCorners.ToArray();
            grayImage.FindCornerSubPix(imageCorners, new Size(5, 5), new Size(-1, -1), new MCvTermCriteria(30, 0.1));

            var cameraMatrix       = new Mat();
            var distortionCoeffs   = new Mat();
            var rotationVectors    = new[] { new Mat() };
            var translationVectors = new[] { new Mat() };

            CvInvoke.CalibrateCamera(
                objectCorners,
                imageCorners,
                grayImage.Size,
                cameraMatrix,
                distortionCoeffs,
                CalibType.RationalModel,
                new MCvTermCriteria(30, 0.1),
                out rotationVectors,
                out translationVectors);

            // Undistort a copy of the source, annotate the original with the
            // detected corners, and show both windows.
            var calibratedImage = new Image <Bgr, byte>(colorImage.Size);
            CvInvoke.Undistort(colorImage, calibratedImage, cameraMatrix, distortionCoeffs);
            CvInvoke.DrawChessboardCorners(colorImage, patternSize, detectedCorners, true);
            CvInvoke.Imshow("chessboard", colorImage);
            CvInvoke.Imshow("calibrated", calibratedImage);
            CvInvoke.WaitKey();
        }
Exemplo n.º 19
0
        /// <summary>
        /// Computes the intrinsic parameters of the left and right cameras from the
        /// calibration images listed in calibrationImg.txt.
        /// </summary>
        void CameraIntrinsicCalculate()
        {
            string[] allimgfiles = File.ReadAllLines(root + @"\calibrationImg.txt");    // read every image file name from the list file
            string   imgfile     = root + @"\" + allimgfiles[0];                        // must be a full path

            try
            {
                for (int i = 0; i < allimgfiles.Length; i++)
                {
                    imgfile = root + @"\" + allimgfiles[i];
                    if (imgfile.Contains("leftImg"))                 // extract corners from left-camera images
                    {
                        srcImg = CvInvoke.Imread(imgfile, ImreadModes.Color);
                        CvInvoke.CvtColor(srcImg, grayImg, ColorConversion.Bgr2Gray);
                        if (false == CvInvoke.FindChessboardCorners(grayImg, patternSize, corners))
                        {
                            throw new Exception("左相机采集图片未识别到所有角点~,请重新采集~");
                        }
                        else
                        {
                            CvInvoke.Find4QuadCornerSubpix(grayImg, corners, new Size(11, 11)); // refine corners to sub-pixel accuracy
                            left_corners_set.Push(corners);                                     // store the extracted corner coordinates
                        }
                    }
                    else if (imgfile.Contains("rightImg"))           // extract corners from right-camera images
                    {
                        srcImg = CvInvoke.Imread(imgfile, ImreadModes.Color);
                        CvInvoke.CvtColor(srcImg, grayImg, ColorConversion.Bgr2Gray);
                        if (false == CvInvoke.FindChessboardCorners(grayImg, patternSize, corners))
                        {
                            throw new Exception("右相机采集图片未识别到所有角点~,请重新采集~");
                        }
                        else
                        {
                            CvInvoke.Find4QuadCornerSubpix(grayImg, corners, new Size(11, 11)); // refine corners to sub-pixel accuracy
                            right_corners_set.Push(corners);                                    // store the extracted corner coordinates
                        }
                    }
                    CvInvoke.WaitKey(10);
                }
                // build one object-space corner set per captured view: a planar grid
                // with SquareSize spacing on the Z = 0 plane
                for (int k = 0; k < left_corners_set.Size; k++)
                {
                    VectorOfPoint3D32F tempPoint = new VectorOfPoint3D32F();
                    for (int i = 0; i < patternSize.Height; i++)
                    {
                        for (int j = 0; j < patternSize.Width; j++)
                        {
                            MCvPoint3D32f[] objPoint = new MCvPoint3D32f[1];
                            objPoint[0].X = j * SquareSize;
                            objPoint[0].Y = i * SquareSize;
                            objPoint[0].Z = 0;
                            tempPoint.Push(objPoint);
                        }
                    }
                    objectpoints.Push(tempPoint);
                }
                // calibrate left-camera intrinsics
                CvInvoke.CalibrateCamera(objectpoints, left_corners_set, new Size(320, 240),
                                         leftCamMatrix, leftDistCoeffs, leftRvecs, leftTvecs, CalibType.Default, new MCvTermCriteria(30, 0.00001));
                // calibrate right-camera intrinsics
                // NOTE(review): the same objectpoints (sized from left_corners_set)
                // are reused here — confirm the right camera captured the same number
                // of views as the left.
                CvInvoke.CalibrateCamera(objectpoints, right_corners_set, new Size(320, 240),
                                         rightCamMatrix, rightDistCoeffs, rightRvecs, rightTvecs, CalibType.Default, new MCvTermCriteria(30, 0.00001));
                Data.LogString = "相机内参标定完成,请保存数据~~";
            }
            catch (Exception e)
            {
                Data.LogString = e.Message;
            }
        }
Exemplo n.º 20
0
        /// <summary>
        /// Per-frame capture callback driving the calibration state machine:
        /// Saving -> collect chessboard detections into a frame buffer,
        /// CalculateIntristrics -> run CalibrateCamera over the buffered frames,
        /// Calibrated -> undistort live frames for display.
        /// </summary>
        /// <param name="image">Latest frame from the capture device (BGR).</param>
        private void onImageGrab(Mat image)
        {
            frame = image;

            if (frame == null)
            {
                return;
            }

            // Grayscale conversion; the chessboard detector works on single-channel images.
            CvInvoke.CvtColor(frame, grayFrame, ColorConversion.Bgr2Gray);

            // Point-saving mode: look for the chessboard in every frame.
            if (currentMode == Modes.Saving)
            {
                hasCorners = CvInvoke.FindChessboardCorners(grayFrame, patternSize, corners, CalibCbType.AdaptiveThresh | CalibCbType.FastCheck | CalibCbType.NormalizeImage);
                if (hasCorners) // chess board found
                {
                    // Refine the detections to sub-pixel accuracy.
                    CvInvoke.CornerSubPix(grayFrame, corners, new Size(11, 11), new Size(-1, -1),
                                          new MCvTermCriteria(30, 0.1));

                    // BUG FIX: store a deep copy. grayFrame is a reused field that CvtColor
                    // overwrites on every grab, so storing the reference itself would make
                    // every buffer slot alias the same (latest) image and ruin the calibration.
                    frameArrayBuffer[frameBufferSavepoint] = grayFrame.Clone();
                    frameBufferSavepoint++;                 // increase buffer position

                    // Buffer full -> switch to the calibration phase.
                    if (frameBufferSavepoint == frameArrayBuffer.Length)
                    {
                        currentMode = Modes.CalculateIntristrics;
                    }
                    // NOTE(review): sleeping inside the grab callback throttles capture;
                    // presumably intentional to space out the saved views — confirm.
                    Thread.Sleep(100);
                    CvInvoke.DrawChessboardCorners(frame, patternSize, corners, hasCorners);

                    string text = String.Format("{0}", frameBufferSavepoint);

                    // Marshal the label update onto the UI thread when required.
                    if (this.countLabel.InvokeRequired)
                    {
                        StringArgReturningVoidDelegate d = new StringArgReturningVoidDelegate(SetLabelText);
                        this.Invoke(d, new object[] { text });
                    }
                    else
                    {
                        SetLabelText(text);
                    }
                }

                corners    = new VectorOfPointF();
                hasCorners = false;
            }

            // Calibration phase: re-detect corners on the buffered frames and solve.
            if (currentMode == Modes.CalculateIntristrics)
            {
                // The real-world chessboard coordinates are identical for every view,
                // so build the template list once instead of once per buffered frame.
                var objectList = new List <MCvPoint3D32f>();
                for (int i = 0; i < height; i++)
                {
                    for (int j = 0; j < width; j++)
                    {
                        objectList.Add(new MCvPoint3D32f(j * squareSize, i * squareSize, 0.0F));
                    }
                }

                for (int k = 0; k < frameArrayBuffer.Length; k++)
                {
                    cornersPointsVec[k] = new VectorOfPointF();
                    CvInvoke.FindChessboardCorners(frameArrayBuffer[k], patternSize, cornersPointsVec[k], CalibCbType.AdaptiveThresh
                                                   | CalibCbType.FastCheck | CalibCbType.NormalizeImage);
                    // BUG FIX: refine against the frame the corners were found in
                    // (frameArrayBuffer[k]), not the live grayFrame, which by now holds
                    // an unrelated image.
                    CvInvoke.CornerSubPix(frameArrayBuffer[k], cornersPointsVec[k], new Size(11, 11), new Size(-1, -1),
                                          new MCvTermCriteria(30, 0.1));

                    cornersObjectList[k] = objectList.ToArray();
                    cornersPointsList[k] = cornersPointsVec[k].ToArray();
                }

                // Reprojection error should be as close to 0 as possible.
                double error = CvInvoke.CalibrateCamera(cornersObjectList, cornersPointsList, grayFrame.Size,
                                                        cameraMatrix, distCoeffs, CalibType.RationalModel, new MCvTermCriteria(30, 0.1), out rvecs, out tvecs);
                MessageBox.Show(@"Intrinsic Calculation Error: " + error.ToString(CultureInfo.InvariantCulture), @"Results", MessageBoxButtons.OK, MessageBoxIcon.Information); //display the results to the user
                currentMode = Modes.Calibrated;
            }

            // Calibrated: show the undistorted live image.
            if (currentMode == Modes.Calibrated)
            {
                Mat outFrame = grayFrame.Clone();
                CvInvoke.Undistort(image, outFrame, cameraMatrix, distCoeffs);
                grayFrame = outFrame.Clone();

                // NOTE(review): the flag name suggests "already saved", but it gates the
                // save and is then cleared — looks like a one-shot trigger; confirm.
                if (calibrationSaved)
                {
                    SaveCalibration();
                    calibrationSaved = false;
                }
            }

            SetImageBox(frame);
        }
Exemplo n.º 21
0
        // Camera calibration: reads calibration image paths from calibdata1.txt,
        // extracts and sub-pixel-refines chessboard corners from each view, runs
        // CalibrateCamera to obtain intrinsics and distortion coefficients, then
        // displays undistorted versions of the input images.
        private void CalibraCamera(object sender, EventArgs e)
        {
            Console.WriteLine("开始提取角点");
            int            image_count = 0;          // number of images processed so far
            Size           image_size  = new Size(); // frame size, taken from the first image
            int            width       = 4;
            int            height      = 6;
            Size           board_size  = new Size(4, 6);                       // inner-corner grid of the calibration board
            int            CornerNum   = board_size.Width * board_size.Height; // corners per image
            int            nImage      = 14;
            VectorOfPointF Npointsl    = new VectorOfPointF();

            string filename;
            int    count = 0; // bookkeeping: index of the last processed image

            Console.WriteLine("count = " + count);
            MCvPoint3D32f[][] object_points = new MCvPoint3D32f[nImage][]; // 3D board coordinates per view
            PointF[][]        corner_count  = new PointF[nImage][];        // detected 2D corners per view

            // FIX: dispose the reader deterministically (the original leaked it).
            using (StreamReader sin = new StreamReader("calibdata1.txt"))
            {
                while ((filename = sin.ReadLine()) != null)
                {
                    image_count++;
                    Console.WriteLine("image_count = " + image_count);
                    Image <Bgr, byte> imageInput = new Image <Bgr, byte>(new Bitmap(Image.FromFile(filename)));
                    pictureBox1.Image = imageInput.ToBitmap();

                    if (image_count == 1) // take the frame size from the first image
                    {
                        Console.WriteLine("<---成功读取第一张图片--->");
                        image_size.Width  = imageInput.Cols;
                        image_size.Height = imageInput.Rows;
                        Console.WriteLine("image_size.Width  = " + image_size.Width);
                        Console.WriteLine("image_size.Hright = " + image_size.Height);
                    }

                    // Detect the inner chessboard corners on the grayscale image
                    // (inner corners do not touch the board's edge).
                    Mat view_gray = new Mat();
                    CvInvoke.CvtColor(imageInput, view_gray, ColorConversion.Rgb2Gray);
                    // NOTE(review): the bool result is ignored — a view without a visible
                    // board would leave short/stale corner data; confirm every input
                    // image actually contains the full board.
                    CvInvoke.FindChessboardCorners(view_gray, board_size, Npointsl, CalibCbType.AdaptiveThresh);

                    // FIX: hoist ToArray() out of the copy loop (the original re-built the
                    // whole array on every iteration) and use CornerNum instead of the
                    // hard-coded magic number 24.
                    PointF[] detected = Npointsl.ToArray();
                    corner_count[image_count - 1] = new PointF[CornerNum];
                    for (int i = 0; i < CornerNum; i++)
                    {
                        corner_count[image_count - 1][i] = detected[i];
                    }

                    // Refine the corner positions to sub-pixel accuracy to reduce
                    // calibration bias.
                    view_gray.ToImage <Gray, byte>().FindCornerSubPix(corner_count, new Size(11, 11), new Size(-1, -1), new MCvTermCriteria(30, 0.1));
                    Console.WriteLine("第" + image_count + "个图片已经被标记角点");

                    // Draw the detected corners — debug visualisation only.
                    CvInvoke.DrawChessboardCorners(view_gray, board_size, Npointsl, true);
                    pictureBox2.Image = view_gray.ToImage <Bgr, byte>().ToBitmap();
                    count             = image_count;
                    CvInvoke.WaitKey(500); // pause 0.5 s so the user can see each view
                }
            }
            Console.WriteLine("角点提取完成!!!");

            Console.WriteLine("开始标定");
            // Camera intrinsics (3x3) and the five distortion coefficients k1,k2,p1,p2,k3.
            Mat cameraMatrix = new Mat(3, 3, DepthType.Cv32F, 1);
            Mat distCoeffs = new Mat(1, 5, DepthType.Cv32F, 1);

            // Per-view rotation/translation vectors. FIX: these are filled through the
            // out parameters of CalibrateCamera, so the per-element Mat pre-allocation
            // the original did was wasted (and immediately discarded).
            Mat[] rotateMat;
            Mat[] transMat;

            // Initialise the board's 3D corner coordinates (unit square size, Z = 0).
            List <MCvPoint3D32f> object_list = new List <MCvPoint3D32f>();

            for (int k = 0; k < nImage; k++)
            {
                object_list.Clear();
                for (int i = 0; i < height; i++)
                {
                    for (int j = 0; j < width; j++)
                    {
                        object_list.Add(new MCvPoint3D32f(j, i, 0f));
                    }
                }
                object_points[k] = object_list.ToArray();
            }

            // Solve for the intrinsic matrix and distortion coefficients; each view
            // additionally yields its own rotation and translation vector.
            CvInvoke.CalibrateCamera(object_points, corner_count, image_size, cameraMatrix,
                                     distCoeffs, CalibType.RationalModel, new MCvTermCriteria(30, 0.1), out rotateMat, out transMat);
            Console.WriteLine("标定完成");

            Console.WriteLine("保存矫正图像");

            // FIX: dispose this reader too.
            using (StreamReader sin_test = new StreamReader("calibdata1.txt"))
            {
                string filename_test;

                for (int i = 0; i < nImage; i++)
                {
                    if ((filename_test = sin_test.ReadLine()) != null)
                    {
                        Image <Bgr, byte> imageSource = new Image <Bgr, byte>(new Bitmap(Image.FromFile(filename_test)));
                        Image <Bgr, byte> newimage    = imageSource.Clone();
                        // Undo the lens distortion using the calibration result.
                        CvInvoke.Undistort(imageSource, newimage, cameraMatrix, distCoeffs);
                        pictureBox3.Image = imageSource.ToBitmap();
                        pictureBox4.Image = newimage.ToBitmap();
                        CvInvoke.WaitKey(500);
                    }
                }
            }
            Console.WriteLine("标定结束!");
        }