Example #1
        /// <summary>
        /// Construct eye-rays through the specified pixels
        /// </summary>
        /// <param name="icp">Intrinsic camera parameters</param>
        /// <param name="pixels">Pixels</param>
        /// <returns>Enumerable ray collection</returns>
        public static Ray[] EyeRays(Emgu.CV.IntrinsicCameraParameters icp, PointF[] pixels)
        {
            Ray[] rays = new Ray[pixels.Length];
            if (pixels.Length > 0)
            {
                // 1. Undistort pixels
                PointF[] undistorted_pixels = icp.Undistort(pixels, null, icp.IntrinsicMatrix);
                // 2. Create rays
                // Use inverse intrinsic calibration and depth = 1
                double cx = icp.IntrinsicMatrix.Data[0, 2];
                double cy = icp.IntrinsicMatrix.Data[1, 2];
                double fx = icp.IntrinsicMatrix.Data[0, 0];
                double fy = icp.IntrinsicMatrix.Data[1, 1];

                Vector direction = new Vector(3);
                for (int i = 0; i < undistorted_pixels.Length; ++i)
                {
                    PointF pixel = undistorted_pixels[i];
                    direction[0] = (pixel.X - cx) / fx;
                    direction[1] = (pixel.Y - cy) / fy;
                    direction[2] = 1;
                    rays[i]      = new Ray(direction.Normalize());
                }
            }
            return(rays);
        }
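A minimal usage sketch, not part of the original example: shoot a ray through a single pixel and intersect it with a calibration plane, mirroring how CalibrationError further below consumes EyeRays. Here icp is an assumed calibration result and the plane an assumed scene plane.

        // Hedged sketch: icp is an assumed calibration result; the plane could
        // be built from an extrinsic calibration, e.g. new Plane(ecp).
        PointF[] pixels = new PointF[] { new PointF(320, 240) };
        Ray[] rays = Ray.EyeRays(icp, pixels);
        double t;
        Intersection.RayPlane(rays[0], plane, out t);
        Vector isect = rays[0].At(t); // intersection in camera coordinates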
        public override object EditValue(
            ITypeDescriptorContext context,
            IServiceProvider provider,
            object value)
        {
            if (context == null)
            {
                return(null);
            }

            using (OpenFileDialog ofd = new OpenFileDialog())
            {
                ofd.Filter = "Intrinsic Files|*.icp";
                if (ofd.ShowDialog() == DialogResult.OK)
                {
                    Emgu.CV.IntrinsicCameraParameters icp = null;
                    try
                    {
                        // File.Open throws on failure, so no null check is needed;
                        // the using block disposes (and closes) the stream.
                        using (Stream s = File.Open(ofd.FileName, FileMode.Open))
                        {
                            IFormatter formatter = new BinaryFormatter();
                            icp = formatter.Deserialize(s) as Emgu.CV.IntrinsicCameraParameters;
                        }
                    }
                    catch (Exception)
                    {
                        // Unreadable or incompatible file: fall through and return null.
                    }
                    return(icp);
                }
                else
                {
                    return(value);
                }
            }
        }
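The editor above loads .icp files produced with a BinaryFormatter; the matching writer is presumably symmetric. A hedged sketch (the file name is an assumption; note that BinaryFormatter is considered obsolete on modern .NET):

            // Hedged sketch: persist a calibration so the editor above can load it.
            using (Stream s = File.Open("camera.icp", FileMode.Create))
            {
                IFormatter formatter = new BinaryFormatter();
                formatter.Serialize(s, icp);
            }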
Example #4
        public Emgu.CV.IntrinsicCameraParameters Calibrate()
        {
            if (_image_points.Count == 0)
            {
                throw new InvalidOperationException("No image points present.");
            }

            List<MCvPoint3D32f[]> points = new List<MCvPoint3D32f[]>();

            for (int i = 0; i < _image_points.Count; ++i)
            {
                points.Add(this._converted_object_points);
            }
            Emgu.CV.ExtrinsicCameraParameters[] ecp;

            Emgu.CV.IntrinsicCameraParameters icp = new Emgu.CV.IntrinsicCameraParameters();

            Emgu.CV.CameraCalibration.CalibrateCamera(
                points.ToArray(),
                _image_points.ToArray(),
                _img_size,
                icp,
                Emgu.CV.CvEnum.CALIB_TYPE.DEFAULT,
                out ecp
                );

            return(icp);
        }
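Usage, following the call sequence visible in ApproximatePlane further below: construct the calibration with known object points and the image size, add one view of image points per observation, then calibrate.

            // Hedged sketch: object_points, image_points and size are assumed inputs.
            Parsley.Core.IntrinsicCalibration ic =
                new Parsley.Core.IntrinsicCalibration(object_points, size);
            ic.AddView(image_points); // call once per captured view
            Emgu.CV.IntrinsicCameraParameters icp = ic.Calibrate();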
Example #5
 /// <summary>
 /// Draw a visual indication of the pattern coordinate frame
 /// </summary>
 /// <param name="img">Image to draw to</param>
 /// <param name="ecp">Extrinsic calibration</param>
 /// <param name="icp">Intrinsic calibration</param>
 public virtual void DrawCoordinateFrame(
     Emgu.CV.Image<Bgr, Byte> img,
     Emgu.CV.ExtrinsicCameraParameters ecp,
     Emgu.CV.IntrinsicCameraParameters icp)
 {
     Drawing.DrawCoordinateFrame(img, ecp, icp);
 }
        public static void CalibrationError(
            Emgu.CV.ExtrinsicCameraParameters ecp,
            Emgu.CV.IntrinsicCameraParameters icp,
            System.Drawing.PointF[] image_points,
            Vector[] reference_points,
            out double[] deviations,
            out Vector[] isect_points)
        {
            // Shoot rays through image points,
            // intersect with plane defined by extrinsic
            // and measure distance to reference points
            Matrix inv_ecp = Matrix.Identity(4, 4);

            inv_ecp.SetMatrix(0, 2, 0, 3, ecp.ExtrinsicMatrix.ToParsley());
            inv_ecp = inv_ecp.Inverse();

            Ray[] rays = Ray.EyeRays(icp, image_points);
            Plane p    = new Plane(ecp);

            isect_points = new Vector[rays.Length];

            deviations = new double[rays.Length];
            for (int i = 0; i < rays.Length; ++i)
            {
                double t;
                Intersection.RayPlane(rays[i], p, out t);
                Vector isect = rays[i].At(t);
                Vector x     = new Vector(new double[] { isect[0], isect[1], isect[2], 1 });
                x = (inv_ecp * x.ToColumnMatrix()).GetColumnVector(0);
                Vector final = new Vector(new double[] { x[0], x[1], x[2] });
                isect_points[i] = final;
                deviations[i]   = (final - reference_points[i]).Norm();
            }
        }
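A short consumption sketch (the calling scope and the aggregate statistics are assumptions): the out-parameters yield one deviation and one intersection point per image point, so summarizing reprojection quality needs only System.Linq.

            // Hedged sketch: summarize the per-point deviations.
            double[] deviations;
            Vector[] isect_points;
            CalibrationError(ecp, icp, image_points, reference_points,
                             out deviations, out isect_points);
            double mean_error = deviations.Average(); // using System.Linq
            double max_error  = deviations.Max();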
 public ExtrinsicCalibration(Vector[] object_points, Emgu.CV.IntrinsicCameraParameters intrinsics)
   : base(object_points) 
 {
   _intrinsics = intrinsics;
   // Since object points remain constant, we can apply their conversion right here
   _converted_object_points = Array.ConvertAll<Vector, MCvPoint3D32f>(
     this.ObjectPoints, 
     new Converter<Vector, MCvPoint3D32f>(Extensions.ConvertFromParsley.ToEmguF)
   );
 }
        public static void VisualizeError(
            Emgu.CV.Image<Bgr, Byte> img,
            Emgu.CV.ExtrinsicCameraParameters ecp,
            Emgu.CV.IntrinsicCameraParameters icp,
            double[] deviations,
            Vector[] isect_points)
        {
            Bgr     bgr = new Bgr(System.Drawing.Color.Green);
            MCvFont f   = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_PLAIN, 0.8, 0.8);

            for (int i = 0; i < isect_points.Length; ++i)
            {
                // Project each intersection point back into the image and mark it
                System.Drawing.PointF[] coords = Emgu.CV.CameraCalibration.ProjectPoints(
                    new MCvPoint3D32f[] { isect_points[i].ToEmguF() },
                    ecp, icp
                    );
                img.Draw(new CircleF(coords[0], 1), bgr, 1);
                // Annotate each marker with its deviation from the reference point
                img.Draw(
                    deviations[i].ToString("F2"), ref f,
                    new System.Drawing.Point((int)coords[0].X, (int)coords[0].Y),
                    bgr);
            }
        }
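The deviations and intersection points produced by CalibrationError plug straight into this visualizer; continuing the sketch from above (img is assumed to be the current camera frame):

            // Hedged continuation: reuse deviations/isect_points from the
            // CalibrationError sketch above.
            VisualizeError(img, ecp, icp, deviations, isect_points);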
Example #10
        /// <summary>
        /// Draw a visual indication of the pattern coordinate frame
        /// </summary>
        /// <param name="img">Image to draw to</param>
        /// <param name="ecp">Extrinsic calibration</param>
        /// <param name="icp">Intrinsic calibration</param>
        public static void DrawCoordinateFrame(
            Emgu.CV.Image<Bgr, Byte> img,
            Emgu.CV.ExtrinsicCameraParameters ecp,
            Emgu.CV.IntrinsicCameraParameters icp)
        {
            float extension = img.Width / 10.0f;

            PointF[] coords = Emgu.CV.CameraCalibration.ProjectPoints(
                new MCvPoint3D32f[] {
                new MCvPoint3D32f(0, 0, 0),
                new MCvPoint3D32f(extension, 0, 0),
                new MCvPoint3D32f(0, extension, 0),
                new MCvPoint3D32f(0, 0, extension),
            },
                ecp, icp);

            img.Draw(new LineSegment2DF(coords[0], coords[1]), new Bgr(System.Drawing.Color.Red), 2);
            img.Draw(new LineSegment2DF(coords[0], coords[2]), new Bgr(System.Drawing.Color.Green), 2);
            img.Draw(new LineSegment2DF(coords[0], coords[3]), new Bgr(System.Drawing.Color.Blue), 2);
        }
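A sketch tying this helper to the calibration types shown earlier (object_points, image_points, icp and img are assumed to be in scope):

            // Hedged sketch: calibrate extrinsics, then overlay the pattern frame.
            Parsley.Core.ExtrinsicCalibration ec =
                new Parsley.Core.ExtrinsicCalibration(object_points, icp);
            Emgu.CV.ExtrinsicCameraParameters ecp = ec.Calibrate(image_points);
            Drawing.DrawCoordinateFrame(img, ecp, icp);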
Example #11
        /// <summary>
        /// Creates an off-axis perspective matrix from camera intrinsics.
        /// </summary>
        /// <remarks>Note that OSG lays out its matrices in row-major order, so you
        /// need to transpose this matrix before passing it to OSG.</remarks>
        /// <param name="camera">Camera with intrinsic parameters</param>
        /// <param name="near">Distance of the near clipping plane</param>
        /// <param name="far">Distance of the far clipping plane</param>
        /// <returns>Perspective matrix in column-major order</returns>
        public static Matrix FromCamera(Camera camera, double near, double far)
        {
            if (!camera.HasIntrinsics)
            {
                throw new ArgumentException("Camera has no intrinsic calibration");
            }
            // see
            // http://opencv.willowgarage.com/wiki/Posit
            // http://opencv.willowgarage.com/documentation/camera_calibration_and_3d_reconstruction.html
            // http://aoeu.snth.net/static/gen-perspective.pdf
            // http://www.hitl.washington.edu/artoolkit/mail-archive/message-thread-00654-Re--Questions-concering-.html

            Matrix m = new Matrix(4, 4, 0.0);

            Emgu.CV.IntrinsicCameraParameters icp = camera.Intrinsics;
            double fx = icp.IntrinsicMatrix[0, 0];
            double fy = icp.IntrinsicMatrix[1, 1];
            double px = icp.IntrinsicMatrix[0, 2];
            double py = icp.IntrinsicMatrix[1, 2];
            double w  = camera.FrameWidth;
            double h  = camera.FrameHeight;

            double dist = far - near;

            // First row
            m[0, 0] = 2.0 * (fx / w);
            m[0, 2] = 2.0 * (px / w) - 1.0;

            // Second row
            m[1, 1] = 2.0 * (fy / h);
            m[1, 2] = 2.0 * (py / h) - 1.0;

            // Third row
            m[2, 2] = -(far + near) / dist;
            m[2, 3] = -2.0 * far * near / dist;

            // Fourth row (note the flip)
            m[3, 2] = -1.0;

            return(m);
        }
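A brief usage sketch (the near/far values and the calling scope are illustrative assumptions): build the column-major projection, then honor the transpose the remarks call for.

            // Hedged sketch: near/far chosen arbitrarily for illustration.
            Matrix proj = FromCamera(camera, 0.1, 1000.0);
            // OSG stores matrices row-major, so transpose proj before handing it
            // over (see the remarks above).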
        public static void TestOpenCVProjection()
        {
            int numPoints = 1;

            Matrix<double> worldPoints = DenseMatrix.CreateRandom(2, numPoints, new ContinuousUniform(100, 300));
            worldPoints = worldPoints.InsertRow(2, DenseVector.CreateRandom(numPoints, new ContinuousUniform(500, 2000)));

            var posePosition = new DenseVector(new double[] { 1000, 2000, 3000 });
            var poseQuat = new DenseVector(new double[] { 0.4823661149, -0.009425591677, 0.8759094477, -0.004083401989 });
            //var poseQuat = new DenseVector(new double[] { 1, 0, 0, 0 });

            var extParameters = new Emgu.CV.ExtrinsicCameraParameters();
            extParameters.RotationVector = new Emgu.CV.RotationVector3D();
            extParameters.RotationVector.RotationMatrix = poseQuat.QuaternionToRotation().Inverse().ToEmguMatrix();
            extParameters.TranslationVector = (-posePosition).ToColumnMatrix().ToEmguMatrix();

            DenseMatrix calibrationMatrix = (DenseMatrix)Pose3D.CreateCalibrationMatrix(525, 320, 240);

            var intParameters = new Emgu.CV.IntrinsicCameraParameters();
            intParameters.IntrinsicMatrix = calibrationMatrix.ToEmguMatrix();

            var openCVProjectedPoints =
                Emgu.CV.CameraCalibration.ProjectPoints(
                worldPoints.ColumnEnumerator().Select(col => new MCvPoint3D32f((float)col.Item2[0], (float)col.Item2[1], (float)col.Item2[2])).ToArray(),
                extParameters,
                intParameters);

            Console.WriteLine("Open CV Projeted points: \n{0:0.00}", openCVProjectedPoints.ToMatrix(p => new double[] { p.X, p.Y }).Transpose());

            var myProjectedPoints = Pose3D.WorldToImagePoints(
                                            worldPoints,
                                            calibrationMatrix,
                                            posePosition,
                                            poseQuat.QuaternionToRotation());
            Console.WriteLine("My Projeted points: \n{0:0.00}", myProjectedPoints);
        }
Example #13
        /// <summary>
        /// Generate an intrinsic/extrinsic calibration from an initial set of four marker points.
        /// </summary>
        /// <param name="finals">List of ellipses</param>
        /// <param name="marker_ids">Ellipse ids of detected marker-ellipses</param>
        /// <param name="icp">Resulting intrinsic calibration</param>
        /// <param name="ecp">Resulting extrinsic calibration</param>
        /// <param name="size">Image size</param>
        private void ApproximatePlane(
            List<DetectedEllipse> finals, int[] marker_ids,
            out Emgu.CV.IntrinsicCameraParameters icp,
            out Emgu.CV.ExtrinsicCameraParameters ecp,
            System.Drawing.Size size)
        {
            // Currently the marker points correspond to corner points in the rectangular pattern.
            Vector[] object_points = new Vector[] {
                this.ObjectPoints[0],
                this.ObjectPoints[_number_circle_centers.Width - 1],
                this.ObjectPoints[_number_circle_centers.Width * (_number_circle_centers.Height - 1)],
                this.ObjectPoints[_number_circle_centers.Width * _number_circle_centers.Height - 1]
            };

            System.Drawing.PointF[] image_points =
                marker_ids.Select(id => { return(finals[id].Ellipse.MCvBox2D.center); }).ToArray();

            Parsley.Core.IntrinsicCalibration ic = new Parsley.Core.IntrinsicCalibration(object_points, size);
            ic.AddView(image_points);
            icp = ic.Calibrate();
            Parsley.Core.ExtrinsicCalibration ec = new Parsley.Core.ExtrinsicCalibration(object_points, icp);
            ecp = ec.Calibrate(image_points);
        }
        public static void TestPoseEstOpenCVRealData()
        {
            Action<string, IEnumerable<string>> octaveLog = (fname, ie) => File.WriteAllLines(Path.Combine(@"C:\Kamal\RSE\WorkingDirs\Octave\data", fname), ie);

            string mapFile = @"C:\Kamal\RSE\WorkingDirs\Visualizaton\espresso-1.bag.dump.map";
            string poseFile = @"C:\Kamal\RSE\RawData\espresso-1-fs-0\espresso-1-fs-0\espresso-1-fs-0\loop_closure\loop-closure.txt";

            DenseMatrix camCalibration = (DenseMatrix)Pose3D.CreateCalibrationMatrix(525, 320, 240);
            var intParameters = new Emgu.CV.IntrinsicCameraParameters();
            intParameters.IntrinsicMatrix = camCalibration.ToEmguMatrix();

            var poseData = ImageMap.ParsePoseData(File.ReadLines(poseFile),
                                    (frameID, poseQuaternion, posePosition) =>
                                        new
                                        {
                                            frameID,
                                            poseQuaternion,
                                            posePosition
                                        })
                                        .ToDictionary(p => p.frameID, p => new { p.posePosition, p.poseQuaternion });

            var imageMap =
                ImageMap.LoadImageMap(mapFile, (frameID, imagePoint, depth, point3D, descriptor) =>
                                new
                                {
                                    frameID,
                                    imagePoint,
                                    depth,
                                    point3D,
                                    descriptor
                                })
                                .Select((point, lineNumber) => new { point, lineNumber})
                                .ToList();

            var frameWiseMap =
            imageMap.GroupBy(i => i.point.frameID)
            .Select(f =>
                        new
                        {
                            frameID = f.Key,
                            projectedPoints = f.OrderBy(ff => ff.lineNumber).Select(ff => ff.point.imagePoint),
                            points3D = f.OrderBy(ff => ff.lineNumber).Select(ff => ff.point.point3D),
                        })
                        .ToArray();

            Func<int, Dictionary<string, DenseMatrix>> inferPoses =
                (numPoints) =>
                    frameWiseMap
                        .Select(f =>
                                    new
                                    {
                                        frameID = f.frameID,
                                        inferredPositionVector =
                                                    Emgu.CV.CameraCalibration.FindExtrinsicCameraParams2(
                                                        f.points3D
                                                            .Take(numPoints)
                                                            .Select(point3D => new MCvPoint3D32f((float)point3D[0], (float)point3D[1], (float)point3D[2]))
                                                            .ToArray(),
                                                        f.projectedPoints.Take(numPoints).ToArray(),
                                                        intParameters
                                                        )
                                                        .TranslationVector.Mul(-1).ToDenseMatrix() // OpenCV returns the extrinsic translation negated relative to the pose conventions used here
                                    })
                                    .ToDictionary(f => f.frameID, f => f.inferredPositionVector);

            Func<int, double> computePoseInferenceError = (numPoints) =>
                    inferPoses(numPoints).Select(inf =>
                        inf.Value.Subtract(poseData[inf.Key].posePosition.ToColumnMatrix()).Magnitude())
                        .Average();

            var poseInfErrors = Enumerable.Range(5, 10).Select(numPoints => new { numPoints, poseError = computePoseInferenceError(numPoints) });

            octaveLog("posError.txt", poseInfErrors.Select(d => String.Format("{0}\t{1}", d.numPoints, d.poseError)));
        }
        // Confirm the 3D geometry by inferring the pose from many world points and their 2D pixel projections
        public static void TestPoseEstimationOpenCV()
        {
            DenseVector poseQuat = new DenseVector(new double[] { 1, 0, 0, 0 });
            DenseVector posePosition = new DenseVector(new double[] { 0, 0, 0 });

            DenseMatrix calibrationMatrix = (DenseMatrix)Pose3D.CreateCalibrationMatrix(525, 320, 240);

            int numPoints = 7;

            Matrix<double> worldPoints = DenseMatrix.CreateRandom(2, numPoints, new ContinuousUniform(100, 300));
            worldPoints = worldPoints.InsertRow(2, DenseVector.CreateRandom(numPoints, new ContinuousUniform(500, 2000)));

            var extParameters = new Emgu.CV.ExtrinsicCameraParameters();
            extParameters.RotationVector = new Emgu.CV.RotationVector3D(new double[] { 4, 5, 6 });
            extParameters.TranslationVector = new Emgu.CV.Matrix<double>(new double[] { 1, 2, 3 });

            Console.WriteLine("Known extrinsic:\n {0}", RGBLocalization.Utility.MatrixExtensions.ToString(extParameters.ExtrinsicMatrix));
            Console.WriteLine("Known extrinsic (translation vector):\n {0}", RGBLocalization.Utility.MatrixExtensions.ToString(extParameters.TranslationVector));

            var intParameters = new Emgu.CV.IntrinsicCameraParameters();
            intParameters.IntrinsicMatrix = calibrationMatrix.ToEmguMatrix();

            var projectedPoints =
                Emgu.CV.CameraCalibration.ProjectPoints(
                worldPoints.ColumnEnumerator().Select(col => new MCvPoint3D32f((float)col.Item2[0], (float)col.Item2[1], (float)col.Item2[2])).ToArray(),
                extParameters,
                intParameters);

            Console.WriteLine("Known world: \n{0:0.00}", worldPoints);
            Console.WriteLine("Projeted points: \n{0:0.00}", projectedPoints.ToMatrix(p => new double[] { p.X, p.Y }).Transpose());

            var inferredExtParameters =
            Emgu.CV.CameraCalibration.FindExtrinsicCameraParams2(
                worldPoints.ColumnEnumerator().Select(col => new MCvPoint3D32f((float)col.Item2[0], (float)col.Item2[1], (float)col.Item2[2])).ToArray(),
                projectedPoints,
                intParameters
                );

            Console.WriteLine("Inferred Ext: \n{0:0.00}", RGBLocalization.Utility.MatrixExtensions.ToString(inferredExtParameters.ExtrinsicMatrix));
            Console.WriteLine("Inferred Ext (translation vector): \n{0:0.00}", RGBLocalization.Utility.MatrixExtensions.ToString(inferredExtParameters.TranslationVector));
        }
        public static void TestImageMapConsistency(string mapFile, string poseFile)
        {
            Func<System.Drawing.PointF, System.Drawing.PointF, double> distance2D = (p1, p2) =>
                            Math.Sqrt(Math.Pow(p1.X - p2.X, 2) + Math.Pow(p1.Y - p2.Y, 2));

            Func<DenseVector, System.Drawing.PointF> ToPointF = v => new System.Drawing.PointF((float)v[0], (float)v[1]);

            //string mapFile = @"C:\Kamal\RSE\WorkingDirs\Visualizaton\espresso-1.bag.dump.map";
            //string poseFile = @"C:\Kamal\RSE\RawData\espresso-1-fs-0\espresso-1-fs-0\espresso-1-fs-0\loop_closure\loop-closure.txt";

            DenseMatrix camCalibration = (DenseMatrix)Pose3D.CreateCalibrationMatrix(525, 320, 240);
            var intParameters = new Emgu.CV.IntrinsicCameraParameters();
            intParameters.IntrinsicMatrix = camCalibration.ToEmguMatrix();

            var poseData = ImageMap.ParsePoseData(File.ReadLines(poseFile),
                                    (frameID, poseQuaternion, posePosition) =>
                                        new
                                        {
                                            frameID,
                                            poseQuaternion,
                                            posePosition
                                        })
                                        .ToDictionary(p => p.frameID, p => new { p.posePosition, p.poseQuaternion });

            Func<DenseVector, DenseVector, DenseVector, System.Drawing.PointF> OpenCVProject =
                (point3D, posePosition, poseQuat) =>
                {
                    var extParameters = new Emgu.CV.ExtrinsicCameraParameters();
                    extParameters.RotationVector = new Emgu.CV.RotationVector3D();
                    extParameters.RotationVector.RotationMatrix = poseQuat.QuaternionToRotation().Inverse().ToEmguMatrix();
                    extParameters.TranslationVector = (-posePosition).ToColumnMatrix().ToEmguMatrix();

                    return
                    Emgu.CV.CameraCalibration.ProjectPoints(
                    new MCvPoint3D32f[] { new MCvPoint3D32f((float)point3D[0], (float)point3D[1], (float)point3D[2]) },
                    extParameters,
                    intParameters)[0];
                };

            var imageMap =
                ImageMap.LoadImageMap(mapFile, (frameID, imagePoint, depth, point3D, descriptor) =>
                                new
                                {
                                    frameID,
                                    projectedPoint = OpenCVProject(point3D, poseData[frameID].posePosition, poseData[frameID].poseQuaternion),
                                    myImpProjectedPoint = (DenseVector)Pose3D.WorldToImagePoints(point3D.ToColumnMatrix(), camCalibration, poseData[frameID].posePosition, poseData[frameID].poseQuaternion.QuaternionToRotation()).Column(0),
                                    imagePoint,
                                    depth,
                                    point3D,
                                    descriptor
                                }).ToList();

            Console.WriteLine("OpevCV vs. myImp Projection error: {0}", imageMap.Average(p => distance2D(ToPointF(p.myImpProjectedPoint), p.projectedPoint)));
            Console.WriteLine("OpevCV vs. imagepoints Projection error: {0}", imageMap.Average(p => distance2D(p.imagePoint, p.projectedPoint)));

            foreach (var p in imageMap.Where(p => distance2D(ToPointF(p.myImpProjectedPoint), p.projectedPoint) > 1000).Take(10))
            {
                Console.WriteLine("----------");
                Console.WriteLine("OpenCV:\t{0}", p.projectedPoint);
                Console.WriteLine("Image:\t{0}", p.imagePoint);
                Console.WriteLine("Depth:\t{0}", p.depth);
                Console.WriteLine("point3d:\t {0}", p.point3D);
                Console.WriteLine("pose:\t {0}", poseData[p.frameID].posePosition);
                Console.WriteLine("quat:\t {0}", poseData[p.frameID].poseQuaternion);
                Console.WriteLine("my proj impl:\t{0}",
                    p.point3D.ToColumnMatrix().WorldToImagePoints(camCalibration, poseData[p.frameID].posePosition, poseData[p.frameID].poseQuaternion.QuaternionToRotation()));

                Console.WriteLine("Re-inferred 3d point: {0}",
                    Pose3D.DPixelToWorld(
                        poseData[p.frameID].poseQuaternion,
                        poseData[p.frameID].posePosition,
                        (DenseMatrix)camCalibration.Inverse(),
                        new DenseMatrix(3, 1, new double[] { p.imagePoint.X, p.imagePoint.Y, 1.0 }),
                        new DenseMatrix(1, 1, new double[] { p.depth })));
            }
        }