Example #1
 /// <summary>
 /// Construct reference plane from extrinsic calibration
 /// </summary>
 /// <param name="ecp">Extrinsic calibration</param>
 public ReferencePlane(Emgu.CV.ExtrinsicCameraParameters ecp, Vector[] deviation_points, double[] deviations)
 {
     _ecp              = ecp;
     _plane            = new Plane(ecp);
     _deviation_points = deviation_points;
     _deviations       = deviations;
 }
Example #2
        /// <summary>
        /// Construct plane from extrinsic coordinate system
        /// </summary>
        /// <remarks>The z-axis of the extrinsic frame is used as the plane normal, and the translation offset is used to calculate D.</remarks>
        /// <param name="ecp"></param>
        public Plane(Emgu.CV.ExtrinsicCameraParameters ecp)
        {
            Matrix m = ecp.ExtrinsicMatrix.ToParsley();

            _normal = m.GetColumnVector(2).Normalize();
            _d      = -Vector.ScalarProduct(m.GetColumnVector(3), _normal);
        }
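A minimal sketch of what this constructor guarantees (assuming hypothetical Normal and D accessors, which the snippet above does not show): the pattern origin, i.e. the translation column of the extrinsic matrix, lies on the constructed plane by definition.

        // Hedged sketch, not project code: verify the plane equation n·t + D == 0
        // for the pattern origin t. Normal and D are assumed accessors.
        Plane plane = new Plane(ecp);
        Matrix m = ecp.ExtrinsicMatrix.ToParsley();
        Vector t = m.GetColumnVector(3);
        double residual = Vector.ScalarProduct(t, plane.Normal) + plane.D;
        // residual is exactly 0 up to floating-point rounding.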
Example #3
 /// <summary>
 /// Draw a visual indication of the pattern coordinate frame
 /// </summary>
 /// <param name="img">Image to draw to</param>
 /// <param name="ecp">Extrinsic calibration</param>
 /// <param name="icp">Intrinsic calibration</param>
 public virtual void DrawCoordinateFrame(
     Emgu.CV.Image<Bgr, Byte> img,
     Emgu.CV.ExtrinsicCameraParameters ecp,
     Emgu.CV.IntrinsicCameraParameters icp)
 {
     Drawing.DrawCoordinateFrame(img, ecp, icp);
 }
Example #4
        public override object EditValue(
            ITypeDescriptorContext context,
            IServiceProvider provider,
            object value)
        {
            if (context == null)
            {
                return null;
            }

            using (OpenFileDialog ofd = new OpenFileDialog())
            {
                ofd.Filter = "Extrinsic Files|*.ecp";
                if (ofd.ShowDialog() != DialogResult.OK)
                {
                    return value;
                }

                Emgu.CV.ExtrinsicCameraParameters ecp = null;
                try
                {
                    // The using block disposes the stream; an explicit Close() is redundant.
                    using (Stream s = File.Open(ofd.FileName, FileMode.Open))
                    {
                        IFormatter formatter = new BinaryFormatter();
                        ecp = formatter.Deserialize(s) as Emgu.CV.ExtrinsicCameraParameters;
                    }
                }
                catch (Exception)
                {
                    // Swallow I/O and deserialization errors; ecp stays null.
                }
                return ecp;
            }
        }
Example #5
        public static void CalibrationError(
            Emgu.CV.ExtrinsicCameraParameters ecp,
            Emgu.CV.IntrinsicCameraParameters icp,
            System.Drawing.PointF[] image_points,
            Vector[] reference_points,
            out double[] deviations,
            out Vector[] isect_points)
        {
            // Shoot rays through image points,
            // intersect with plane defined by extrinsic
            // and measure distance to reference points
            // Embed the 3x4 extrinsic matrix in a homogeneous 4x4 matrix and invert it,
            // so that camera-space intersection points can be mapped back into the
            // calibration-pattern frame.
            Matrix inv_ecp = Matrix.Identity(4, 4);

            inv_ecp.SetMatrix(0, 2, 0, 3, ecp.ExtrinsicMatrix.ToParsley());
            inv_ecp = inv_ecp.Inverse();

            Ray[] rays = Ray.EyeRays(icp, image_points);
            Plane p    = new Plane(ecp);

            isect_points = new Vector[rays.Length];

            deviations = new double[rays.Length];
            for (int i = 0; i < rays.Length; ++i)
            {
                double t;
                // Intersect the eye ray with the calibration plane; a ray parallel
                // to the plane is not handled separately here.
                Intersection.RayPlane(rays[i], p, out t);
                Vector isect = rays[i].At(t);
                Vector x     = new Vector(new double[] { isect[0], isect[1], isect[2], 1 });
                x = (inv_ecp * x.ToColumnMatrix()).GetColumnVector(0);
                Vector final = new Vector(new double[] { x[0], x[1], x[2] });
                isect_points[i] = final;
                deviations[i]   = (final - reference_points[i]).Norm();
            }
        }
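A minimal calling sketch (variable names illustrative; assumes using System.Linq): aggregate the per-point deviations into a single RMS error figure.

        double[] deviations;
        Vector[] isect_points;
        CalibrationError(ecp, icp, image_points, reference_points, out deviations, out isect_points);

        // Root mean square of the distances to the reference points.
        double rms = Math.Sqrt(deviations.Select(d => d * d).Average());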
Example #6
        PointF Backproject(Vector p, Emgu.CV.ExtrinsicCameraParameters ecp)
        {
            // Embed the 3x4 extrinsic matrix in a homogeneous 4x4 matrix and invert it
            // to express the camera-space point p in pattern coordinates.
            Matrix m = Matrix.Identity(4, 4);

            m.SetMatrix(0, 2, 0, 3, ecp.ExtrinsicMatrix.ToParsley());
            Matrix i = m.Inverse();
            Vector x = new Vector(new double[] { p[0], p[1], p[2], 1 });
            Matrix r = i * x.ToColumnMatrix();

            // Re-project the pattern-space point into the image plane.
            PointF[] coords = Emgu.CV.CameraCalibration.ProjectPoints(
                new MCvPoint3D32f[] {
                new MCvPoint3D32f((float)r[0, 0], (float)r[1, 0], (float)r[2, 0])
            },
                ecp,
                Context.Setup.World.Camera.Intrinsics);
            return(coords[0]);
        }
Example #7
        override protected void OnFrame(Parsley.Core.BuildingBlocks.FrameGrabber fp, Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> img)
        {
            Core.CalibrationPattern    pattern = this.Context.CalibrationPattern;
            Emgu.CV.Image<Gray, Byte> gray    = img.Convert<Gray, Byte>();
            gray._EqualizeHist();
            pattern.FindPattern(gray);

            if (pattern.PatternFound)
            {
                Emgu.CV.ExtrinsicCameraParameters ecp = _ex.Calibrate(pattern.ImagePoints);
                lock (Context.Viewer) {
                    Matrix m = Matrix.Identity(4, 4);
                    m.SetMatrix(0, 2, 0, 3, ecp.ExtrinsicMatrix.ToParsley());
                    _board_transform.Matrix = m.ToInterop();
                }
            }

            pattern.DrawPattern(img, pattern.ImagePoints, pattern.PatternFound);
        }
Example #8
        public static void VisualizeError(
            Emgu.CV.Image<Bgr, Byte> img,
            Emgu.CV.ExtrinsicCameraParameters ecp,
            Emgu.CV.IntrinsicCameraParameters icp,
            double[] deviations,
            Vector[] isect_points)
        {
            Bgr     bgr = new Bgr(System.Drawing.Color.Green);
            MCvFont f   = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_PLAIN, 0.8, 0.8);

            foreach (Vector p in isect_points)
            {
                System.Drawing.PointF[] coords = Emgu.CV.CameraCalibration.ProjectPoints(
                    new MCvPoint3D32f[] { p.ToEmguF() },
                    ecp, icp
                    );
                img.Draw(new CircleF(coords[0], 1), bgr, 1);
            }
        }
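The deviations and isect_points produced by CalibrationError (Example #5) feed directly into this routine; a minimal chaining sketch (variable names illustrative):

        double[] deviations;
        Vector[] isect_points;
        CalibrationError(ecp, icp, image_points, reference_points, out deviations, out isect_points);
        VisualizeError(img, ecp, icp, deviations, isect_points);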
Example #9
        /// <summary>
        /// Draw a visual indication of the pattern coordinate frame
        /// </summary>
        /// <param name="img">Image to draw to</param>
        /// <param name="ecp">Extrinsic calibration</param>
        /// <param name="icp">Intrinsic calibration</param>
        public static void DrawCoordinateFrame(
            Emgu.CV.Image<Bgr, Byte> img,
            Emgu.CV.ExtrinsicCameraParameters ecp,
            Emgu.CV.IntrinsicCameraParameters icp)
        {
            float extension = img.Width / 10;

            PointF[] coords = Emgu.CV.CameraCalibration.ProjectPoints(
                new MCvPoint3D32f[] {
                new MCvPoint3D32f(0, 0, 0),
                new MCvPoint3D32f(extension, 0, 0),
                new MCvPoint3D32f(0, extension, 0),
                new MCvPoint3D32f(0, 0, extension),
            },
                ecp, icp);

            img.Draw(new LineSegment2DF(coords[0], coords[1]), new Bgr(System.Drawing.Color.Red), 2);
            img.Draw(new LineSegment2DF(coords[0], coords[2]), new Bgr(System.Drawing.Color.Green), 2);
            img.Draw(new LineSegment2DF(coords[0], coords[3]), new Bgr(System.Drawing.Color.Blue), 2);
        }
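A hedged usage sketch stitched together from Examples #6 and #7 (not verbatim project code): once the pattern is found and extrinsically calibrated, overlay its coordinate frame on the live image.

        // Inside a frame handler, after pattern detection succeeded:
        if (pattern.PatternFound)
        {
            Emgu.CV.ExtrinsicCameraParameters ecp = _ex.Calibrate(pattern.ImagePoints);
            Drawing.DrawCoordinateFrame(img, ecp, Context.Setup.World.Camera.Intrinsics);
        }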
Example #10
        public static void TestOpenCVProjection()
        {
            int numPoints = 1;

            Matrix<double> worldPoints = DenseMatrix.CreateRandom(2, numPoints, new ContinuousUniform(100, 300));
            worldPoints = worldPoints.InsertRow(2, DenseVector.CreateRandom(numPoints, new ContinuousUniform(500, 2000)));

            var posePosition = new DenseVector(new double[] { 1000, 2000, 3000 });
            var poseQuat = new DenseVector(new double[] { 0.4823661149, -0.009425591677, 0.8759094477, -0.004083401989 });
            //var poseQuat = new DenseVector(new double[] { 1, 0, 0, 0 });

            var extParameters = new Emgu.CV.ExtrinsicCameraParameters();
            extParameters.RotationVector = new Emgu.CV.RotationVector3D();
            extParameters.RotationVector.RotationMatrix = poseQuat.QuaternionToRotation().Inverse().ToEmguMatrix();
            extParameters.TranslationVector = (-posePosition).ToColumnMatrix().ToEmguMatrix();

            DenseMatrix calibrationMatrix = (DenseMatrix)Pose3D.CreateCalibrationMatrix(525, 320, 240);

            var intParameters = new Emgu.CV.IntrinsicCameraParameters();
            intParameters.IntrinsicMatrix = calibrationMatrix.ToEmguMatrix();

            var openCVProjectedPoints =
                Emgu.CV.CameraCalibration.ProjectPoints(
                worldPoints.ColumnEnumerator().Select(col => new MCvPoint3D32f((float)col.Item2[0], (float)col.Item2[1], (float)col.Item2[2])).ToArray(),
                extParameters,
                intParameters);

            Console.WriteLine("Open CV Projeted points: \n{0:0.00}", openCVProjectedPoints.ToMatrix(p => new double[] { p.X, p.Y }).Transpose());

            var myProjectedPoints = Pose3D.WorldToImagePoints(
                                            worldPoints,
                                            calibrationMatrix,
                                            posePosition,
                                            poseQuat.QuaternionToRotation());
            Console.WriteLine("My Projeted points: \n{0:0.00}", myProjectedPoints);
        }
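For reference, both code paths implement the same pinhole model: with the values assigned to extParameters above, ProjectPoints computes, in homogeneous coordinates, x ~ K (R X + t) with R = R_pose^-1 and t = -p, then dehomogenizes. The two printouts should therefore agree up to floating-point rounding.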
Example #11
        /// <summary>
        /// Generate an intrinsic/extrinsic calibration from an initial set of four marker points.
        /// </summary>
        /// <param name="finals">List of ellipses</param>
        /// <param name="marker_ids">Ellipse ids of detected marker-ellipses</param>
        /// <param name="icp">Resulting intrinsic calibration</param>
        /// <param name="ecp">Resulting extrinsic calibration</param>
        /// <param name="size">Image size</param>
        private void ApproximatePlane(List<DetectedEllipse> finals, int[] marker_ids, out Emgu.CV.IntrinsicCameraParameters icp, out Emgu.CV.ExtrinsicCameraParameters ecp, System.Drawing.Size size)
        {
            // Currently the marker points correspond to corner points in the rectangular pattern.
            Vector[] object_points = new Vector[] {
                this.ObjectPoints[0],
                this.ObjectPoints[_number_circle_centers.Width - 1],
                this.ObjectPoints[_number_circle_centers.Width * (_number_circle_centers.Height - 1)],
                this.ObjectPoints[_number_circle_centers.Width * _number_circle_centers.Height - 1]
            };

            System.Drawing.PointF[] image_points =
                marker_ids.Select(id => finals[id].Ellipse.MCvBox2D.center).ToArray();

            Parsley.Core.IntrinsicCalibration ic = new Parsley.Core.IntrinsicCalibration(object_points, size);
            ic.AddView(image_points);
            icp = ic.Calibrate();
            Parsley.Core.ExtrinsicCalibration ec = new Parsley.Core.ExtrinsicCalibration(object_points, icp);
            ecp = ec.Calibrate(image_points);
        }
Example #12
        // Confirm the 3D geometry: infer the pose from many world points and their 2D pixel projections.
        public static void TestPoseEstimationOpenCV()
        {
            DenseVector poseQuat = new DenseVector(new double[] { 1, 0, 0, 0 });
            DenseVector posePosition = new DenseVector(new double[] { 0, 0, 0 });

            DenseMatrix calibrationMatrix = (DenseMatrix)Pose3D.CreateCalibrationMatrix(525, 320, 240);

            int numPoints = 7;

            Matrix<double> worldPoints = DenseMatrix.CreateRandom(2, numPoints, new ContinuousUniform(100, 300));
            worldPoints = worldPoints.InsertRow(2, DenseVector.CreateRandom(numPoints, new ContinuousUniform(500, 2000)));

            var extParameters = new Emgu.CV.ExtrinsicCameraParameters();
            extParameters.RotationVector = new Emgu.CV.RotationVector3D(new double[] { 4, 5, 6 });
            extParameters.TranslationVector = new Emgu.CV.Matrix<double>(new double[] { 1, 2, 3 });

            Console.WriteLine("Known extrinsic:\n {0}", RGBLocalization.Utility.MatrixExtensions.ToString(extParameters.ExtrinsicMatrix));
            Console.WriteLine("Known extrinsic (translation vector):\n {0}", RGBLocalization.Utility.MatrixExtensions.ToString(extParameters.TranslationVector));

            var intParameters = new Emgu.CV.IntrinsicCameraParameters();
            intParameters.IntrinsicMatrix = calibrationMatrix.ToEmguMatrix();

            var projectedPoints =
                Emgu.CV.CameraCalibration.ProjectPoints(
                worldPoints.ColumnEnumerator().Select(col => new MCvPoint3D32f((float)col.Item2[0], (float)col.Item2[1], (float)col.Item2[2])).ToArray(),
                extParameters,
                intParameters);

            Console.WriteLine("Known world: \n{0:0.00}", worldPoints);
            Console.WriteLine("Projeted points: \n{0:0.00}", projectedPoints.ToMatrix(p => new double[] { p.X, p.Y }).Transpose());

            var inferredExtParameters =
            Emgu.CV.CameraCalibration.FindExtrinsicCameraParams2(
                worldPoints.ColumnEnumerator().Select(col => new MCvPoint3D32f((float)col.Item2[0], (float)col.Item2[1], (float)col.Item2[2])).ToArray(),
                projectedPoints,
                intParameters
                );

            Console.WriteLine("Inferred Ext: \n{0:0.00}", RGBLocalization.Utility.MatrixExtensions.ToString(inferredExtParameters.ExtrinsicMatrix));
            Console.WriteLine("Inferred Ext (translation vector): \n{0:0.00}", RGBLocalization.Utility.MatrixExtensions.ToString(inferredExtParameters.TranslationVector));
        }
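A hedged follow-up sketch (not part of the original test): quantify how closely the inferred extrinsic matches the known one, e.g. via the Frobenius norm of the difference of the two 3x4 matrices.

        // Frobenius norm of (known - inferred); should be near zero.
        double err = 0;
        for (int r = 0; r < 3; ++r)
            for (int c = 0; c < 4; ++c)
            {
                double d = extParameters.ExtrinsicMatrix[r, c] - inferredExtParameters.ExtrinsicMatrix[r, c];
                err += d * d;
            }
        Console.WriteLine("Extrinsic round-trip error: {0:0.000000}", Math.Sqrt(err));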
Example #13
        public static void TestImageMapConsistency(string mapFile, string poseFile)
        {
            Func<System.Drawing.PointF, System.Drawing.PointF, double> distance2D = (p1, p2) =>
                            Math.Sqrt(Math.Pow(p1.X - p2.X, 2) + Math.Pow(p1.Y - p2.Y, 2));

            Func<DenseVector, System.Drawing.PointF> ToPointF = v => new System.Drawing.PointF((float)v[0], (float)v[1]);

            //string mapFile = @"C:\Kamal\RSE\WorkingDirs\Visualizaton\espresso-1.bag.dump.map";
            //string poseFile = @"C:\Kamal\RSE\RawData\espresso-1-fs-0\espresso-1-fs-0\espresso-1-fs-0\loop_closure\loop-closure.txt";

            DenseMatrix camCalibration = (DenseMatrix)Pose3D.CreateCalibrationMatrix(525, 320, 240);
            var intParameters = new Emgu.CV.IntrinsicCameraParameters();
            intParameters.IntrinsicMatrix = camCalibration.ToEmguMatrix();

            var poseData = ImageMap.ParsePoseData(File.ReadLines(poseFile),
                                    (frameID, poseQuaternion, posePosition) =>
                                        new
                                        {
                                            frameID,
                                            poseQuaternion,
                                            posePosition
                                        })
                                        .ToDictionary(p => p.frameID, p => new { p.posePosition, p.poseQuaternion });

            Func<DenseVector, DenseVector, DenseVector, System.Drawing.PointF> OpenCVProject =
                (point3D, posePosition, poseQuat) =>
                {
                    var extParameters = new Emgu.CV.ExtrinsicCameraParameters();
                    extParameters.RotationVector = new Emgu.CV.RotationVector3D();
                    extParameters.RotationVector.RotationMatrix = poseQuat.QuaternionToRotation().Inverse().ToEmguMatrix();
                    extParameters.TranslationVector = (-posePosition).ToColumnMatrix().ToEmguMatrix();

                    return
                    Emgu.CV.CameraCalibration.ProjectPoints(
                    new MCvPoint3D32f[] { new MCvPoint3D32f((float)point3D[0], (float)point3D[1], (float)point3D[2]) },
                    extParameters,
                    intParameters)[0];
                };

            var imageMap =
                ImageMap.LoadImageMap(mapFile, (frameID, imagePoint, depth, point3D, descriptor) =>
                                new
                                {
                                    frameID,
                                    projectedPoint = OpenCVProject(point3D, poseData[frameID].posePosition, poseData[frameID].poseQuaternion),
                                    myImpProjectedPoint = (DenseVector)Pose3D.WorldToImagePoints(point3D.ToColumnMatrix(), camCalibration, poseData[frameID].posePosition, poseData[frameID].poseQuaternion.QuaternionToRotation()).Column(0),
                                    imagePoint,
                                    depth,
                                    point3D,
                                    descriptor
                                }).ToList();

            Console.WriteLine("OpevCV vs. myImp Projection error: {0}", imageMap.Average(p => distance2D(ToPointF(p.myImpProjectedPoint), p.projectedPoint)));
            Console.WriteLine("OpevCV vs. imagepoints Projection error: {0}", imageMap.Average(p => distance2D(p.imagePoint, p.projectedPoint)));

            foreach (var p in imageMap.Where(p => distance2D(ToPointF(p.myImpProjectedPoint), p.projectedPoint) > 1000).Take(10))
            {
                Console.WriteLine("----------");
                Console.WriteLine("OpenCV:\t{0}", p.projectedPoint);
                Console.WriteLine("Image:\t{0}", p.imagePoint);
                Console.WriteLine("Depth:\t{0}", p.depth);
                Console.WriteLine("point3d:\t {0}", p.point3D);
                Console.WriteLine("pose:\t {0}", poseData[p.frameID].posePosition);
                Console.WriteLine("quat:\t {0}", poseData[p.frameID].poseQuaternion);
                Console.WriteLine("my proj impl:\t{0}",
                    p.point3D.ToColumnMatrix().WorldToImagePoints(camCalibration, poseData[p.frameID].posePosition, poseData[p.frameID].poseQuaternion.QuaternionToRotation()));

                Console.WriteLine("Re-inferred 3d point: {0}",
                    Pose3D.DPixelToWorld(
                        poseData[p.frameID].poseQuaternion,
                        poseData[p.frameID].posePosition,
                        (DenseMatrix)camCalibration.Inverse(),
                        new DenseMatrix(3, 1, new double[] { p.imagePoint.X, p.imagePoint.Y, 1.0 }),
                        new DenseMatrix(1, 1, new double[] { p.depth })));
            }
        }
Example #14
        /// <summary>
        /// Tries to find the composite pattern and returns the image points via the output parameter image_points.
        /// On success, 'true' is returned.
        /// Note that CompositePatterns can only be found if the camera's intrinsics are set.
        ///
        /// The algorithm works as follows:
        /// If the main pattern 'patternA' is found, the algorithm is already done and the resulting
        /// image_points are returned.
        /// If only 'patternB' is found, the given object_points of 'patternA' are transformed into the
        /// 'patternB' coordinate system using the predefined transformation matrix.
        /// Furthermore, an extrinsic calibration is performed to find the extrinsic matrix, which describes
        /// the relation between the camera coordinate system and the coordinate system of 'patternB'.
        /// Finally, the library function 'ProjectPoints' is called to project the transformed object_points
        /// (now expressed in 'patternB' coordinates) into the camera image plane.
        /// The projected points correspond to the image_points of 'patternA'.
        /// ==> To sum up: the predefined transformation is used to calculate the image_points of 'patternA', even
        /// if 'patternA' is invisible.
        /// </summary>
        /// <param name="img"> Input grayscale image. </param>
        /// <param name="image_points"> 2D output image points. </param>
        /// <returns> true... if pattern has been found; false... otherwise. </returns>
        public override bool FindPattern(Emgu.CV.Image<Emgu.CV.Structure.Gray, byte> img, out System.Drawing.PointF[] image_points)
        {
            if (this.IntrinsicParameters != null && _patternA != null && _patternB != null)
            {
                bool foundA = false;
                System.Drawing.PointF[] currentImagePointsA;
                System.Drawing.PointF[] currentImagePointsB;

                //set the object_points of the composite pattern to the object_points of 'patternA'
                this.ObjectPoints = _patternA.ObjectPoints;

                //try to find 'patternA'
                foundA = _patternA.FindPattern(img, out currentImagePointsA);

                //if 'patternA' could be found: the image_points have been found.
                if (foundA)
                {
                    image_points = currentImagePointsA;
                    //_logger.Info("Pattern found.");
                    return(true);
                }
                else
                //else: try to find 'patternB'
                if (_patternB.FindPattern(img, out currentImagePointsB))
                {
                    ExtrinsicCalibration ec_B = null;
                    Emgu.CV.ExtrinsicCameraParameters ecp_B = null;
                    Matrix extrinsic_matrix = Matrix.Identity(4, 4);
                    Matrix temp_matrix      = null;
                    Emgu.CV.Structure.MCvPoint3D32f[] transformedCornerPoints = new Emgu.CV.Structure.MCvPoint3D32f[_patternA.ObjectPoints.Length];

                    try
                    {
                        //if 'patternB' has been found: find the extrinsic matrix (relation between the coordinate systems of 'patternB' and the camera)
                        ec_B  = new ExtrinsicCalibration(_patternB.ObjectPoints, this.IntrinsicParameters);
                        ecp_B = ec_B.Calibrate(currentImagePointsB);

                        if (ecp_B != null)
                        {
                            //form the resulting extrinsic matrix to a homogeneous (4x4) matrix.
                            temp_matrix = Parsley.Core.Extensions.ConvertToParsley.ToParsley(ecp_B.ExtrinsicMatrix);
                            extrinsic_matrix.SetMatrix(0, temp_matrix.RowCount - 1, 0, temp_matrix.ColumnCount - 1, temp_matrix);

                            //transform object points of A into B coordinate system.
                            transformedCornerPoints = MatrixTransformation.TransformVectorToEmgu(_transformationBToA.Inverse(), 1.0, _patternA.ObjectPoints).ToArray<Emgu.CV.Structure.MCvPoint3D32f>();

                            //project the points into the 2D camera plane (image_points)
                            image_points = Emgu.CV.CameraCalibration.ProjectPoints(transformedCornerPoints, ecp_B, this.IntrinsicParameters);
                            return(true);
                        }
                        else
                        {
                            _logger.Warn("Error calculating extrinsic parameters.");
                            image_points = null;
                            return(false);
                        }
                    }
                    catch (Exception e)
                    {
                        _logger.Warn("Caught Exception: {0}.", e);
                        image_points = null;
                        return(false);
                    }
                }
                else
                {
                    //reset the image_points if the pattern could not be found.
                    image_points = null;
                    return(false);
                }
            }
            else
            {
                _logger.Warn("Error: Intrinsics are needed to find a Composite Pattern but not available.");
                image_points = null;
                return(false);
            }
        }
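A minimal usage sketch (the composite instance and gray image are illustrative names): the caller never needs to know which sub-pattern was actually detected.

        System.Drawing.PointF[] image_points;
        if (composite.FindPattern(gray, out image_points))
        {
            // image_points holds patternA's points, whether patternA was seen
            // directly or reconstructed through patternB and the fixed transform.
        }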