Example #1
        /// <summary>
        /// Saves the depth image as a textured OBJ mesh; an MTL file and a copy of the
        /// color image are written next to the OBJ.
        /// </summary>
        /// <param name="objPath">Output path of the .obj file.</param>
        /// <param name="calibration">Kinect2 calibration used to map depth pixels into the color image.</param>
        /// <param name="depthImage">Depth image with values in millimeters.</param>
        /// <param name="colorImageFileName">Color image file copied alongside the OBJ and used as its texture.</param>
        /// <param name="pose">Optional transformation pose (4x4 matrix); supply Identity by default.</param>
        public static void Save(string objPath, Kinect2Calibration calibration, ShortImage depthImage, string colorImageFileName, Matrix pose)
        {
            var objFilename  = Path.GetFileNameWithoutExtension(objPath);
            var objDirectory = Path.GetDirectoryName(objPath);

            if (!Directory.Exists(objDirectory))
            {
                Directory.CreateDirectory(objDirectory);
            }

            // copy the color image next to the OBJ so it can serve as the texture
            if (File.Exists(colorImageFileName))
            {
                File.Copy(colorImageFileName, objDirectory + "/" + objFilename + ".jpg", true);
            }
            else
            {
                Console.WriteLine("Saving to OBJ Error! File " + colorImageFileName + " doesn't exists!");
            }

            // Because we need to form triangles, we go back to the depth image
            var quadOffsets = new System.Drawing.Point[]
            {
                new System.Drawing.Point(0, 0),
                new System.Drawing.Point(1, 0),
                new System.Drawing.Point(0, 1),
                new System.Drawing.Point(1, 0),
                new System.Drawing.Point(1, 1),
                new System.Drawing.Point(0, 1),
            };

            var streamWriter  = new StreamWriter(objDirectory + "/" + objFilename + ".obj");
            var mtlFileWriter = new StreamWriter(objDirectory + "/" + objFilename + ".mtl");

            streamWriter.WriteLine("mtllib " + objFilename + ".mtl");
            uint nextVertexIndex = 1;

            mtlFileWriter.WriteLine("newmtl camera0");
            mtlFileWriter.WriteLine("Ka 1.000000 1.000000 1.000000");
            mtlFileWriter.WriteLine("Kd 1.000000 1.000000 1.000000");
            mtlFileWriter.WriteLine("Ks 0.000000 0.000000 0.000000");
            mtlFileWriter.WriteLine("Tr 1.000000");
            mtlFileWriter.WriteLine("illum 1");
            mtlFileWriter.WriteLine("Ns 0.000000");
            mtlFileWriter.WriteLine("map_Kd " + objFilename + ".jpg");


            streamWriter.WriteLine("usemtl camera0");

            // build a vertex (world-space position + color texture coordinate) for every depth pixel
            var depthFrameToCameraSpaceTable = calibration.ComputeDepthFrameToCameraSpaceTable();
            var vertices    = new Vertex[Kinect2Calibration.depthImageWidth * Kinect2Calibration.depthImageHeight];
            var colorCamera = new Matrix(4, 1);
            var depthCamera = new Matrix(4, 1);
            var world       = new Matrix(4, 1);

            for (int y = 0; y < Kinect2Calibration.depthImageHeight; y++)
            {
                for (int x = 0; x < Kinect2Calibration.depthImageWidth; x++)
                {
                    // depth in meters (depth image values are in millimeters)
                    var depth = depthImage[x, y] / 1000f;
                    // convert to depth camera space
                    var point = depthFrameToCameraSpaceTable[Kinect2Calibration.depthImageWidth * y + x];
                    depthCamera[0] = point.X * depth;
                    depthCamera[1] = point.Y * depth;
                    depthCamera[2] = depth;
                    depthCamera[3] = 1;

                    // world coordinates
                    world.Mult(pose, depthCamera);
                    //world.Scale(1.0 / world[3]); not necessary for this transform

                    // convert to color camera space
                    colorCamera.Mult(calibration.depthToColorTransform, depthCamera);
                    colorCamera.Scale(1.0 / colorCamera[3]);

                    // project to color image
                    double colorU, colorV;
                    CameraMath.Project(calibration.colorCameraMatrix, calibration.colorLensDistortion, colorCamera[0], colorCamera[1], colorCamera[2], out colorU, out colorV);
                    colorU /= (double)Kinect2Calibration.colorImageWidth;
                    colorV /= (double)Kinect2Calibration.colorImageHeight;

                    var vertex = new Vertex();
                    vertex.x = (float)world[0];
                    vertex.y = (float)world[1];
                    vertex.z = (float)world[2];
                    vertex.u = (float)colorU;
                    vertex.v = (float)colorV;
                    vertices[Kinect2Calibration.depthImageWidth * y + x] = vertex;
                }
            }

            streamWriter.WriteLine("g camera0");
            streamWriter.WriteLine("usemtl camera0");

            // examine each triangle
            for (int y = 0; y < Kinect2Calibration.depthImageHeight - 1; y++)
            {
                for (int x = 0; x < Kinect2Calibration.depthImageWidth - 1; x++)
                {
                    int offseti = 0;
                    for (int tri = 0; tri < 2; tri++)
                    {
                        // the indexes of the vertices of this triangle
                        var i0 = Kinect2Calibration.depthImageWidth * (y + quadOffsets[offseti].Y) + (x + quadOffsets[offseti].X);
                        var i1 = Kinect2Calibration.depthImageWidth * (y + quadOffsets[offseti + 1].Y) + (x + quadOffsets[offseti + 1].X);
                        var i2 = Kinect2Calibration.depthImageWidth * (y + quadOffsets[offseti + 2].Y) + (x + quadOffsets[offseti + 2].X);

                        // a triangle is kept only if all three vertices have valid depth and no
                        // edge spans a depth discontinuity larger than 0.2 m
                        bool nonZero = (vertices[i0].z != 0) && (vertices[i1].z != 0) && (vertices[i2].z != 0);

                        bool noJump01 = Vertex.DistanceSquared(vertices[i0], vertices[i1]) < 0.2 * 0.2;
                        bool noJump02 = Vertex.DistanceSquared(vertices[i0], vertices[i2]) < 0.2 * 0.2;
                        bool noJump12 = Vertex.DistanceSquared(vertices[i1], vertices[i2]) < 0.2 * 0.2;

                        bool valid = nonZero && noJump01 && noJump02 && noJump12;
                        if (valid)
                        {
                            // only add the vertex if we haven't already
                            if (vertices[i0].index == 0)
                            {
                                streamWriter.WriteLine(vertices[i0]);
                                vertices[i0].index = nextVertexIndex++;
                            }
                            if (vertices[i1].index == 0)
                            {
                                streamWriter.WriteLine(vertices[i1]);
                                vertices[i1].index = nextVertexIndex++;
                            }
                            if (vertices[i2].index == 0)
                            {
                                streamWriter.WriteLine(vertices[i2]);
                                vertices[i2].index = nextVertexIndex++;
                            }
                            streamWriter.WriteLine("f {0}/{0} {1}/{1} {2}/{2}", vertices[i0].index, vertices[i1].index, vertices[i2].index);
                        }
                        offseti += 3;
                    }
                }
            }

            streamWriter.Close();
            mtlFileWriter.Close();
        }
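
A minimal call sketch for the Save method above (not part of the original example). The paths, the hypothetical LoadCalibrationSomehow helper, and the assumption that ShortImage has a (width, height) constructor like the FloatImage mentioned in the commented-out code are all illustrative.

        // Illustrative usage only; how the calibration and depth frame are obtained is outside this sketch.
        Kinect2Calibration calibration = LoadCalibrationSomehow(); // hypothetical helper; replace with your own loading code
        var depthImage = new ShortImage(Kinect2Calibration.depthImageWidth, Kinect2Calibration.depthImageHeight);
        // ... fill depthImage with a captured depth frame (values in millimeters) ...

        var pose = Matrix.Identity(4, 4); // identity keeps the mesh in the depth camera's coordinate frame
        Save(@"scans\room.obj", calibration, depthImage, @"scans\room_color.jpg", pose);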
Example #2
        static double CalibrateColorCamera(List<Matrix> worldPoints, List<System.Drawing.PointF> imagePoints, Matrix cameraMatrix, Matrix distCoeffs, Matrix rotation, Matrix translation, bool silent)
        {
            int nPoints = worldPoints.Count;

            // initial estimate of the extrinsics (rotation as a Rodrigues vector, translation) via DLT
            {
                Matrix R, t;
                DLT(cameraMatrix, distCoeffs, worldPoints, imagePoints, out R, out t);
                var r = Orientation.RotationVector(R);
                rotation.Copy(r);
                translation.Copy(t);
            }

            // pack parameters into vector
            // parameters: fx, fy, cx, cy, k1, k2, + 3 for rotation, 3 translation = 12
            int nParameters = 12;
            var parameters  = new Matrix(nParameters, 1);

            {
                int pi = 0;
                parameters[pi++] = cameraMatrix[0, 0]; // fx
                parameters[pi++] = cameraMatrix[1, 1]; // fy
                parameters[pi++] = cameraMatrix[0, 2]; // cx
                parameters[pi++] = cameraMatrix[1, 2]; // cy
                parameters[pi++] = distCoeffs[0];      // k1
                parameters[pi++] = distCoeffs[1];      // k2
                parameters[pi++] = rotation[0];
                parameters[pi++] = rotation[1];
                parameters[pi++] = rotation[2];
                parameters[pi++] = translation[0];
                parameters[pi++] = translation[1];
                parameters[pi++] = translation[2];
            }

            // size of our error vector
            int nValues = nPoints * 2; // each component (x,y) is a separate entry

            LevenbergMarquardt.Function function = delegate(Matrix p)
            {
                var fvec = new Matrix(nValues, 1);


                // unpack parameters
                int    pi = 0;
                double fx = p[pi++];
                double fy = p[pi++];
                double cx = p[pi++];
                double cy = p[pi++];

                double k1 = p[pi++];
                double k2 = p[pi++];

                var K = Matrix.Identity(3, 3);
                K[0, 0] = fx;
                K[1, 1] = fy;
                K[0, 2] = cx;
                K[1, 2] = cy;

                var d = Matrix.Zero(5, 1);
                d[0] = k1;
                d[1] = k2;

                var r = new Matrix(3, 1);
                r[0] = p[pi++];
                r[1] = p[pi++];
                r[2] = p[pi++];

                var t = new Matrix(3, 1);
                t[0] = p[pi++];
                t[1] = p[pi++];
                t[2] = p[pi++];

                var R = Orientation.Rodrigues(r);



                var x = new Matrix(3, 1);

                int fveci = 0;
                for (int i = 0; i < worldPoints.Count; i++)
                {
                    // transform world point to local camera coordinates
                    x.Mult(R, worldPoints[i]);
                    x.Add(t);

                    // fvec_i = y_i - f(x_i)
                    double u, v;
                    Kinect2Calibration.Project(K, d, x[0], x[1], x[2], out u, out v);

                    var imagePoint = imagePoints[i];
                    fvec[fveci++] = imagePoint.X - u;
                    fvec[fveci++] = imagePoint.Y - v;
                }
                return(fvec);
            };

            // optimize
            var calibrate = new LevenbergMarquardt(function);

            while (calibrate.State == LevenbergMarquardt.States.Running)
            {
                var rmsError = calibrate.MinimizeOneStep(parameters);
                if (!silent)
                {
                    Console.WriteLine("rms error = " + rmsError);
                }
            }
            if (!silent)
            {
                for (int i = 0; i < nParameters; i++)
                {
                    Console.Write(parameters[i] + "\t");
                }
                Console.WriteLine();
            }
            // unpack parameters
            {
                int    pi = 0;
                double fx = parameters[pi++];
                double fy = parameters[pi++];
                double cx = parameters[pi++];
                double cy = parameters[pi++];
                double k1 = parameters[pi++];
                double k2 = parameters[pi++];
                cameraMatrix[0, 0] = fx;
                cameraMatrix[1, 1] = fy;
                cameraMatrix[0, 2] = cx;
                cameraMatrix[1, 2] = cy;
                distCoeffs[0]      = k1;
                distCoeffs[1]      = k2;
                rotation[0]        = parameters[pi++];
                rotation[1]        = parameters[pi++];
                rotation[2]        = parameters[pi++];
                translation[0]     = parameters[pi++];
                translation[1]     = parameters[pi++];
                translation[2]     = parameters[pi++];
            }


            return(calibrate.RMSError);
        }
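
For reference, the twelve parameters optimized above enter the residual through the standard pinhole model with two radial distortion terms. The exact formula inside Kinect2Calibration.Project is not reproduced in this example, so treat the following as the conventional model matching the parameter set (f_x, f_y, c_x, c_y, k_1, k_2) plus the rotation R (from the Rodrigues vector) and translation t:

\begin{aligned}
(x_c,\, y_c,\, z_c)^T &= R\,X + t, \\
x' &= x_c / z_c, \qquad y' = y_c / z_c, \qquad \rho^2 = x'^2 + y'^2, \\
u &= f_x\, x' \left(1 + k_1 \rho^2 + k_2 \rho^4\right) + c_x, \\
v &= f_y\, y' \left(1 + k_1 \rho^2 + k_2 \rho^4\right) + c_y.
\end{aligned}

The residual vector stacks (u_i^{obs} - u_i, v_i^{obs} - v_i) over all correspondences, and LevenbergMarquardt.MinimizeOneStep reduces its sum of squares one iteration at a time.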
Example #3
        // Use DLT to obtain estimate of calibration rig pose; in our case this is the pose of the Kinect camera.
        // This pose estimate will provide a good initial estimate for subsequent projector calibration.
        // Note for a full PnP solution we should probably refine with Levenberg-Marquardt.
        // DLT is described in Hartley and Zisserman, p. 178; a sketch of the linear system appears after this function.
        static void DLT(Matrix cameraMatrix, Matrix distCoeffs, List<Matrix> worldPoints, List<System.Drawing.PointF> imagePoints, out Matrix R, out Matrix t)
        {
            int n = worldPoints.Count;

            var A = Matrix.Zero(2 * n, 12);

            for (int j = 0; j < n; j++)
            {
                var X          = worldPoints[j];
                var imagePoint = imagePoints[j];

                double x, y;
                Kinect2Calibration.Undistort(cameraMatrix, distCoeffs, imagePoint.X, imagePoint.Y, out x, out y);

                double w = 1;

                int ii = 2 * j;
                A[ii, 4] = -w * X[0];
                A[ii, 5] = -w * X[1];
                A[ii, 6] = -w * X[2];
                A[ii, 7] = -w;

                A[ii, 8]  = y * X[0];
                A[ii, 9]  = y * X[1];
                A[ii, 10] = y * X[2];
                A[ii, 11] = y;

                ii++; // next row
                A[ii, 0] = w * X[0];
                A[ii, 1] = w * X[1];
                A[ii, 2] = w * X[2];
                A[ii, 3] = w;

                A[ii, 8]  = -x * X[0];
                A[ii, 9]  = -x * X[1];
                A[ii, 10] = -x * X[2];
                A[ii, 11] = -x;
            }

            var Pcolumn = new Matrix(12, 1);
            {
                var U  = new Matrix(2 * n, 12);
                var V  = new Matrix(12, 12);
                var ww = new Matrix(12, 1);

                A.SVD(U, ww, V);

                // find smallest singular value
                int min = 0;
                ww.Minimum(ref min);

                // Pcolumn is the column of V corresponding to the smallest singular value
                Pcolumn.CopyCol(V, min);
            }

            // reshape into 3x4 projection matrix
            var P = new Matrix(3, 4);

            P.Reshape(Pcolumn);

            // x = P * X
            // P = K [ R | t ]
            // inv(K) P = [ R | t ]

            //var Kinv = new Matrix(3, 3);
            //Kinv.Inverse(cameraMatrix);
            //var Rt = new Matrix(3, 4);
            //Rt.Mult(Kinv, P);

            var Rt = new Matrix(3, 4);

            Rt.Copy(P); // P does not contain camera matrix (by earlier undistort)

            R = new Matrix(3, 3);
            t = new Matrix(3, 1);

            for (int ii = 0; ii < 3; ii++)
            {
                t[ii] = Rt[ii, 3];
                for (int jj = 0; jj < 3; jj++)
                {
                    R[ii, jj] = Rt[ii, jj];
                }
            }

            //R.Copy(0, 0, Rt);
            //t.CopyCol(Rt, 3);

            if (R.Det() < 0)
            {
                R.Scale(-1); t.Scale(-1);
            }

            // orthogonalize R
            {
                var U  = new Matrix(3, 3);
                var Vt = new Matrix(3, 3);
                var V  = new Matrix(3, 3);
                var ww = new Matrix(3, 1);
                //OpenCV.SVD.Compute(R, out ww, out U, out Vt);

                R.SVD(U, ww, V);
                Vt.Transpose(V);

                R.Mult(U, Vt);
                double s = ww.Sum() / 3.0;
                t.Scale(1.0 / s);
            }

            // compute error?
        }
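
The 2n-by-12 system assembled above follows the Hartley and Zisserman formulation cited in the comment. As a sketch (their notation, with w = 1 and the image point (x, y) already undistorted into normalized camera coordinates), each correspondence X <-> (x, y) contributes two rows:

\begin{pmatrix}
\mathbf{0}^T & -w\,\tilde{X}^T & y\,\tilde{X}^T \\
w\,\tilde{X}^T & \mathbf{0}^T & -x\,\tilde{X}^T
\end{pmatrix}
\begin{pmatrix} P^{1} \\ P^{2} \\ P^{3} \end{pmatrix} = \mathbf{0},
\qquad \tilde{X} = (X_1,\, X_2,\, X_3,\, 1)^T,

where P^1, P^2, P^3 are the rows of the 3x4 matrix P stacked into the 12-vector Pcolumn. The singular vector of A with the smallest singular value minimizes ||A p|| subject to ||p|| = 1, which is why the column of V selected above is reshaped into P. Because the image points were undistorted into normalized coordinates, K has already been factored out, so P approximates [R | t] only up to scale; the code recovers that scale by orthogonalizing R via its SVD and dividing t by the mean of R's singular values.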
Example #4
        static double CalibrateDepthCamera(List<Matrix> worldPoints, List<System.Drawing.PointF> imagePoints, Matrix cameraMatrix, Matrix distCoeffs, bool silent)
        {
            int nPoints = worldPoints.Count;

            // pack parameters into vector
            // parameters: fx, fy, cx, cy, k1, k2 = 6 parameters
            int nParameters = 6;
            var parameters  = new Matrix(nParameters, 1);

            {
                int pi = 0;
                parameters[pi++] = cameraMatrix[0, 0]; // fx
                parameters[pi++] = cameraMatrix[1, 1]; // fy
                parameters[pi++] = cameraMatrix[0, 2]; // cx
                parameters[pi++] = cameraMatrix[1, 2]; // cy
                parameters[pi++] = distCoeffs[0];      // k1
                parameters[pi++] = distCoeffs[1];      // k2
            }

            // size of our error vector
            int nValues = nPoints * 2; // each component (x,y) is a separate entry

            LevenbergMarquardt.Function function = delegate(Matrix p)
            {
                var fvec = new Matrix(nValues, 1);

                // unpack parameters
                int    pi = 0;
                double fx = p[pi++];
                double fy = p[pi++];
                double cx = p[pi++];
                double cy = p[pi++];
                double k1 = p[pi++];
                double k2 = p[pi++];

                var K = Matrix.Identity(3, 3);
                K[0, 0] = fx;
                K[1, 1] = fy;
                K[0, 2] = cx;
                K[1, 2] = cy;

                var d = Matrix.Zero(5, 1);
                d[0] = k1;
                d[1] = k2;

                int fveci = 0;
                for (int i = 0; i < worldPoints.Count; i++)
                {
                    // fvec_i = y_i - f(x_i)
                    double u, v;
                    var    x = worldPoints[i];
                    Kinect2Calibration.Project(K, d, x[0], x[1], x[2], out u, out v);

                    var imagePoint = imagePoints[i];
                    fvec[fveci++] = imagePoint.X - u;
                    fvec[fveci++] = imagePoint.Y - v;
                }
                return(fvec);
            };

            // optimize
            var calibrate = new LevenbergMarquardt(function);

            while (calibrate.State == LevenbergMarquardt.States.Running)
            {
                var rmsError = calibrate.MinimizeOneStep(parameters);
                if (!silent)
                {
                    Console.WriteLine("rms error = " + rmsError);
                }
            }
            if (!silent)
            {
                for (int i = 0; i < nParameters; i++)
                {
                    Console.Write(parameters[i] + "\t");
                }
                Console.WriteLine();
            }

            // unpack parameters
            {
                int    pi = 0;
                double fx = parameters[pi++];
                double fy = parameters[pi++];
                double cx = parameters[pi++];
                double cy = parameters[pi++];
                double k1 = parameters[pi++];
                double k2 = parameters[pi++];
                cameraMatrix[0, 0] = fx;
                cameraMatrix[1, 1] = fy;
                cameraMatrix[0, 2] = cx;
                cameraMatrix[1, 2] = cy;
                distCoeffs[0]      = k1;
                distCoeffs[1]      = k2;
            }


            return(calibrate.RMSError);
        }
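
A hedged usage sketch for CalibrateDepthCamera; the correspondence lists, the 360-pixel focal-length guess, and the half-resolution principal point are placeholders rather than values from the source.

        // Correspondences gathered elsewhere: 3-D calibration-target points (3x1 matrices)
        // and their detected pixel locations in the depth image.
        var worldPoints = new List<Matrix>();
        var imagePoints = new List<System.Drawing.PointF>();
        // ... populate worldPoints / imagePoints ...

        // Rough initial intrinsics; the optimizer refines fx, fy, cx, cy, k1, k2 in place.
        var cameraMatrix = Matrix.Identity(3, 3);
        cameraMatrix[0, 0] = 360;  // fx guess (pixels)
        cameraMatrix[1, 1] = 360;  // fy guess (pixels)
        cameraMatrix[0, 2] = Kinect2Calibration.depthImageWidth / 2.0;
        cameraMatrix[1, 2] = Kinect2Calibration.depthImageHeight / 2.0;
        var distCoeffs = Matrix.Zero(5, 1);

        double rms = CalibrateDepthCamera(worldPoints, imagePoints, cameraMatrix, distCoeffs, silent: false);
        Console.WriteLine("final rms reprojection error = " + rms);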