Example #1
0
        public Solve()
        {
            // Demo: solve the 3x3 linear system
            //  x +  y +  z = 6
            // 2x - 3y + 4z = 8
            // 4x + 4y - 4z = 0

            // Coefficient matrix (row-major) and right-hand side vector.
            double[] coefficients = { 1, 1, 1, 2, -3, 4, 4, 4, -4 };
            double[] constants = { 6, 8, 0 };

            CvMat coefMat = new CvMat(3, 3, MatrixType.F64C1, coefficients);
            CvMat rhsMat = new CvMat(3, 1, MatrixType.F64C1, constants);

            // Solve X = inv(A) * B. Clone gives Inv() a correctly-sized destination.
            CvMat invMat = coefMat.Clone();
            coefMat.Inv(invMat);

            CvMat solution = invMat * rhsMat;

            Console.WriteLine("X = {0}", solution[0].Val0);
            Console.WriteLine("Y = {0}", solution[1].Val0);
            Console.WriteLine("Z = {0}", solution[2].Val0);
            Console.Read();
        }
Example #2
0
        public Solve()
        {
            // Solves the system A * X = B where:
            //  x +  y +  z = 6
            // 2x - 3y + 4z = 8
            // 4x + 4y - 4z = 0

            double[] lhs = new double[]
            {
                1, 1, 1,
                2, -3, 4,
                4, 4, -4,
            };
            double[] rhs = new double[] { 6, 8, 0 };

            CvMat matLhs = new CvMat(3, 3, MatrixType.F64C1, lhs);
            CvMat matRhs = new CvMat(3, 1, MatrixType.F64C1, rhs);

            // X = inv(A) * B; the clone is the pre-sized destination for Inv().
            CvMat matInverse = matLhs.Clone();
            matLhs.Inv(matInverse);
            CvMat matSolution = matInverse * matRhs;

            // Print each unknown (Val0 reads the scalar at that element).
            Console.WriteLine("X = {0}", matSolution[0].Val0);
            Console.WriteLine("Y = {0}", matSolution[1].Val0);
            Console.WriteLine("Z = {0}", matSolution[2].Val0);
            Console.Read();
        }
Example #3
0
        private double[] Cross_intersection_3D(double[] Intersection, double[,] first_projcet, double[,] second_projcet, CvPoint Correspond_point, int contour_point_index, double red, double green, double blue, int left_image_number, int right_image_number, int contour_index_model_operation_number)// triangulate the 3D coordinates of a point
        {
            // Triangulates a 3D point from a pixel in the first image
            // (Intersection[1], Intersection[2]) and its correspondence in the
            // second image (Correspond_point), given the two 3x4 projection
            // matrices. Implemented after the paper "A new method for 3D
            // reconstruction of spatial points and its uncertainty study"
            // (《空间点三维重建新方法及其不确定性研究》).
            //
            // Returns an 11-element record:
            //   [index, Xc, Yc, Zc, 1, red, green, blue, leftImg, rightImg, opNumber]
            // where (Xc, Yc, Zc) is read from rows 3..5 of the 6x1 solution vector
            // (presumably the point on the second viewing ray; rows 0..2 hold the
            // first-ray candidate per the original commented-out code — TODO confirm).

            // Four ray constraints: two rows per image, built from the pixel
            // coordinate times row 2 of the projection matrix minus rows 0/1.
            double[,] A = new double[4, 3] {
                { Intersection[1] * first_projcet[2, 0] - first_projcet[0, 0], Intersection[1] * first_projcet[2, 1] - first_projcet[0, 1], Intersection[1] * first_projcet[2, 2] - first_projcet[0, 2] },
                { Intersection[2] * first_projcet[2, 0] - first_projcet[1, 0], Intersection[2] * first_projcet[2, 1] - first_projcet[1, 1], Intersection[2] * first_projcet[2, 2] - first_projcet[1, 2] },
                { Correspond_point.X * second_projcet[2, 0] - second_projcet[0, 0], Correspond_point.X * second_projcet[2, 1] - second_projcet[0, 1], Correspond_point.X * second_projcet[2, 2] - second_projcet[0, 2] },
                { Correspond_point.Y * second_projcet[2, 0] - second_projcet[1, 0], Correspond_point.Y * second_projcet[2, 1] - second_projcet[1, 1], Correspond_point.Y * second_projcet[2, 2] - second_projcet[1, 2] }
            };

            // Right-hand side; the two trailing zeros pair with the two
            // cross-product rows appended to D below.
            double[,] y = new double[6, 1] {
                { first_projcet[0, 3] - Intersection[1] * first_projcet[2, 3] },
                { first_projcet[1, 3] - Intersection[2] * first_projcet[2, 3] },
                { second_projcet[0, 3] - (Correspond_point.X * second_projcet[2, 3]) },
                { second_projcet[1, 3] - (Correspond_point.Y * second_projcet[2, 3]) },
                { 0 }, { 0 }
            };

            // s1 = cross(A row 0, A row 1); s2 = cross(A row 2, A row 3).
            double[,] s1 = new double[1, 3] {
                { A[0, 1] * A[1, 2] - A[0, 2] * A[1, 1], A[1, 0] * A[0, 2] - A[0, 0] * A[1, 2], A[0, 0] * A[1, 1] - A[1, 0] * A[0, 1] }
            };
            double[,] s2 = new double[1, 3] {
                { A[2, 1] * A[3, 2] - A[2, 2] * A[3, 1], A[3, 0] * A[2, 2] - A[2, 0] * A[3, 2], A[2, 0] * A[3, 1] - A[3, 0] * A[2, 1] }
            };

            // 6x6 block system: the first-image rows act on the first unknown
            // point, the second-image rows on the second, and the last two rows
            // tie the two points together along the s1/s2 directions.
            double[,] D = new double[6, 6] {
                { A[0, 0], A[0, 1], A[0, 2], 0, 0, 0 },
                { A[1, 0], A[1, 1], A[1, 2], 0, 0, 0 },
                { 0, 0, 0, A[2, 0], A[2, 1], A[2, 2] },
                { 0, 0, 0, A[3, 0], A[3, 1], A[3, 2] },
                { s1[0, 0], s1[0, 1], s1[0, 2], -s1[0, 0], -s1[0, 1], -s1[0, 2] },
                { s2[0, 0], s2[0, 1], s2[0, 2], -s2[0, 0], -s2[0, 1], -s2[0, 2] }
            };

            CvMat D_mat  = new CvMat(6, 6, MatrixType.F64C1, D);
            CvMat D1_mat = new CvMat(6, 1, MatrixType.F64C1, y);

            // X = inv(D) * y. Clone() pre-sizes the destination for Inv().
            // (The original code also allocated a throwaway 6x6 CvMat and a
            // throwaway 6x1 CvMat that leaked before being overwritten.)
            CvMat matAInv1 = D_mat.Clone();
            D_mat.Inv(matAInv1);
            CvMat result = matAInv1 * D1_mat;

            double Xc = result[3].Val0; double Yc = result[4].Val0; double Zc = result[5].Val0;

            // NOTE: sampling the pixel color here was found to be very slow, so
            // the caller passes (red, green, blue) in instead.
            double[] point_3D_location = new double[11] {
                contour_point_index, Xc, Yc, Zc, 1, red, green, blue, left_image_number, right_image_number, contour_index_model_operation_number
            };

            // Release unmanaged matrices allocated by this method.
            Cv.ReleaseMat(D_mat);
            Cv.ReleaseMat(D1_mat);
            Cv.ReleaseMat(matAInv1);
            Cv.ReleaseMat(result);
            return (point_3D_location);
        }
Example #4
0
        private CvMat Computecorrespondepilines(double[,] first_projcet, double[,] second_projcet, double[,] right_contour_point)// compute the epipolar lines between the two images; valid under a narrow baseline. Returns two points lying on each line (intersection code takes points, not A/B/C coefficients)
        {
            // For each contour point (one per column of right_contour_point) this
            // computes its epipolar line in the other image and returns, per line,
            // two points on it: (0, -C/B) and (-C/A, 0), stacked as a 4xN matrix.
            //
            // The fundamental matrix is derived from the two projection matrices
            // following the "fundamental matrix from projection matrices" write-up:
            //   F = [m]x * M21 * inv(M11)
            // where M11/M21 are the left 3x3 blocks, m1/m2 the translation columns,
            // and m = m2 - M21 * inv(M11) * m1, with [m]x its skew-symmetric matrix.

            CvMat Epiline_point = new CvMat(4, right_contour_point.GetLength(1), MatrixType.F64C1);

            // Left 3x3 blocks of the two 3x4 projection matrices.
            double[,] M11 = new double[3, 3] {
                { first_projcet[0, 0], first_projcet[0, 1], first_projcet[0, 2] },
                { first_projcet[1, 0], first_projcet[1, 1], first_projcet[1, 2] },
                { first_projcet[2, 0], first_projcet[2, 1], first_projcet[2, 2] }
            };
            double[,] M21 = new double[3, 3] {
                { second_projcet[0, 0], second_projcet[0, 1], second_projcet[0, 2] },
                { second_projcet[1, 0], second_projcet[1, 1], second_projcet[1, 2] },
                { second_projcet[2, 0], second_projcet[2, 1], second_projcet[2, 2] }
            };

            // Fourth (translation) columns.
            double[,] m1 = new double[3, 1] {
                { first_projcet[0, 3] }, { first_projcet[1, 3] }, { first_projcet[2, 3] }
            };
            double[,] m2 = new double[3, 1] {
                { second_projcet[0, 3] }, { second_projcet[1, 3] }, { second_projcet[2, 3] }
            };

            CvMat M11_mat = new CvMat(3, 3, MatrixType.F64C1, M11);
            CvMat M21_mat = new CvMat(3, 3, MatrixType.F64C1, M21);
            CvMat m1_mat  = new CvMat(3, 1, MatrixType.F64C1, m1);
            CvMat m2_mat  = new CvMat(3, 1, MatrixType.F64C1, m2);

            CvMat M11_matInv = M11_mat.Clone();
            M11_mat.Inv(M11_matInv);

            // temp3 = M21 * inv(M11) * m1; then m = m2 - temp3.
            CvMat temp3 = M21_mat * M11_matInv * m1_mat;
            double[,] temp3_arry = new double[3, 1] {
                { temp3[0, 0] }, { temp3[1, 0] }, { temp3[2, 0] }
            };
            double[,] m_arry = MatrixSubtration(m2, temp3_arry);
            CvMat m_mat = new CvMat(3, 1, MatrixType.F64C1, m_arry);

            // Skew-symmetric cross-product matrix [m]x.
            double[,] mx_mat_arry = new double[3, 3] {
                { 0, -m_mat[2, 0], m_mat[1, 0] },
                { m_mat[2, 0], 0, -m_mat[0, 0] },
                { -m_mat[1, 0], m_mat[0, 0], 0 }
            };
            CvMat mx_mat = new CvMat(3, 3, MatrixType.F64C1, mx_mat_arry);

            // F = [m]x * M21 * inv(M11). (The original pre-allocated a 3x3 CvMat
            // here that leaked when this product replaced it.)
            CvMat FundamentalMat = mx_mat * M21_mat * M11_matInv;

            // Columns are points; whichImage = 2 maps the lines into the
            // reference image (verified empirically per the original author).
            CvMat matA = new CvMat(2, right_contour_point.GetLength(1), MatrixType.F64C1, right_contour_point);

            // The out parameter allocates correspondent_lines; pre-allocating it
            // (as the original did) only leaked the unused matrix.
            CvMat correspondent_lines;
            Cv.ComputeCorrespondEpilines(matA, 2, FundamentalMat, out correspondent_lines);

            // One epipolar line per contour point (some contour points have no
            // line in the other image); take the line's axis intercepts as the
            // two points needed later for intersection.
            for (int i = 0; i < right_contour_point.GetLength(1); i++)
            {
                // Line coefficients A*x + B*y + C = 0 for column i.
                double A = correspondent_lines[0, i];
                double B = correspondent_lines[1, i];
                double C = correspondent_lines[2, i];

                // NOTE(review): divides by A and B without a zero check — a line
                // parallel to either axis would produce infinities here; confirm
                // the narrow-baseline assumption rules that out.
                Epiline_point[0, i] = 0;
                Epiline_point[1, i] = ((-C) / B);
                Epiline_point[2, i] = ((-C) / A);
                Epiline_point[3, i] = 0;
            }

            // Release every unmanaged matrix this method allocated except the
            // returned Epiline_point.
            Cv.ReleaseMat(correspondent_lines);
            Cv.ReleaseMat(FundamentalMat);
            Cv.ReleaseMat(M11_mat);
            Cv.ReleaseMat(M21_mat);
            Cv.ReleaseMat(m1_mat);
            Cv.ReleaseMat(m2_mat);
            Cv.ReleaseMat(M11_matInv);
            Cv.ReleaseMat(temp3);
            Cv.ReleaseMat(m_mat);
            Cv.ReleaseMat(mx_mat);
            Cv.ReleaseMat(matA);
            return (Epiline_point);
        }