Example #1
        public static ARGL_CONTEXT_SETTINGS arglSetupForCurrentContext(ARParam cparam, AR_PIXEL_FORMAT pixelFormat)
        {
            ARGL_CONTEXT_SETTINGS contextSettings;

            contextSettings         = new ARGL_CONTEXT_SETTINGS();
            contextSettings.arParam = cparam; // Copy it.
            contextSettings.zoom    = 1.0f;
            // The remaining fields default to false/zero when the object is constructed, so these are redundant.
            //contextSettings.rotate90 = contextSettings.flipH = contextSettings.flipV = false;
            //contextSettings.disableDistortionCompensation = false;
            //contextSettings.textureGeometryHasBeenSetup = false;
            //contextSettings.textureObjectsHaveBeenSetup = false;
            //contextSettings.textureDataReady = false;

            // This sets pixIntFormat, pixFormat, pixType, pixSize, and resets textureDataReady.
            arglPixelFormatSet(contextSettings, pixelFormat);

            // Set pixel buffer sizes to incoming image size, by default.
            if (!arglPixelBufferSizeSet(contextSettings, cparam.xsize, cparam.ysize))
            {
                Debug.Print("ARGL: Error setting pixel buffer size.\n");
                contextSettings = null;
                return(null);
            }

            return(contextSettings);
        }
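A minimal usage sketch, not part of the original source: the AR_PIXEL_FORMAT member shown is an assumption; pass whichever format arglPixelFormatSet accepts for your capture source.

        // Hypothetical helper; the pixel format enum value is a placeholder.
        private static ARGL_CONTEXT_SETTINGS SetupArglOrFail(ARParam cparam)
        {
            var settings = arglSetupForCurrentContext(cparam, AR_PIXEL_FORMAT.AR_PIXEL_FORMAT_BGRA);
            if (settings == null)
            {
                Debug.Print("ARGL: context setup failed.\n");
            }
            return settings;
        }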
Example #2
        static int GetSquareForDatum(ARParam arParams, double[,] model, clsPoint pt)
        {
            var cpt = ModelToImageSpace(arParams, model, pt);
            var pt1 = ModelToImageSpace(arParams, model, new clsPoint(pt.x - 8, pt.y - 8));
            var pt2 = ModelToImageSpace(arParams, model, new clsPoint(pt.x + 8, pt.y - 8));
            var pt3 = ModelToImageSpace(arParams, model, new clsPoint(pt.x + 8, pt.y + 8));
            var pt4 = ModelToImageSpace(arParams, model, new clsPoint(pt.x - 8, pt.y + 8));
            var edges = new[] {
                new clsLine(pt1, pt2),
                new clsLine(pt2, pt3),
                new clsLine(pt3, pt4),
                new clsLine(pt4, pt1)
            };
            // Cast two diagonal rays through the projected centre; the nearest
            // intersection with any edge of the projected square limits how large a
            // sampling window can be used around the datum.
            var diagonals = new[] {
                new clsLine(cpt, new clsPoint(cpt.x - 1, cpt.y - 1)),
                new clsLine(cpt, new clsPoint(cpt.x + 1, cpt.y - 1))
            };

            double d = 100;
            foreach (var diagonal in diagonals)
            {
                foreach (var edge in edges)
                {
                    var p = diagonal.Intersect(edge);
                    if (p != null && p.Dist(cpt) < d)
                    {
                        d = p.Dist(cpt);
                    }
                }
            }

            // Convert the limiting half-diagonal into a half-side length in pixels.
            return((int)(d / Sqrt(2.0) + 0.5));
        }
Example #3
        private static void GetCenterPointForDatum(clsPoint pt, double[,] model, ARParam arParams, int[] vp, Image <Gray, byte> grayImage, ref Emgu.CV.Util.VectorOfPointF centerPoints)
        {
            var cpt        = ModelToImageSpace(arParams, model, pt);
            var halfSquare = GetSquareForDatum(arParams, model, pt);

            if (halfSquare < 8)
            {
                return;
            }
            if (cpt.x - halfSquare < 0 || cpt.x + halfSquare > vp[2] || cpt.y - halfSquare < 0 || cpt.y + halfSquare > vp[3])
            {
                return;
            }

            var    rect          = new Rectangle((int)cpt.x - halfSquare, (int)cpt.y - halfSquare, 2 * halfSquare, 2 * halfSquare);
            var    region        = new Mat(grayImage.Mat, rect);
            var    binaryRegion  = region.Clone();
            double otsuThreshold = CvInvoke.Threshold(region, binaryRegion, 0.0, 255.0, Emgu.CV.CvEnum.ThresholdType.Otsu);
            int    nonzero       = CvInvoke.CountNonZero(binaryRegion);
            var    square        = 4 * halfSquare * halfSquare;

            if (nonzero > square * 0.2f && nonzero < square * 0.8f)
            {
                centerPoints.Push(new PointF[] { new PointF((float)cpt.x, (float)cpt.y) });
            }
        }
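One plausible way to consume GetCenterPointForDatum, sketched here as a hypothetical helper rather than code from the original project: gather candidate centres for a list of datum points and refine them with CvInvoke.CornerSubPix. The window size and termination criteria are arbitrary illustration values.

        private static PointF[] CollectAndRefineCentres(IEnumerable<clsPoint> datums, double[,] model,
                                                        ARParam arParams, int[] vp, Image<Gray, byte> grayImage)
        {
            var centerPoints = new Emgu.CV.Util.VectorOfPointF();
            foreach (var pt in datums)
            {
                GetCenterPointForDatum(pt, model, arParams, vp, grayImage, ref centerPoints);
            }

            if (centerPoints.Size > 0)
            {
                // Sub-pixel refinement; window size and termination criteria are illustrative only.
                CvInvoke.CornerSubPix(grayImage, centerPoints, new Size(5, 5), new Size(-1, -1),
                                      new Emgu.CV.Structure.MCvTermCriteria(30, 0.01));
            }
            return centerPoints.ToArray();
        }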
Example #4
        private void button5_Click(object sender, EventArgs e)
        {
            OpenFileDialog myDlg = new OpenFileDialog();
            DialogResult   ret;
            string         myFile;
            int            i, j;

            myDlg.InitialDirectory = "C:\\Temp";
            myDlg.Filter           = "Dat Files (*.dat)|*.dat";
            ret = myDlg.ShowDialog();
            if (ret != DialogResult.OK)
            {
                return;
            }
            myFile = myDlg.FileName;


            FileStream   sr = File.Open(myFile, FileMode.Open, FileAccess.Read);
            BinaryReader br = new BinaryReader(sr);

            ARParam param = new ARParam();

            param.xsize = byteSwapInt(br.ReadInt32());
            param.ysize = byteSwapInt(br.ReadInt32());
            for (i = 0; i < 3; i++)
            {
                for (j = 0; j < 4; j++)
                {
                    param.mat[i, j] = byteSwapDouble(br.ReadDouble());
                }
            }
            for (i = 0; i < 17; i++)
            {
                if (br.PeekChar() != -1)
                {
                    param.dist_factor[i] = byteSwapDouble(br.ReadDouble());
                }
            }
            br.Close();
            sr.Close();

            System.Diagnostics.Debug.Print("Filename\t" + System.IO.Path.GetFileName(myFile));
            System.Diagnostics.Debug.Print("xSize\t" + param.xsize.ToString());
            System.Diagnostics.Debug.Print("ySize\t" + param.ysize.ToString());
            System.Diagnostics.Debug.Print("Mat[3][4]\t" + param.mat[0, 0].ToString() + "\t" + param.mat[0, 1].ToString() + "\t" + param.mat[0, 2].ToString() + "\t" + param.mat[0, 3].ToString());
            System.Diagnostics.Debug.Print("\t" + param.mat[1, 0].ToString() + "\t" + param.mat[1, 1].ToString() + "\t" + param.mat[1, 2].ToString() + "\t" + param.mat[1, 3].ToString());
            System.Diagnostics.Debug.Print("\t" + param.mat[2, 0].ToString() + "\t" + param.mat[2, 1].ToString() + "\t" + param.mat[2, 2].ToString() + "\t" + param.mat[2, 3].ToString());
            for (i = 0; i < 17; i++)
            {
                System.Diagnostics.Debug.Print("dist_factor[" + i.ToString() + "]\t" + param.dist_factor[i].ToString());
            }
        }
Example #5
        private static void LoadCameraFromFile(string myCalibFile, out Mat cameraMatrix, out Mat distortionCoeffs)
        {
            int i, j;

            FileStream   sr = File.Open(myCalibFile, FileMode.Open, FileAccess.Read);
            BinaryReader br = new BinaryReader(sr);

            ARParam param = new ARParam();

            param.xsize = byteSwapInt(br.ReadInt32());
            param.ysize = byteSwapInt(br.ReadInt32());
            for (i = 0; i < 3; i++)
            {
                for (j = 0; j < 4; j++)
                {
                    param.mat[i, j] = byteSwapDouble(br.ReadDouble());
                }
            }
            for (i = 0; i < 9; i++)
            {
                param.dist_factor[i] = byteSwapDouble(br.ReadDouble());
            }
            br.Close();
            sr.Close();

            double s = param.dist_factor[8];

            param.mat[0, 0] *= s;
            param.mat[0, 1] *= s;
            param.mat[1, 0] *= s;
            param.mat[1, 1] *= s;

            cameraMatrix     = new Mat(3, 3, Emgu.CV.CvEnum.DepthType.Cv64F, 1);
            distortionCoeffs = new Mat(4, 1, Emgu.CV.CvEnum.DepthType.Cv64F, 1);
            double[] cameraArray = new double[9];
            for (j = 0; j < 3; j++)
            {
                for (i = 0; i < 3; i++)
                {
                    cameraArray[j * 3 + i] = param.mat[j, i];
                }
            }
            double[] distCoeffArray = new double[4];
            for (i = 0; i < 4; i++)
            {
                distCoeffArray[i] = param.dist_factor[i];
            }
            Marshal.Copy(cameraArray, 0, cameraMatrix.DataPointer, 9);
            Marshal.Copy(distCoeffArray, 0, distortionCoeffs.DataPointer, 4);
        }
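A hypothetical follow-on, not from the original source, showing how the loaded matrices can be used; it mirrors what Example #10 below does inline.

        private static Mat UndistortWithCalibration(string calibFile, string imageFile)
        {
            LoadCameraFromFile(calibFile, out Mat cameraMatrix, out Mat distortionCoeffs);

            // Read the image and remove lens distortion using the calibration just loaded.
            Mat image     = CvInvoke.Imread(imageFile, Emgu.CV.CvEnum.ImreadModes.Color);
            Mat corrected = image.Clone();
            CvInvoke.Undistort(image, corrected, cameraMatrix, distortionCoeffs);
            return corrected;
        }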
Example #6
        public static clsPoint ModelToImageSpace(ARParam param, double[,] trans, clsPoint p1)
        {
            double cx, cy, cz, hx, hy, h, sx, sy;

            cx = trans[0, 0] * p1.x + trans[0, 1] * p1.y + trans[0, 3];
            cy = trans[1, 0] * p1.x + trans[1, 1] * p1.y + trans[1, 3];
            cz = trans[2, 0] * p1.x + trans[2, 1] * p1.y + trans[2, 3];
            hx = param.mat[0, 0] * cx + param.mat[0, 1] * cy + param.mat[0, 2] * cz + param.mat[0, 3];
            hy = param.mat[1, 0] * cx + param.mat[1, 1] * cy + param.mat[1, 2] * cz + param.mat[1, 3];
            h  = param.mat[2, 0] * cx + param.mat[2, 1] * cy + param.mat[2, 2] * cz + param.mat[2, 3];
            if (h == 0.0)
            {
                return(new clsPoint(0, 0));
            }
            sx = hx / h;
            sy = hy / h;
            arParamIdeal2Observ(param.dist_factor, sx, sy, out double ox, out double oy, param.dist_function_version);
            return(new clsPoint(ox, oy));
        }
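A hypothetical sketch, not from the original source: overlay a projected model point on an Emgu CV image to check the projection visually.

        private static void MarkModelPoint(Mat image, ARParam param, double[,] trans, clsPoint modelPt)
        {
            clsPoint imagePt = ModelToImageSpace(param, trans, modelPt);

            // Draw a small filled circle at the projected point in observed (distorted) image coordinates.
            CvInvoke.Circle(image, new Point((int)imagePt.x, (int)imagePt.y), 4,
                            new Emgu.CV.Structure.MCvScalar(0.0, 0.0, 255.0), -1);
        }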
Example #7
        private void WriteCalibrationFile(ARParam param)
        {
            SaveFileDialog myDlg = new SaveFileDialog();
            DialogResult   ret;
            string         myFile;
            int            i, j;

            myDlg.InitialDirectory = "C:\\Temp";
            myDlg.Filter           = "Dat Files (*.dat)|*.dat";
            ret = myDlg.ShowDialog();
            if (ret != DialogResult.OK)
            {
                return;
            }
            myFile = myDlg.FileName;

            FileStream   sw = File.Open(myFile, FileMode.Create, FileAccess.Write);
            BinaryWriter bw = new BinaryWriter(sw);

            bw.Write(byteSwapInt(param.xsize));
            bw.Write(byteSwapInt(param.ysize));

            for (i = 0; i < 3; i++)
            {
                for (j = 0; j < 4; j++)
                {
                    bw.Write(byteSwapDouble(param.mat[i, j]));
                }
            }
            for (i = 0; i < 9; i++)
            {
                bw.Write(byteSwapDouble(param.dist_factor[i]));
            }
            bw.Close();
            sw.Close();
        }
Example #8
        static void arglCameraFrustumRH(ARParam cparam, double focalmin, double focalmax, double[] m_projection)
        {
            double[,] icpara = new double[3, 4];
            double[,] trans  = new double[3, 4];
            double[,] p      = new double[3, 3], q = new double[4, 4];
            int width, height;
            int i, j;

            width  = cparam.xsize;
            height = cparam.ysize;

            if (arParamDecompMat(cparam.mat, icpara, trans) < 0)
            {
                Debug.Print("arglCameraFrustum(): arParamDecompMat() indicated parameter error.\n");
                return;
            }
            for (i = 0; i < 4; i++)
            {
                icpara[1, i] = (height - 1) * (icpara[2, i]) - icpara[1, i];
            }

            for (i = 0; i < 3; i++)
            {
                for (j = 0; j < 3; j++)
                {
                    p[i, j] = icpara[i, j] / icpara[2, 2];
                }
            }
            q[0, 0] = (2.0 * p[0, 0] / (width - 1));
            q[0, 1] = (2.0 * p[0, 1] / (width - 1));
            q[0, 2] = -((2.0 * p[0, 2] / (width - 1)) - 1.0);
            q[0, 3] = 0.0;

            q[1, 0] = 0.0;
            q[1, 1] = -(2.0 * p[1, 1] / (height - 1));
            q[1, 2] = -((2.0 * p[1, 2] / (height - 1)) - 1.0);
            q[1, 3] = 0.0;

            q[2, 0] = 0.0;
            q[2, 1] = 0.0;
            q[2, 2] = (focalmax + focalmin) / (focalmin - focalmax);
            q[2, 3] = 2.0 * focalmax * focalmin / (focalmin - focalmax);

            q[3, 0] = 0.0;
            q[3, 1] = 0.0;
            q[3, 2] = -1.0;
            q[3, 3] = 0.0;

            for (i = 0; i < 4; i++)     // Row.
            {
                // First 3 columns of the current row.
                for (j = 0; j < 3; j++) // Column.
                {
                    m_projection[i + j * 4] = q[i, 0] * trans[0, j] +
                                              q[i, 1] * trans[1, j] +
                                              q[i, 2] * trans[2, j];
                }
                // Fourth column of the current row.
                m_projection[i + 3 * 4] = q[i, 0] * trans[0, 3] +
                                          q[i, 1] * trans[1, 3] +
                                          q[i, 2] * trans[2, 3] +
                                          q[i, 3];
            }
        }
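A hypothetical usage sketch, not from the original source, assuming OpenTK's legacy fixed-function bindings (Example #9 already uses OpenTK types). The near/far clip distances are arbitrary; the array can be handed to GL.LoadMatrix directly because arglCameraFrustumRH fills it column-major, which is what glLoadMatrixd expects.

        private static void ApplyProjection(ARParam cparam)
        {
            double[] projection = new double[16];

            // Near/far clip planes chosen arbitrarily for illustration.
            arglCameraFrustumRH(cparam, 0.1, 1000.0, projection);

            GL.MatrixMode(MatrixMode.Projection);
            GL.LoadMatrix(projection);
            GL.MatrixMode(MatrixMode.Modelview);
        }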
Example #9
        private static clsPoint ModelToImageSpace2(Matrix4 projection, Matrix4 modelview, int[] vp, ARParam arParams, clsPoint3d p1)
        {
            Vector4 vec;

            vec.X = (float)p1.X;
            vec.Y = (float)p1.Y;
            vec.Z = (float)p1.Z;
            vec.W = 1.0f;

            Vector4.Transform(ref vec, ref modelview, out vec);
            Vector4.Transform(ref vec, ref projection, out vec);

            if (vec.W > float.Epsilon || vec.W < -float.Epsilon)
            {
                vec.X /= vec.W;
                vec.Y /= vec.W;
                vec.Z /= vec.W;
            }

            var p3d = new clsPoint3d(vp[0] + (1.0f + vec.X) * vp[2] / 2.0f, vp[3] - (vp[1] + (1.0f + vec.Y) * vp[3] / 2.0f), (1.0f + vec.Z) / 2.0f);

            arParamIdeal2Observ(arParams.dist_factor, p3d.X, p3d.Y, out double ox, out double oy, arParams.dist_function_version);
            return(new clsPoint(ox, oy));
        }
Example #10
        public static void DoDetection(string myCalibFile, string myImageFile)
        {
            int i, j;
            Mat myImage = Emgu.CV.CvInvoke.Imread(myImageFile, Emgu.CV.CvEnum.ImreadModes.Color);

            FileStream   sr = File.Open(myCalibFile, FileMode.Open, FileAccess.Read);
            BinaryReader br = new BinaryReader(sr);

            ARParam param = new ARParam();

            param.xsize = byteSwapInt(br.ReadInt32());
            param.ysize = byteSwapInt(br.ReadInt32());
            for (i = 0; i < 3; i++)
            {
                for (j = 0; j < 4; j++)
                {
                    param.mat[i, j] = byteSwapDouble(br.ReadDouble());
                }
            }
            for (i = 0; i < 9; i++)
            {
                param.dist_factor[i] = byteSwapDouble(br.ReadDouble());
            }
            br.Close();
            sr.Close();

            double s = param.dist_factor[8];

            param.mat[0, 0] *= s;
            param.mat[0, 1] *= s;
            param.mat[1, 0] *= s;
            param.mat[1, 1] *= s;

            Mat cameraMatrix     = new Mat(3, 3, Emgu.CV.CvEnum.DepthType.Cv64F, 1);
            Mat distortionCoeffs = new Mat(4, 1, Emgu.CV.CvEnum.DepthType.Cv64F, 1);

            double[] cameraArray = new double[9];
            for (j = 0; j < 3; j++)
            {
                for (i = 0; i < 3; i++)
                {
                    cameraArray[j * 3 + i] = param.mat[j, i];
                }
            }
            double[] distCoeffArray = new double[4];
            for (i = 0; i < 4; i++)
            {
                distCoeffArray[i] = param.dist_factor[i];
            }
            Marshal.Copy(cameraArray, 0, cameraMatrix.DataPointer, 9);
            Marshal.Copy(distCoeffArray, 0, distortionCoeffs.DataPointer, 4);

            Emgu.CV.Aruco.Dictionary myDict  = new Emgu.CV.Aruco.Dictionary(Emgu.CV.Aruco.Dictionary.PredefinedDictionaryName.Dict4X4_1000);
            Emgu.CV.Aruco.GridBoard  myBoard = new Emgu.CV.Aruco.GridBoard(2, 1, 0.08f, 0.005f, myDict, 33);


            Mat mappedImage = myImage.Clone();

            CvInvoke.Undistort(myImage, mappedImage, cameraMatrix, distortionCoeffs);

            //Mat rvecs = new Mat(3, 1, Emgu.CV.CvEnum.DepthType.Cv32F, 1);
            //Mat tvecs = new Mat(3, 1, Emgu.CV.CvEnum.DepthType.Cv32F, 1);

            Emgu.CV.Aruco.DetectorParameters myParams = Emgu.CV.Aruco.DetectorParameters.GetDefault();
            myParams.CornerRefinementMethod = Emgu.CV.Aruco.DetectorParameters.RefinementMethod.Subpix;
            //myParams.AdaptiveThreshWinSizeStep = 10;
            //myParams.AdaptiveThreshWinSizeMax = 23;
            //myParams.AdaptiveThreshWinSizeMin = 3;
            //myParams.MaxMarkerPerimeterRate = 4.0;
            //myParams.MinMarkerPerimeterRate = 0.03;
            //myParams.AdaptiveThreshConstant = 7;
            //myParams.PolygonalApproxAccuracyRate = 0.1;
            //myParams.MinCornerDistanceRate = 0.05;
            //myParams.MinDistanceToBorder = 3;
            //myParams.MinMarkerDistanceRate = 0.05;
            //myParams.CornerRefinementMinAccuracy = 0.1;
            //myParams.CornerRefinementWinSize = 5;
            //myParams.CornerRefinementMaxIterations = 30;
            myParams.MarkerBorderBits = 2;
            //myParams.PerspectiveRemoveIgnoredMarginPerCell = 0.13;
            //myParams.PerspectiveRemovePixelPerCell = 8;
            //myParams.MaxErroneousBitsInBorderRate = 0.35;
            //myParams.MinOtsuStdDev = 5.0;
            //myParams.ErrorCorrectionRate = 0.6;

            using (Emgu.CV.Util.VectorOfInt ids = new Emgu.CV.Util.VectorOfInt())
                using (Emgu.CV.Util.VectorOfVectorOfPointF corners = new Emgu.CV.Util.VectorOfVectorOfPointF())
                    using (Emgu.CV.Util.VectorOfVectorOfPointF rejected = new Emgu.CV.Util.VectorOfVectorOfPointF()) {
                        Emgu.CV.Aruco.ArucoInvoke.DetectMarkers(mappedImage, myDict, corners, ids, myParams, rejected);

                        if (ids.Size > 0)
                        {
                            //Emgu.CV.Aruco.ArucoInvoke.RefineDetectedMarkers(mappedImage, myBoard, corners, ids, rejected, null, null, 10, 3, true, null, myParams);
                            using (Mat rvecs = new Mat(3, 1, Emgu.CV.CvEnum.DepthType.Cv32F, 1))
                                using (Mat tvecs = new Mat(3, 1, Emgu.CV.CvEnum.DepthType.Cv32F, 1)) {
                                    Emgu.CV.Aruco.ArucoInvoke.EstimatePoseSingleMarkers(corners, 0.08f, cameraMatrix, distortionCoeffs, rvecs, tvecs);
                                    for (i = 0; i < rvecs.Rows; i++)
                                    {
                                        Emgu.CV.Aruco.ArucoInvoke.DrawAxis(mappedImage, cameraMatrix, distortionCoeffs, rvecs.Row(i), tvecs.Row(i), 0.05f);
                                    }
                                }

                            Emgu.CV.Aruco.ArucoInvoke.DrawDetectedMarkers(mappedImage, corners, ids, new Emgu.CV.Structure.MCvScalar(0.0, 200.0, 50.0));
                            mappedImage.Save("C:\\Temp\\ArucoDetect.png");
                        }
                        else if (rejected.Size > 0)
                        {
                            PointF[][] myPts = rejected.ToArrayOfArray();
                            for (i = 0; i < myPts.Length; i++)
                            {
                                CvInvoke.Line(mappedImage, new Point((int)myPts[i][0].X, (int)myPts[i][0].Y), new Point((int)myPts[i][1].X, (int)myPts[i][1].Y), new Emgu.CV.Structure.MCvScalar(0.0, 0.5, 200.0), 2);
                                CvInvoke.Line(mappedImage, new Point((int)myPts[i][1].X, (int)myPts[i][1].Y), new Point((int)myPts[i][2].X, (int)myPts[i][2].Y), new Emgu.CV.Structure.MCvScalar(0.0, 0.5, 200.0), 2);
                                CvInvoke.Line(mappedImage, new Point((int)myPts[i][2].X, (int)myPts[i][2].Y), new Point((int)myPts[i][3].X, (int)myPts[i][3].Y), new Emgu.CV.Structure.MCvScalar(0.0, 0.5, 200.0), 2);
                                CvInvoke.Line(mappedImage, new Point((int)myPts[i][3].X, (int)myPts[i][3].Y), new Point((int)myPts[i][0].X, (int)myPts[i][0].Y), new Emgu.CV.Structure.MCvScalar(0.0, 0.5, 200.0), 2);
                            }
                            mappedImage.Save("C:\\Temp\\ArucoReject.png");
                        }
                    }
        }