Ejemplo n.º 1
0
 /// <summary>
 /// Marks each corner on <paramref name="image"/> by drawing a zero-length
 /// line (i.e. a single pixel of thickness 1) at the corner's location.
 /// </summary>
 /// <param name="image">Image to draw on (modified in place).</param>
 /// <param name="cornerPoints">Corner locations in image coordinates.</param>
 /// <param name="col">Colour used for the marks.</param>
 public static void DrawCornersOnImage(Mat image, Emgu.CV.Util.VectorOfPointF cornerPoints, System.Drawing.Color col)
 {
     var colour = new Bgr(col).MCvScalar;
     int count  = cornerPoints.Size;

     for (int i = 0; i < count; i++)
     {
         // A line from a point to itself paints exactly one pixel.
         var p = new Point((int)cornerPoints[i].X, (int)cornerPoints[i].Y);
         CvInvoke.Line(image, p, p, colour, 1);
     }
 }
Ejemplo n.º 2
0
        /// <summary>
        /// Converts a vector of floating-point points to a vector of integer
        /// points, rounding each coordinate to the nearest integer.
        /// </summary>
        /// <param name="vectorOfPointF">Source vector (not modified).</param>
        /// <returns>A new vector containing the rounded points.</returns>
        public static Emgu.CV.Util.VectorOfPoint ToVectorOfPoint(this Emgu.CV.Util.VectorOfPointF vectorOfPointF)
        {
            int n = vectorOfPointF.Size;
            Point[] rounded = new Point[n];

            for (int i = 0; i < n; i++)
            {
                rounded[i] = Point.Round(vectorOfPointF[i]);
            }

            return new Emgu.CV.Util.VectorOfPoint(rounded);
        }
Ejemplo n.º 3
0
        /// <summary>
        /// Detects ellipse-like blobs in a grey image: thresholds at 170, inverts,
        /// finds contours, fits an ellipse to each contour with enough points and
        /// keeps those whose contour lies close to the fitted ellipse.
        /// Accepted candidates have their contour and bounding box drawn on
        /// <paramref name="imageCopy"/>.
        /// </summary>
        /// <param name="grayImage">Input grey image (not modified).</param>
        /// <param name="imageCopy">Colour image that accepted detections are drawn onto.</param>
        /// <returns>Centres of all accepted ellipses.</returns>
        public static Emgu.CV.Util.VectorOfPointF DetectEllipses(Image <Gray, byte> grayImage, Mat imageCopy)
        {
            var centerPoints  = new Emgu.CV.Util.VectorOfPointF();
            var contours      = new Emgu.CV.Util.VectorOfVectorOfPoint();
            Mat hierarchy     = null;
            var grayImageCopy = grayImage.Clone();

            // Fixed binary threshold at 170; the returned threshold value is not needed.
            CvInvoke.Threshold(grayImage, grayImageCopy, 170, 255, Emgu.CV.CvEnum.ThresholdType.Binary);

            //CvInvoke.Imwrite(Path.GetDirectoryName(myFile) + "\\" + Path.GetFileNameWithoutExtension(myFile) + "-threshold" + Path.GetExtension(myFile), grayImageCopy, new KeyValuePair<Emgu.CV.CvEnum.ImwriteFlags, int>(Emgu.CV.CvEnum.ImwriteFlags.JpegQuality, 95));

            // Invert so dark ellipses become white blobs for contour extraction.
            grayImageCopy._Not();

            CvInvoke.FindContours(grayImageCopy, contours, hierarchy, Emgu.CV.CvEnum.RetrType.Ccomp, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
            // var circles = CvInvoke.HoughCircles(grayImage, Emgu.CV.CvEnum.HoughType.Gradient, 1, grayImage.Rows / 16);

            double largestArea = 0; // tracked as in the original; not used beyond bookkeeping
            var    red         = new Bgr(System.Drawing.Color.Red).MCvScalar;

            for (int i = 0; i < contours.Size; i++)
            {
                var contour = contours[i];
                if (contour.Size <= 4)
                {
                    continue; // FitEllipse needs at least 5 points
                }

                var rect   = CvInvoke.FitEllipse(contour);
                var area   = rect.Size.Width * rect.Size.Height;
                var width  = Math.Max(rect.Size.Width, rect.Size.Height);
                var height = Math.Min(rect.Size.Width, rect.Size.Height);

                // Reject small candidates and very elongated ones (aspect ratio >= 3).
                if (!(area > 1000 && width / height < 3))
                {
                    continue;
                }

                var averageDist  = AverageDistanceToEllipse(contour, rect);
                var furthestDist = FurthestDistanceToEllipse(contour, rect);
                if (averageDist < 1.5 && furthestDist < 4)
                {
                    if (area > largestArea)
                    {
                        largestArea = area;
                    }
                    centerPoints.Push(new PointF[] { rect.Center });
                    DrawContoursOnImage(imageCopy, contour);
                    // CvInvoke.Ellipse(imageCopy, rect, new Bgr(System.Drawing.Color.Red).MCvScalar);

                    // Draw the rotated bounding box of the fitted ellipse.
                    // BUGFIX(perf): the original called rect.GetVertices() sixteen
                    // times; hoist it to a single call.
                    var v = rect.GetVertices();
                    for (int j = 0; j < 4; j++)
                    {
                        var a = new Point((int)v[j].X, (int)v[j].Y);
                        var b = new Point((int)v[(j + 1) % 4].X, (int)v[(j + 1) % 4].Y);
                        CvInvoke.Line(imageCopy, a, b, red);
                    }
                }
            }

            return(centerPoints);
        }
Ejemplo n.º 4
0
        /// <summary>
        /// Checks that the image binarises (Otsu) into exactly two connected
        /// components — background and one foreground blob — and that both
        /// component centroids lie within 10px of the image centre.
        /// </summary>
        /// <param name="grayImage">Single-channel input image (not modified).</param>
        /// <returns>True when the two-component, centred-centroid condition holds.</returns>
        static bool CheckConnectedComponents(Mat grayImage)
        {
            // Threshold using Otsu bi-modal (black&white) assumption
            Mat    binaryImage   = grayImage.Clone();
            double otsuThreshold = CvInvoke.Threshold(grayImage, binaryImage, 0.0, 255.0, Emgu.CV.CvEnum.ThresholdType.Otsu | Emgu.CV.CvEnum.ThresholdType.Binary);

            // dilate to connect two squares (empty Mat kernel => default 3x3 element)
            Mat kernel = new Mat();

            CvInvoke.Dilate(binaryImage, binaryImage, kernel, new Point(-1, -1), 1, Emgu.CV.CvEnum.BorderType.Constant, CvInvoke.MorphologyDefaultBorderValue);

            // NOTE(review): debug output to a hard-coded path; consider removing or making configurable.
            CvInvoke.Imwrite("C:\\Temp\\Dilate.png", binaryImage, new KeyValuePair <Emgu.CV.CvEnum.ImwriteFlags, int>(Emgu.CV.CvEnum.ImwriteFlags.PngCompression, 3));

            // compute number of labels (should be 2: 0 for background, 1 for white)
            Mat labelRegion    = new Mat(new System.Drawing.Size(binaryImage.Width, binaryImage.Height), Emgu.CV.CvEnum.DepthType.Cv32S, 1);
            Mat statistics     = new Mat();
            Mat centroids      = new Mat();
            var numberOfLabels = CvInvoke.ConnectedComponentsWithStats(binaryImage, labelRegion, statistics, centroids, Emgu.CV.CvEnum.LineType.EightConnected, Emgu.CV.CvEnum.DepthType.Cv32S);

            // BUGFIX: the original passed C-style "%d" format specifiers to
            // Console.WriteLine, which does not interpret them (it printed the
            // literal "%d"). Use string interpolation instead.
            Console.WriteLine($" - Number of labels: {numberOfLabels}");

            if (numberOfLabels != 2)
            {
                return(false);
            }

            // compute centers of background and foreground (should also be close to image center)
            Emgu.CV.Util.VectorOfPoint  imageCentre = new Emgu.CV.Util.VectorOfPoint(new Point [] { new Point((int)(grayImage.Cols / 2.0f), (int)(grayImage.Rows / 2.0f)) });
            Emgu.CV.Util.VectorOfPointF blackCenter = new Emgu.CV.Util.VectorOfPointF(new PointF[] { new PointF((float)centroids.GetDoubleValue(0, 0), (float)centroids.GetDoubleValue(0, 1)) });
            Emgu.CV.Util.VectorOfPointF whiteCenter = new Emgu.CV.Util.VectorOfPointF(new PointF[] { new PointF((float)centroids.GetDoubleValue(1, 0), (float)centroids.GetDoubleValue(1, 1)) });

            var blackCentroidDistance = CvInvoke.Norm(blackCenter, imageCentre, Emgu.CV.CvEnum.NormType.L2);
            var whiteCentroidDistance = CvInvoke.Norm(whiteCenter, imageCentre, Emgu.CV.CvEnum.NormType.L2);

            for (var label = 0; label < numberOfLabels; label++)
            {
                // BUGFIX: same C-format issue ("%d", "%.1lf"); ":F1" gives one decimal place.
                Console.WriteLine($" - [{label}] centroid at ({centroids.GetDoubleValue(label, 0):F1},{centroids.GetDoubleValue(label, 1):F1})");
            }

            return(numberOfLabels == 2 && blackCentroidDistance < 10.0 && whiteCentroidDistance < 10.0);
        }
Ejemplo n.º 5
0
        /// <summary>
        /// Loads camera intrinsics from <paramref name="file"/> and distortion
        /// coefficients from "distort.txt", detects a 4x4 chessboard in the current
        /// render, solves PnP for the camera pose, and writes the resulting
        /// rotation matrix (9 floats) to "rotArr.txt" and translation (3 floats)
        /// to "transArr.txt". Also populates cameraPar/rotateArr/translateArr.
        /// </summary>
        /// <param name="file">OpenCV FileStorage file holding the camera matrix.</param>
        public void loadCalibrationNow(string file = "cameraMat.txt")
        {
            FileStorage fs        = new FileStorage(file, FileStorage.Mode.Read);
            Mat         cameraMat = new Mat();

            fs.GetFirstTopLevelNode().ReadMat(cameraMat);
            fs = new FileStorage("distort.txt", FileStorage.Mode.Read);
            Mat distortMat = new Mat();

            fs.GetFirstTopLevelNode().ReadMat(distortMat);

            // Intrinsics as a flat 3x3 row-major float array.
            cameraPar = new float[9];
            double[] cameraParDouble = new Double[9];
            cameraMat.CopyTo(cameraParDouble);
            for (int i = 0; i < 9; i++)
            {
                cameraPar[i] = (float)cameraParDouble[i];
            }

            const int width       = 4;                       //5 //width of chessboard no. squares in width - 1
            const int height      = 4;                       //5 // heght of chess board no. squares in heigth - 1
            Size      patternSize = new Size(width, height); //size of chess board to be detected

            MCvPoint3D32f[] corners_object_list = new MCvPoint3D32f[width * height];
            PointF[]        corners_points_list = new PointF[width * height];

            // Object-space chessboard corners: 31mm pitch, offset by the current
            // origin from the UI (Form1.currentX0/currentY0).
            for (int i = 0; i < height; i++)
            {
                for (int j = 0; j < width; j++)
                {
                    //corners_object_list[width * i + j] = new MCvPoint3D32f((j) * 31, (height -1  - i) * 31, 0 );
                    corners_object_list[width * i + j] = new MCvPoint3D32f((j) * -31 - 289.916f + (float)Form1.currentX0.Value, (height - 1 - i) * -31 - 129.96f + (float)Form1.currentY0.Value, 0);
                }
            }

            var output = new Emgu.CV.Util.VectorOfPointF();

            // Round-trip the current render through a bitmap file so OpenCV can load it.
            render.currentBMP.Save("PicCalibrate.bmp");

            Mat smallerPic = new Mat("PicCalibrate.bmp", LoadImageType.Unchanged);

            bool found = CvInvoke.FindChessboardCorners(smallerPic, patternSize, output);//find chessboard

            if (found == false)
            {
                MessageBox.Show("fail");
                return;
            }

            Console.WriteLine("found:" + found);
            corners_points_list = output.ToArray();

            Mat rotationVec    = new Mat();
            Mat translationVec = new Mat();

            // NOTE(review): the success flag is not checked; pose may be invalid if SolvePnP fails.
            bool solved = CvInvoke.SolvePnP(corners_object_list, corners_points_list, cameraMat, distortMat, rotationVec, translationVec);

            //1 by 3 array of rotate Matrix
            rotateArr = new float[9];
            Mat rotationMatrix = new Mat();

            // Rotation vector -> full 3x3 rotation matrix.
            CvInvoke.Rodrigues(rotationVec, rotationMatrix);
            double[] rotateArrDouble = new double[9];
            rotationMatrix.CopyTo(rotateArrDouble);
            for (int i = 0; i < 9; i++)
            {
                rotateArr[i] = (float)rotateArrDouble[i];
            }

            //1 by 3 array of translate Matrix
            translateArr = new float[3];
            double[] translateArrDouble = new double[3];
            translationVec.CopyTo(translateArrDouble);
            for (int i = 0; i < 3; i++)
            {
                translateArr[i] = (float)translateArrDouble[i];
            }

            // BUGFIX: use using-blocks so the writers are closed (and the files
            // flushed) even if a WriteLine throws; the original leaked on exception.
            using (var swTranslate = new StreamWriter("transArr.txt"))
            {
                for (int i = 0; i < 3; i++)
                {
                    swTranslate.WriteLine(translateArr[i]);
                }
            }

            using (var swRot = new StreamWriter("rotArr.txt"))
            {
                for (int i = 0; i < 9; i++)
                {
                    swRot.WriteLine(rotateArr[i]);
                }
            }
        }
Ejemplo n.º 6
0
        /// <summary>
        /// Calibrates the camera from six chessboard photos ("1.jpg".."6.jpg"):
        /// detects a 5x5 corner grid in each, runs CalibrateCamera, stores the
        /// intrinsics in cameraPar and the first view's pose in
        /// rotateArr/translateArr, and writes the camera matrix and distortion
        /// coefficients to "cameraMat.txt" and "distort.txt".
        /// </summary>
        private void CalibrateCamera()
        {
            const int width       = 5;                       //5 //width of chessboard no. squares in width - 1
            const int height      = 5;                       //5 // heght of chess board no. squares in heigth - 1
            Size      patternSize = new Size(width, height); //size of chess board to be detected

            MCvPoint3D32f[][] corners_object_list = new MCvPoint3D32f[6][];
            PointF[][]        corners_points_list = new PointF[6][];


            // Object-space corner grid: 29mm pitch, z = 8 for every view.
            // NOTE(review): loadCalibration builds the same grid with z = 5 —
            // confirm which z offset is correct.
            for (int k = 0; k < 6; k++)
            {
                corners_object_list[k] = new MCvPoint3D32f[width * height];
                for (int i = 0; i < 5; i++)
                {
                    for (int j = 0; j < 5; j++)
                    {
                        corners_object_list[k][5 * i + j] = new MCvPoint3D32f((4 - i) * 29, (4 - j) * 29, 8);
                    }
                }
            }



            var  output         = new Emgu.CV.Util.VectorOfPointF();
            Size smallerPicSize = new Size(816, 612);

            // Detect the chessboard corners in each (downscaled) photo.
            for (int k = 1; k <= 6; k++)
            {
                Mat imgCam     = new Mat(k + ".jpg", LoadImageType.Unchanged);//load picture of chessboard
                Mat smallerPic = new Mat();

                Size PicSize = new Size(3264, 2448);
                CvInvoke.Resize(imgCam, smallerPic, smallerPicSize);

                if (k == 1)
                {
                    smallerPic.Save("small1.jpg");
                }

                //CvInvoke.Imshow("small", smallerPic);

                // NOTE(review): 'found' is logged but not checked; a failed detection
                // leaves the previous view's corners in 'output' for this view.
                bool found = CvInvoke.FindChessboardCorners(smallerPic, patternSize, output);//find chessboard
                Console.WriteLine("found:" + found);
                corners_points_list[k - 1] = output.ToArray();
            }

            // Debug dump of the first view's detected corners.
            for (int i = 0; i < output.Size; i++)
            {
                Console.WriteLine(corners_points_list[0].GetValue(i));
            }

            Mat cameraMat  = new Mat();
            Mat distorCoef = new Mat();

            Mat[] rotationVec = new Mat[6];

            Mat[] translationVec = new Mat[6];
            // NOTE(review): these allocations are discarded — CalibrateCamera's
            // 'out' parameters replace both arrays below.
            for (int k = 0; k < 6; k++)
            {
                translationVec[k] = new Mat();
                rotationVec[k]    = new Mat();
            }

            MCvTermCriteria criteria = new MCvTermCriteria();

            double rms = CvInvoke.CalibrateCamera(corners_object_list, corners_points_list, smallerPicSize, cameraMat, distorCoef, CalibType.RationalModel, criteria, out rotationVec, out translationVec);


            // Intrinsics as a flat 3x3 row-major float array.
            cameraPar = new float[9];
            double[] cameraParDouble = new Double[9];
            cameraMat.CopyTo(cameraParDouble);
            for (int i = 0; i < 9; i++)
            {
                cameraPar[i] = (float)cameraParDouble[i];
            }


            //1 by 14 array of distortion coeff, only first 8 important
            double[] distortArr = new double[14];
            distorCoef.CopyTo(distortArr);

            //1 by 3 array of rotate Matrix
            rotateArr = new float[9];
            Mat rotationMatrix = new Mat();

            //need to flip stuff
            //double[] rv = new double[3];
            //rotationVec[0].CopyTo(rv);
            //rv[1] = -1.0f * rv[1]; rv[2] = -1.0f * rv[2];
            //rotationVec[0].SetTo(rv);
            // Convert the first view's rotation vector into a full 3x3 matrix.
            CvInvoke.Rodrigues(rotationVec[0], rotationMatrix);
            double[] rotateArrDouble = new double[9];
            rotationMatrix.CopyTo(rotateArrDouble);
            for (int i = 0; i < 9; i++)
            {
                rotateArr[i] = (float)rotateArrDouble[i];
            }


            //1 by 3 array of translate Matrix
            translateArr = new float[3];
            double[] translateArrDouble = new double[3];
            translationVec[0].CopyTo(translateArrDouble);
            for (int i = 0; i < 3; i++)
            {
                translateArr[i] = (float)translateArrDouble[i];
            }


            // NOTE(review): only the first 3 of the 9 rotation entries are printed.
            for (int i = 0; i < 3; i++)
            {
                Console.WriteLine(rotateArr[i]);
            }

            for (int i = 0; i < 3; i++)
            {
                Console.WriteLine(translateArr[i]);
            }

            //CvInvoke.Imshow("chessboard", imgCam);

            Console.WriteLine(rms);

            // Persist intrinsics and distortion for loadCalibration to read back.
            FileStorage fs = new FileStorage("cameraMat.txt", FileStorage.Mode.Write);

            fs.Write(cameraMat);
            fs.ReleaseAndGetString();
            fs = new FileStorage("distort.txt", FileStorage.Mode.Write);
            fs.Write(distorCoef);
            fs.ReleaseAndGetString();
        }
Ejemplo n.º 7
0
        /// <summary>
        /// Re-loads the stored camera intrinsics (from <paramref name="file"/>) and
        /// distortion coefficients (from "distort.txt"), re-detects the 5x5
        /// chessboard in "1.jpg" and solves PnP to recover the camera pose into
        /// rotateArr/translateArr. Also populates cameraPar.
        /// </summary>
        /// <param name="file">OpenCV FileStorage file holding the camera matrix.</param>
        public void loadCalibration(string file = "cameraMat.txt")
        {
            FileStorage fs        = new FileStorage(file, FileStorage.Mode.Read);
            Mat         cameraMat = new Mat();

            fs.GetFirstTopLevelNode().ReadMat(cameraMat);
            fs = new FileStorage("distort.txt", FileStorage.Mode.Read);
            Mat distortMat = new Mat();

            fs.GetFirstTopLevelNode().ReadMat(distortMat);

            // Intrinsics as a flat 3x3 row-major float array.
            cameraPar = new float[9];
            double[] cameraParDouble = new Double[9];
            cameraMat.CopyTo(cameraParDouble);
            for (int i = 0; i < 9; i++)
            {
                cameraPar[i] = (float)cameraParDouble[i];
            }

            const int width       = 5;                       //5 //width of chessboard no. squares in width - 1
            const int height      = 5;                       //5 // heght of chess board no. squares in heigth - 1
            Size      patternSize = new Size(width, height); //size of chess board to be detected

            MCvPoint3D32f[] corners_object_list = new MCvPoint3D32f[width * height];
            PointF[]        corners_points_list = new PointF[width * height];

            // Object-space corner grid: 29mm pitch, z = 5.
            // NOTE(review): CalibrateCamera builds the same grid with z = 8 —
            // confirm which z offset is correct.
            for (int i = 0; i < 5; i++)
            {
                for (int j = 0; j < 5; j++)
                {
                    corners_object_list[5 * i + j] = new MCvPoint3D32f((4 - i) * 29, (4 - j) * 29, 5);
                }
            }


            var  output         = new Emgu.CV.Util.VectorOfPointF();
            Size smallerPicSize = new Size(816, 612);


            Mat imgCam     = new Mat("1.jpg", LoadImageType.Unchanged);//load picture of chessboard
            Mat smallerPic = new Mat();

            Size PicSize = new Size(3264, 2448);

            CvInvoke.Resize(imgCam, smallerPic, smallerPicSize);

            // NOTE(review): 'found' is logged but not checked; SolvePnP below runs
            // on an empty corner list if detection failed.
            bool found = CvInvoke.FindChessboardCorners(smallerPic, patternSize, output);//find chessboard

            Console.WriteLine("found:" + found);
            corners_points_list = output.ToArray();


            Mat rotationVec    = new Mat();
            Mat translationVec = new Mat();

            CvInvoke.SolvePnP(corners_object_list, corners_points_list, cameraMat, distortMat, rotationVec, translationVec);

            //1 by 3 array of rotate Matrix
            rotateArr = new float[9];
            Mat rotationMatrix = new Mat();

            // Rotation vector -> full 3x3 rotation matrix.
            CvInvoke.Rodrigues(rotationVec, rotationMatrix);
            double[] rotateArrDouble = new double[9];
            rotationMatrix.CopyTo(rotateArrDouble);
            for (int i = 0; i < 9; i++)
            {
                rotateArr[i] = (float)rotateArrDouble[i];
            }

            //1 by 3 array of translate Matrix
            translateArr = new float[3];
            double[] translateArrDouble = new double[3];
            translationVec.CopyTo(translateArrDouble);
            for (int i = 0; i < 3; i++)
            {
                translateArr[i] = (float)translateArrDouble[i];
            }
        }
Ejemplo n.º 8
0
        /// <summary>
        /// Prepares inputs for a (currently disabled) sparse bundle adjustment:
        /// loads the camera calibration, unpacks per-measurement rotation (3x3)
        /// and translation (3-vector) blocks from <paramref name="matrices"/>,
        /// projects the 3D point into each camera and records its visibility.
        /// </summary>
        /// <param name="myCalibFile">Calibration file for LoadCameraFromFile.</param>
        /// <param name="xsize">Camera resolution width in pixels.</param>
        /// <param name="ysize">Camera resolution height in pixels.</param>
        /// <param name="npoints">Number of 3D points. NOTE(review): currently ignored — only points[0..2] are used; confirm intent.</param>
        /// <param name="points">Flat xyz coordinates of the 3D point(s).</param>
        /// <param name="nMeasurements">Number of camera measurements.</param>
        /// <param name="matrices">Per measurement i: 9 rotation entries (row-major) at i*12, then 3 translation entries at i*12+9.</param>
        public static void initBundleAdjust(string myCalibFile, int xsize, int ysize, int npoints, float[] points, int nMeasurements, float[] matrices)
        {
            // camera related parameters
            Mat cameraMat;
            Mat distCoeffs;

            LoadCameraFromFile(myCalibFile, out cameraMat, out distCoeffs);

            Size cameraRes = new Size(xsize, ysize);

            List <Emgu.CV.Structure.MCvPoint3D32f> objectPoints = new List <Emgu.CV.Structure.MCvPoint3D32f>();

            objectPoints.Add(new Emgu.CV.Structure.MCvPoint3D32f(points[0], points[1], points[2]));
            var points3d         = new Emgu.CV.Util.VectorOfPoint3D32F(objectPoints.ToArray());
            var points2d         = new Emgu.CV.Util.VectorOfVectorOfPointF();
            var visibility       = new Emgu.CV.Util.VectorOfVectorOfInt();
            var cameraMatrix     = new Emgu.CV.Util.VectorOfMat();
            var R_true           = new Emgu.CV.Util.VectorOfMat();
            var T_true           = new Emgu.CV.Util.VectorOfMat();
            var distortionCoeffs = new Emgu.CV.Util.VectorOfMat();

            Emgu.CV.Structure.MCvTermCriteria criteria = new Emgu.CV.Structure.MCvTermCriteria(70, 1e-10);

            // define cameras
            for (int i = 0; i < nMeasurements; i++)
            {
                cameraMatrix.Push(cameraMat);
                distortionCoeffs.Push(distCoeffs);

                Mat      _R_true     = new Mat(3, 3, Emgu.CV.CvEnum.DepthType.Cv64F, 1);
                double[] matrixArray = new double[9];
                for (int j = 0; j < 3; j++)
                {
                    for (int k = 0; k < 3; k++)
                    {
                        // BUGFIX: the destination index used 'i' (the measurement
                        // index) instead of 'k', so only one column of each rotation
                        // matrix was ever written (and overwritten three times).
                        matrixArray[j * 3 + k] = matrices[i * 12 + j * 3 + k];
                    }
                }
                Marshal.Copy(matrixArray, 0, _R_true.DataPointer, 9);
                R_true.Push(_R_true);

                Mat      _T_true     = new Mat(3, 1, Emgu.CV.CvEnum.DepthType.Cv64F, 1);
                double[] vectorArray = new double[3];
                for (int j = 0; j < 3; j++)
                {
                    vectorArray[j] = matrices[i * 12 + 9 + j];
                }
                Marshal.Copy(vectorArray, 0, _T_true.DataPointer, 3);
                T_true.Push(_T_true);
            }

            // project points to image coordinates
            for (int i = 0; i < nMeasurements; i++)
            {
                // project
                var imagePoints2 = new Emgu.CV.Util.VectorOfPointF();
                Emgu.CV.CvInvoke.ProjectPoints(points3d, R_true[i], T_true[i], cameraMatrix[i], distortionCoeffs[i], imagePoints2);

                // check if the point is in camera shot
                Emgu.CV.Util.VectorOfInt vis = new Emgu.CV.Util.VectorOfInt(1);
                // if the image point is within camera resolution then the point is visible
                if ((0 <= imagePoints2[0].X) && (imagePoints2[0].X <= cameraRes.Width) &&
                    (0 <= imagePoints2[0].Y) && (imagePoints2[0].Y <= cameraRes.Height))
                {
                    vis.Push(new int[] { 1 });
                }
                // else, the point is not visible
                else
                {
                    vis.Push(new int[] { 0 });
                }
                points2d.Push(imagePoints2);
                visibility.Push(vis);
            }
            //Emgu.CV.
            //	cv::LevMarqSparse lv;
            //lv.bundleAdjust(points_true, imagePoints, visibility, cameraMatrix, R_true, T_true, distCoeffs, criteria);
        }
Ejemplo n.º 9
0
        /// <summary>
        /// Lets the user pick a folder of calibration photos, detects chessboard
        /// corners in each "calibration*.png" (via both EmguCV and ARToolKit),
        /// writes annotated "Corners-*.png" images, and once every image has been
        /// captured runs the ARToolKit chessboard calibration and reports the
        /// per-image reprojection errors.
        /// </summary>
        private void button1_Click(object sender, EventArgs e)
        {
            var myDlg = new FolderBrowserDialog();

            myDlg.SelectedPath = "C:\\Customer\\Stannah\\Photogrammetry\\Photos";
            var ret = myDlg.ShowDialog();

            if (ret != DialogResult.OK)
            {
                return;
            }
            var myFolder = myDlg.SelectedPath;
            var myFiles = new List <string>();
            int myWidth = 0, myHeight = 0, nImages = 0, nCount = 0;

            // Collect candidate images, skipping previously-annotated "-adj" outputs
            // and any filename containing "17" (excluded from this calibration run).
            foreach (string myFile in Directory.GetFiles(myFolder))
            {
                if (Path.GetFileNameWithoutExtension(myFile).ToLower().StartsWith("calibration") && myFile.ToLower().EndsWith(".png") && !myFile.ToLower().Contains("-adj.png"))
                {
                    if (Path.GetFileNameWithoutExtension(myFile).Contains("17"))
                    {
                        continue;
                    }
                    nImages = nImages + 1;
                    myFiles.Add(myFile);
                    if (myWidth == 0)
                    {
                        // Record the image dimensions from the first image found.
                        Image myImage = Image.FromFile(myFile);
                        myWidth  = myImage.Width;
                        myHeight = myImage.Height;
                    }
                }
            }

            // ARToolKitFunctions.Instance.arwInitialiseAR();
            // 17x13 corner grid, 20mm squares, 3264x2448 source images.
            ARToolKitFunctions.Instance.arwInitChessboardCorners(17, 13, 20, 3264, 2448, nImages);

            // 17 * 13 corners * 2 coordinates = 442 floats.
            float[] corners = new float[442];
            int     cornerCount;

            //string cornerFile = "C:\\Temp\\CornersARToolkit.txt";
            //if (File.Exists(cornerFile)) {
            //    try {
            //        File.Delete(cornerFile);
            //    }
            //    catch (Exception ex) {
            //        string s = ex.ToString();
            //    }
            //}

            myFiles.Sort(new AlphaNumericCompare());

            //StreamWriter sw = new StreamWriter(cornerFile);
            foreach (string myFile in myFiles)
            {
                var image = new Image <Gray, byte>(myFile);
                var size  = image.Width * image.Height;

                // EmguCV corner detection. NOTE(review): 'res' and 'cornerPoints'
                // are never used — ARToolKit's detection below drives the calibration.
                var cornerPoints = new Emgu.CV.Util.VectorOfPointF();
                var mBoardSize   = new Size(13, 17);
                var res          = CvInvoke.FindChessboardCorners(image, mBoardSize, cornerPoints);

                byte[] imageBytes = new Byte[size];
                System.Buffer.BlockCopy(image.Data, 0, imageBytes, 0, size);

                //byte[] imageBytes = ImageToGrayscaleByteArray((Bitmap)Image.FromFile(myFile));

                int result = ARToolKitFunctions.Instance.arwFindChessboardCorners(corners, out cornerCount, imageBytes);

                // Repack the flat x/y corner array into a point vector for drawing.
                var imagePoints = new Emgu.CV.Util.VectorOfPointF();
                int l           = 0;
                for (int i = 0; i < 17; i++)
                {
                    for (int j = 0; j < 13; j++)
                    {
                        //sw.WriteLine(corners[l * 2].ToString() + '\t' + corners[l * 2 + 1].ToString());
                        imagePoints.Push(new PointF[] { new PointF(corners[l * 2], corners[l * 2 + 1]) });
                        l++;
                    }
                }

                if (result == 1)
                {
                    if (imagePoints.Size > 0)
                    {
                        // Save a copy of the image with the detected corners drawn on.
                        Mat imageCopy = Emgu.CV.CvInvoke.Imread(myFile, Emgu.CV.CvEnum.ImreadModes.Color);
                        if (imagePoints.Size > 0)
                        {
                            mdlEmguDetection.DrawCornersOnImage(imageCopy, imagePoints, System.Drawing.Color.Green);
                        }
                        CvInvoke.Imwrite(Path.GetDirectoryName(myFile) + "\\Corners-" + Path.GetFileNameWithoutExtension(myFile) + ".png", imageCopy, new KeyValuePair <Emgu.CV.CvEnum.ImwriteFlags, int>(Emgu.CV.CvEnum.ImwriteFlags.PngCompression, 3));
                    }

                    cornerCount = ARToolKitFunctions.Instance.arwCaptureChessboardCorners();
                    System.Diagnostics.Debug.Print("Processed image " + cornerCount.ToString());
                    // Once every image has been captured, run the calibration and
                    // report the total and per-image reprojection errors.
                    if (cornerCount == nImages)
                    {
                        nCount = nCount + 1;
                        float[] reprojectionErrors = new float[nImages];
                        float   reprojectionError  = ARToolKitFunctions.Instance.arwCalibChessboardCorners(nImages, "C:\\Temp\\Calib.dat", out reprojectionErrors);

                        System.Diagnostics.Debug.Print("Total reprojection error: " + reprojectionError.ToString());
                        for (int i = 0; i < reprojectionErrors.Length; i++)
                        {
                            System.Diagnostics.Debug.Print("Reprojection error " + (i + 1).ToString() + ": " + reprojectionErrors[i].ToString());
                        }
                    }
                }
                else
                {
                    System.Diagnostics.Debug.Print("Failed to process image " + myFile);
                }
            }
            //sw.Close();
        }
Ejemplo n.º 10
0
        /// <summary>
        /// Projects the datum into image space, samples a square patch around it,
        /// and appends the patch centre to <paramref name="centerPoints"/> when the
        /// patch looks like a datum marker (between 20% and 80% of its pixels are
        /// above the Otsu threshold).
        /// </summary>
        /// <param name="pt">Datum point in model space.</param>
        /// <param name="model">Model transformation matrix.</param>
        /// <param name="arParams">Camera parameters used for the projection.</param>
        /// <param name="vp">Viewport; vp[2]/vp[3] are its width/height.</param>
        /// <param name="grayImage">Grey image the patch is sampled from.</param>
        /// <param name="centerPoints">Receives the accepted centre, if any.</param>
        private static void GetCenterPointForDatum(clsPoint pt, double[,] model, ARParam arParams, int[] vp, Image <Gray, byte> grayImage, ref Emgu.CV.Util.VectorOfPointF centerPoints)
        {
            var cpt        = ModelToImageSpace(arParams, model, pt);
            var halfSquare = GetSquareForDatum(arParams, model, pt);

            // Too small to evaluate reliably.
            if (halfSquare < 8)
            {
                return;
            }

            // The whole patch must lie inside the viewport.
            bool outsideX = cpt.x - halfSquare < 0 || cpt.x + halfSquare > vp[2];
            bool outsideY = cpt.y - halfSquare < 0 || cpt.y + halfSquare > vp[3];
            if (outsideX || outsideY)
            {
                return;
            }

            var sampleRect   = new Rectangle((int)cpt.x - halfSquare, (int)cpt.y - halfSquare, 2 * halfSquare, 2 * halfSquare);
            var region       = new Mat(grayImage.Mat, sampleRect);
            var binaryRegion = region.Clone();
            CvInvoke.Threshold(region, binaryRegion, 0.0, 255.0, Emgu.CV.CvEnum.ThresholdType.Otsu);

            int whitePixels = CvInvoke.CountNonZero(binaryRegion);
            var patchArea   = 4 * halfSquare * halfSquare;

            // Accept only patches with a plausible white/black pixel mix.
            if (whitePixels > patchArea * 0.2f && whitePixels < patchArea * 0.8f)
            {
                centerPoints.Push(new PointF[] { new PointF((float)cpt.X, (float)cpt.Y) });
            }
        }
Ejemplo n.º 11
0
        public static void DetectMarkers(string myFile)
        {
            var grayImage = new Image <Gray, byte>(myFile);
            //CheckConnectedComponents(grayImage.Mat);

            Mat imageCopy = Emgu.CV.CvInvoke.Imread(myFile, Emgu.CV.CvEnum.ImreadModes.Color);

            byte[] grayImageBytes = new byte[grayImage.Data.Length];
            Buffer.BlockCopy(grayImage.Data, 0, grayImageBytes, 0, grayImage.Data.Length);
            myVideoWidth  = grayImage.Width;
            myVideoHeight = grayImage.Height;

            //var thresh = grayImage.Clone();
            //double otsuThreshold = CvInvoke.Threshold(grayImage, thresh, 128.0, 255.0, Emgu.CV.CvEnum.ThresholdType.Otsu);
            //CvInvoke.Imwrite(Path.GetDirectoryName(myFile) + "\\" + Path.GetFileNameWithoutExtension(myFile) + "-threshold" + Path.GetExtension(myFile), thresh, new KeyValuePair<Emgu.CV.CvEnum.ImwriteFlags, int>(Emgu.CV.CvEnum.ImwriteFlags.PngCompression, 3));

            //Detect the AR Marker first

            // Initialise AR
            string myCameraFile = "data\\calib.dat";
            var    arParams     = LoadCameraFromFile(myCameraFile);
            // string myVConf = "-module=Image -preset=photo -format=BGRA";
            string myVConf = "-module=Image -width=" + myVideoWidth + " -height=" + myVideoHeight + " -format=MONO";

            // --- ARToolKit initialisation ---------------------------------------------------
            // Bring up the AR runtime with the configured video source and camera calibration
            // file, then silence its logging (level 0) and create our own logger instead.
            ARToolKitFunctions.Instance.arwInitialiseAR();
            ARToolKitFunctions.Instance.arwInitARToolKit(myVConf, myCameraFile);
            string artkVersion = ARToolKitFunctions.Instance.arwGetARToolKitVersion();
            string pixelFormat = string.Empty;

            ARToolKitFunctions.Instance.arwSetLogLevel(0);
            myLogger = new Logger();

            // --- Build OpenCV camera intrinsics from the ARToolKit calibration ---------------
            // 3x3 camera matrix and an 8-element distortion vector, both double precision.
            // nFactors = 8 matches the length of arParams.dist_factor used below.
            Mat cameraMatrix     = new Mat(3, 3, Emgu.CV.CvEnum.DepthType.Cv64F, 1);
            int nFactors         = 8;
            Mat distortionCoeffs = new Mat(nFactors, 1, Emgu.CV.CvEnum.DepthType.Cv64F, 1);

            // Flatten arParams.mat (row-major) into a 9-element array for Marshal.Copy.
            double[] cameraArray = new double[9];
            for (int j = 0; j < 3; j++)
            {
                for (int i = 0; i < 3; i++)
                {
                    cameraArray[j * 3 + i] = arParams.mat[j, i];
                }
            }
            double[] distCoeffArray = new double[nFactors];
            for (int i = 0; i < nFactors; i++)
            {
                distCoeffArray[i] = arParams.dist_factor[i];
            }
            // Copy the managed arrays directly into the Mats' native buffers.
            Marshal.Copy(cameraArray, 0, cameraMatrix.DataPointer, 9);
            Marshal.Copy(distCoeffArray, 0, distortionCoeffs.DataPointer, nFactors);

            // Register the marker set with ARToolKit before processing the frame.
            mdlRecognise.AddMarkersToARToolKit();

            // cornersErr collects the reprojected corner points for every detected marker;
            // cornersErr2 was used by the (commented-out) CornerSubPix comparison below.
            var cornersErr  = new Emgu.CV.Util.VectorOfPointF();
            var cornersErr2 = new Emgu.CV.Util.VectorOfPointF();

            // Run marker detection on the grayscale frame.
            var retB = ARToolKitFunctions.Instance.arwUpdateARToolKit(grayImageBytes, false);

            // Scan candidate marker IDs. NOTE(review): 102 is a magic upper bound — presumably
            // the number of markers registered by AddMarkersToARToolKit; confirm and consider
            // naming it as a constant.
            for (int markerID = 0; markerID < 102; markerID++)
            {
                // mv: 4x4 OpenGL-style modelview matrix (column-major per OpenGL convention —
                // converted below by OpenGL2Trans). corners: room for up to 16 (x,y) pairs.
                double[] mv = new double[16] {
                    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
                };
                double[] corners = new double[32] {
                    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
                };
                // Returns false when this marker ID was not detected in the current frame.
                retB = ARToolKitFunctions.Instance.arwQueryMarkerTransformation(markerID, mv, corners, out int numCorners);
                if (!retB)
                {
                    continue;
                }

                // Convert the OpenGL modelview array into a 3x4 [R|t] transform.
                var trans = OpenGL2Trans(mv);

                // Model-space points to reproject: the marker's own square (corners at +/-40,
                // i.e. an 80-unit square — presumably millimetres; TODO confirm units), plus
                // extra squares describing neighbouring markers on the same physical board.
                var pts2d        = new List <clsPoint>();
                var cornerPoints = new Emgu.CV.Util.VectorOfPointF();
                pts2d.Add(new clsPoint(-40, -40));
                pts2d.Add(new clsPoint(40, -40));
                pts2d.Add(new clsPoint(40, 40));
                pts2d.Add(new clsPoint(-40, 40));
                if (markerID == myGFMarkerID)
                {
                    // The "GF" marker board: three additional squares offset +110 in x
                    // and -190 in y relative to this marker's frame.
                    pts2d.Add(new clsPoint(110 - 40, -40));
                    pts2d.Add(new clsPoint(110 + 40, -40));
                    pts2d.Add(new clsPoint(110 + 40, 40));
                    pts2d.Add(new clsPoint(110 - 40, 40));
                    pts2d.Add(new clsPoint(110 - 40, -40 - 190));
                    pts2d.Add(new clsPoint(110 + 40, -40 - 190));
                    pts2d.Add(new clsPoint(110 + 40, 40 - 190));
                    pts2d.Add(new clsPoint(110 - 40, 40 - 190));
                    pts2d.Add(new clsPoint(-40, -40 - 190));
                    pts2d.Add(new clsPoint(40, -40 - 190));
                    pts2d.Add(new clsPoint(40, 40 - 190));
                    pts2d.Add(new clsPoint(-40, 40 - 190));
                }
                else
                {
                    // All other boards: one additional square offset -85 in x.
                    pts2d.Add(new clsPoint(-40 - 85, -40));
                    pts2d.Add(new clsPoint(40 - 85, -40));
                    pts2d.Add(new clsPoint(40 - 85, 40));
                    pts2d.Add(new clsPoint(-40 - 85, 40));
                }

                // Alternative reprojection path via CvInvoke.ProjectPoints, kept for reference.
                //var objectPoints = new Emgu.CV.Util.VectorOfPoint3D32F(pts2d.Select(p => new MCvPoint3D32f((float)p.x, (float)p.y, 0)).ToArray());

                //var reprojectPoints = new Emgu.CV.Util.VectorOfPointF();
                //Mat rvec = new Mat(3, 3, Emgu.CV.CvEnum.DepthType.Cv64F, 1);
                //double[] matrixArray = new double[9];
                //for (int j = 0; j < 3; j++) {
                //    for (int k = 0; k < 3; k++) {
                //        matrixArray[j * 3 + k] = trans[j, k];
                //    }
                //}
                //Marshal.Copy(matrixArray, 0, rvec.DataPointer, 9);

                //Mat tvec = new Mat(3, 1, Emgu.CV.CvEnum.DepthType.Cv64F, 1);
                //double[] vectorArray = new double[3];
                //for (int j = 0; j < 3; j++) {
                //    vectorArray[j] = trans[j, 3];
                //}
                //Marshal.Copy(vectorArray, 0, tvec.DataPointer, 3);
                //CvInvoke.ProjectPoints(objectPoints, rvec, tvec, cameraMatrix, distortionCoeffs, reprojectPoints);
                //cornerPoints.Push(reprojectPoints.ToArray());
                //cornersErr.Push(reprojectPoints.ToArray());

                // Active path: project each model point into image space using the ARToolKit
                // calibration (applies the lens distortion model inside ModelToImageSpace).
                for (int i = 0; i < pts2d.Count; i++)
                {
                    var pt = ModelToImageSpace(arParams, trans, pts2d[i]);
                    cornerPoints.Push(new PointF[] { new PointF((float)pt.X, (float)pt.Y) });
                }
                cornersErr.Push(cornerPoints.ToArray());

                // Alternative: use the raw detected corners reported by ARToolKit instead of
                // the reprojected model points. Kept for reference.
                //for (int i = 0; i < numCorners; i++) {
                //    //pts2d.Add(new clsPoint(0, 0));
                //    cornerPoints.Push(new PointF[] { new PointF((float)corners[i * 2], (float)corners[i * 2 + 1]) });
                //    //arParamIdeal2Observ(arParams.dist_factor, corners[i * 2], corners[i * 2 + 1], out double ox, out double oy, arParams.dist_function_version);
                //    //cornerPoints.Push(new PointF[] { new PointF((float)ox, (float)oy) });
                //}

                // Always true on the active path (one pushed point per model point); the body
                // below is an experimental sub-pixel refinement + SolvePnP re-estimation that
                // is entirely commented out.
                if (cornerPoints.Size == pts2d.Count)
                {
                    //cornersErr.Push(cornerPoints.ToArray());
                    //var cornersCopy = new List<clsPoint>();
                    //var cornersCopy2 = new List<clsPoint>();
                    //foreach (var p in cornerPoints.ToArray()) cornersCopy.Add(new clsPoint(p.X, p.Y));
                    //CvInvoke.CornerSubPix(grayImage, cornerPoints, new Size(5, 5), new Size(-1, -1), new Emgu.CV.Structure.MCvTermCriteria(100));

                    //foreach (var p in cornerPoints.ToArray()) cornersCopy2.Add(new clsPoint(p.X, p.Y));

                    //for (int i = 0; i < cornersCopy.Count; i++) {
                    //    cornersErr.Push(new PointF[] { new PointF((float)cornersCopy[i].x, (float)cornersCopy[i].y) });
                    //    if (cornersCopy[i].Dist(cornersCopy2[i]) > 4.0) {
                    //        cornersErr2.Push(new PointF[] { new PointF((float)cornersCopy2[i].x, (float)cornersCopy2[i].y) });
                    //    }
                    //}

                    //Emgu.CV.Util.VectorOfPointF imagePoints = new Emgu.CV.Util.VectorOfPointF();
                    //for (int i = 0; i < centerPoints.Size; i++) {
                    //    arParamObserv2Ideal(arParams.dist_factor, centerPoints[i].X, centerPoints[i].Y, out double ox, out double oy, arParams.dist_function_version);
                    //    imagePoints.Push(new PointF[] { new PointF((float)ox, (float)oy) });
                    //}

                    //Mat rvec = new Mat();
                    //Mat tvec = new Mat();
                    //CvInvoke.SolvePnP(objectPoints, imagePoints, cameraMatrix, distortionCoeffs, rvec, tvec, false, Emgu.CV.CvEnum.SolvePnpMethod.IPPE);
                    //Mat rotationMatrix = new Mat();
                    //CvInvoke.Rodrigues(rvec, rotationMatrix);

                    //trans = new double[3, 4];
                    //double[] rotationMatrixArray = new double[12];
                    //Marshal.Copy(rotationMatrix.DataPointer, rotationMatrixArray, 0, 12);
                    //double[] translationMatrixArray = new double[3];
                    //Marshal.Copy(tvec.DataPointer, translationMatrixArray, 0, 3);
                    //for (int j = 0; j < 3; j++) {
                    //    for (int i = 0; i < 3; i++) {
                    //        trans[j, i] = rotationMatrixArray[3 * j + i];
                    //    }
                    //    trans[j, 3] = translationMatrixArray[j];
                    //}

                    //mv = Trans2OpenGL(trans);
                }
            }

            // Visualise all reprojected corners in green and save the annotated frame as a
            // PNG ("Corners-<name>.png") next to the input file.
            DrawCornersOnImage(imageCopy, cornersErr, System.Drawing.Color.Green);
            //if (cornersErr2.Size > 0)  DrawCornersOnImage(imageCopy, cornersErr2, System.Drawing.Color.Red);
            CvInvoke.Imwrite(Path.GetDirectoryName(myFile) + "\\Corners-" + Path.GetFileNameWithoutExtension(myFile) + ".png", imageCopy, new KeyValuePair <Emgu.CV.CvEnum.ImwriteFlags, int>(Emgu.CV.CvEnum.ImwriteFlags.PngCompression, 3));
        }