Example #1
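        // Toggles webcam capture; when stopping, runs ArUco camera calibration on the
        // marker detections accumulated so far and clears the collected data.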
        private void cameraButton_Click(object sender, EventArgs e)
        {
            if (_capture != null)
            {
                if (_captureInProgress)
                {
                    int totalPoints = _markerCounterPerFrame.ToArray().Sum();
                    if (totalPoints > 0)
                    {
                        double repError = ArucoInvoke.CalibrateCameraAruco(_allCorners, _allIds, _markerCounterPerFrame, ArucoBoard, _imageSize,
                                                                           _cameraMatrix, _distCoeffs, null, null, CalibType.Default, new MCvTermCriteria(30, double.Epsilon));

                        UpdateMessage(String.Format("Camera calibration completed with reprojection error: {0}", repError));
                        _allCorners.Clear();
                        _allIds.Clear();
                        _markerCounterPerFrame.Clear();
                        _imageSize = Size.Empty;
                    }

                    //stop the capture
                    cameraButton.Text = "Start Capture";
                    _capture.Pause();
                }
                else
                {
                    //start the capture
                    cameraButton.Text = "Stop Capture";
                    _capture.Start();
                }

                _captureInProgress = !_captureInProgress;
            }
        }
Example #2
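        // Detects ArUco markers in the supplied Bitmap and returns each marker's id
        // together with its four corner points.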
        public MarkerResult[] FindMarkers(Bitmap image)
        {
            Image <Bgr, byte> openCVImage = new Image <Bgr, byte>(image);

            // note: 'new' on the enum yields its default value (0), which selects the first predefined dictionary (Dict4X4_50)
            Dictionary.PredefinedDictionaryName name = new Dictionary.PredefinedDictionaryName();
            Dictionary             Dict       = new Dictionary(name);
            VectorOfVectorOfPointF Corners    = new VectorOfVectorOfPointF();
            VectorOfInt            Ids        = new VectorOfInt();
            DetectorParameters     Parameters = DetectorParameters.GetDefault();

            VectorOfVectorOfPointF Rejected = new VectorOfVectorOfPointF();

            ArucoInvoke.DetectMarkers(openCVImage, Dict, Corners, Ids, Parameters, Rejected);

            var markers = new MarkerResult[Corners.Size];

            for (int i = 0; i < Corners.Size; i++)
            {
                var markerCorners = new Vector2[4];

                for (int y = 0; y < 4; y++)
                {
                    markerCorners[y] = new Vector2(Corners[i][y].X, Corners[i][y].Y);
                }

                markers[i] = new MarkerResult(Ids[i], markerCorners);
            }

            return(markers);
        }
Example #3
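    // Frame-grab handler: retrieves the current frame, detects ArUco markers on it,
    // draws any detections, and shows the original next to an adaptively thresholded copy.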
    private void HandleGrab(object sender, EventArgs e)
    {
        Mat image = new Mat();

        if (capture.IsOpened)
        {
            capture.Retrieve(image);
        }
        if (image.IsEmpty)
        {
            return;
        }
        Mat grayImg = image.Clone();

        CvInvoke.CvtColor(image, grayImg, ColorConversion.Bgr2Gray);
        CvInvoke.AdaptiveThreshold(grayImg, grayImg, 255, AdaptiveThresholdType.MeanC, ThresholdType.BinaryInv, 21, 11);

        VectorOfInt            ids      = new VectorOfInt();
        VectorOfVectorOfPointF corners  = new VectorOfVectorOfPointF();
        VectorOfVectorOfPointF rejected = new VectorOfVectorOfPointF();

        ArucoInvoke.DetectMarkers(image, dico, corners, ids, arucoParam, rejected);

        if (ids.Size > 0)
        {
            ArucoInvoke.DrawDetectedMarkers(image, corners, ids, new MCvScalar(0, 0, 255));
        }

        CvInvoke.Imshow("Original", image);
        CvInvoke.Imshow("Gray", grayImg);
    }
Example #4
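        // Draws detected ArUco markers and ChArUco corners onto an RGB copy of the input image.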
        public static Mat Draw(Mat image, VectorOfInt markerIds, VectorOfVectorOfPointF markerCorners, VectorOfInt charucoIds, VectorOfPointF charucoCorners)
        {
            Mat result = image.ToImage <Rgb, byte>().Mat;

            ArucoInvoke.DrawDetectedMarkers(result, markerCorners, markerIds, new MCvScalar(255, 0, 0));
            ArucoInvoke.DrawDetectedCornersCharuco(result, charucoCorners, charucoIds, new MCvScalar(255, 255, 0));

            return(result);
        }
Example #5
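 // Draws the detected QR code outline, its center and reference corners, and an
 // orientation line, plus any detected ArUco markers, onto the frame's visual image.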
 public void Visualize(FrameResult fr)
 {
     if (fr.QRLocation != null)
     {
         fr.Visual.DrawPolyline(fr.QRLocation, true, new Bgr(0, 0, 255), 2);
         fr.Visual.Draw(new CircleF(fr.QRCenter, 6), new Bgr(0, 255, 0), 2);
         fr.Visual.Draw(new CircleF(fr.QRLocation[0], 6), new Bgr(255, 255, 255), 2);
         fr.Visual.Draw(new CircleF(fr.QRLocation[2], 6), new Bgr(255, 0, 0), 2);
         Point TopMiddle = PointOperations.Middle(fr.QRLocation[0], fr.QRLocation[3]);
         fr.Visual.Draw(new LineSegment2D(fr.QRCenter, TopMiddle), new Bgr(0, 255, 0), 2);
     }
     if (fr.ArucoLocation != null)
     {
         ArucoInvoke.DrawDetectedMarkers(fr.Visual, corners, ids, new MCvScalar(0, 255, 0));
     }
 }
Example #6
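        // Calibrates the camera from all accumulated ArUco detections, reports the
        // reprojection error, and resets the accumulated data.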
        private void CalibrateCamera()
        {
            int totalPoints = _markerCounterPerFrame.ToArray().Sum();

            if (totalPoints > 0)
            {
                double repError = ArucoInvoke.CalibrateCameraAruco(_allCorners, _allIds, _markerCounterPerFrame, ArucoBoard, _imageSize,
                                                                   _cameraMatrix, _distCoeffs, null, null, CalibType.Default, new MCvTermCriteria(30, double.Epsilon));

                UpdateMessage(String.Format("Camera calibration completed with reprojection error: {0}", repError));
                _allCorners.Clear();
                _allIds.Clear();
                _markerCounterPerFrame.Clear();
                _imageSize = Size.Empty;
            }
        }
Example #7
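        // For each recorded frame, fisheye-undistorts the ChArUco corner observations,
        // estimates the board pose, and returns the result of Validate over the frames
        // whose pose could be estimated.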
        public static double ValidateCharuco(int squaresX, int squaresY, float squareLength, float markerLength, PredefinedDictionaryName dictionary, Size imageSize, VectorOfInt charucoIds, VectorOfPointF charucoCorners, VectorOfInt markerCounterPerFrame, bool fisheye, Func <byte[], byte[]> GetRemoteChessboardCorner, Mat cameraMatrix, Mat distCoeffs)
        {
            VectorOfVectorOfPoint3D32F processedObjectPoints = new VectorOfVectorOfPoint3D32F();
            VectorOfVectorOfPointF     processedImagePoints  = new VectorOfVectorOfPointF();
            VectorOfPoint3D32F         rvecs = new VectorOfPoint3D32F();
            VectorOfPoint3D32F         tvecs = new VectorOfPoint3D32F();

            int k = 0;

            for (int i = 0; i < markerCounterPerFrame.Size; i++)
            {
                int                nMarkersInThisFrame       = markerCounterPerFrame[i];
                VectorOfPointF     currentImgPoints          = new VectorOfPointF();
                VectorOfPointF     currentImgPointsUndistort = new VectorOfPointF();
                VectorOfInt        currentIds       = new VectorOfInt();
                VectorOfPoint3D32F currentObjPoints = new VectorOfPoint3D32F();
                Mat                tvec             = new Mat();
                Mat                rvec             = new Mat();

                for (int j = 0; j < nMarkersInThisFrame; j++)
                {
                    currentImgPoints.Push(new PointF[] { charucoCorners[k] });
                    currentIds.Push(new int[] { charucoIds[k] });
                    currentObjPoints.Push(new MCvPoint3D32f[] { GetChessboardCorner(squaresX, squaresY, squareLength, markerLength, charucoIds[k], dictionary, GetRemoteChessboardCorner) });
                    k++;
                }

                Mat distCoeffsNew = new Mat(1, 4, DepthType.Cv64F, 1);
                distCoeffsNew.SetValue(0, 0, 0);
                distCoeffsNew.SetValue(0, 1, 0);
                distCoeffsNew.SetValue(0, 2, 0);
                distCoeffsNew.SetValue(0, 3, 0);

                Fisheye.UndistorPoints(currentImgPoints, currentImgPointsUndistort, cameraMatrix, distCoeffs, Mat.Eye(3, 3, DepthType.Cv64F, 1), Mat.Eye(3, 3, DepthType.Cv64F, 1));
                if (ArucoInvoke.EstimatePoseCharucoBoard(currentImgPointsUndistort, currentIds, CreateBoard(squaresX, squaresY, squareLength, markerLength, new Dictionary(dictionary)), Mat.Eye(3, 3, DepthType.Cv64F, 1), distCoeffsNew, rvec, tvec))
                {
                    rvecs.Push(new MCvPoint3D32f[] { new MCvPoint3D32f((float)rvec.GetValue(0, 0), (float)rvec.GetValue(1, 0), (float)rvec.GetValue(2, 0)) });
                    tvecs.Push(new MCvPoint3D32f[] { new MCvPoint3D32f((float)tvec.GetValue(0, 0), (float)tvec.GetValue(1, 0), (float)tvec.GetValue(2, 0)) });

                    processedImagePoints.Push(currentImgPoints);
                    processedObjectPoints.Push(currentObjPoints);
                }
            }

            return(Validate(processedObjectPoints, processedImagePoints, cameraMatrix, distCoeffs, rvecs, tvecs, fisheye));
        }
Example #8
        /// <summary>
        /// Processes a single incoming frame.
        /// </summary>
        /// <param name="frame">The frame to analyze.</param>
        /// <returns>Information about the objects found in the frame.</returns>
        public FrameResult ProcessFrame(Image <Bgr, byte> frame)
        {
            if (bwframe == null)
            {
                bwframe = new Image <Gray, byte>(frame.Size);
            }
            if (grayframe == null)
            {
                grayframe = new Image <Gray, byte>(frame.Size);
            }
            //if (vid == null) vid = new VideoWriter("video.avi", 24, frame.Size, true);
            vid?.Write(frame.Mat);
            grayframe.ConvertFrom(frame);
            int blocksize = 51;

            CvInvoke.AdaptiveThreshold(grayframe, bwframe, 255, Emgu.CV.CvEnum.AdaptiveThresholdType.MeanC, Emgu.CV.CvEnum.ThresholdType.Binary, blocksize, 5);
            FrameResult fr = new FrameResult(frame);

            ArucoInvoke.DetectMarkers(bwframe, ArucoDictionary, corners, ids, _detectorParameters, rejected);
            if (ids.Size > 0)
            {
                fr.ArucoLocation = corners[0].ToArray();
            }
            // Look for codes in the image
            List <ZBar.Symbol> symbols = Scanner.Scan(bwframe.ToBitmap());

            if (symbols.Count == 0)
            {
                return(fr);
            }
            ZBar.Symbol s = symbols[0];
            fr.QRLocation = s.Location.ToArray();
            fr.QRCenter   = PointOperations.Middle(fr.QRLocation);
            Point RightMiddle = PointOperations.Middle(fr.QRLocation[3], fr.QRLocation[2]);

            fr.TiltAngle  = Math.Atan2(RightMiddle.Y - fr.QRCenter.Y, RightMiddle.X - fr.QRCenter.X);
            fr.Commentary = s.Data;

            Visualize(fr);
            return(fr);
        }
Example #9
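        // Detects ArUco markers, refines them against the ChArUco board, interpolates the
        // ChArUco corners when markers are found, and returns both marker and corner results.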
        public static (VectorOfInt markerIds, VectorOfVectorOfPointF markerCorners, VectorOfInt charucoIds, VectorOfPointF charucoCorners) Detect(Mat image, int squaresX, int squaresY, float squareLength, float markerLength, PredefinedDictionaryName dictionary)
        {
            VectorOfInt            markerIds             = new VectorOfInt();
            VectorOfVectorOfPointF markerCorners         = new VectorOfVectorOfPointF();
            VectorOfInt            charucoIds            = new VectorOfInt();
            VectorOfPointF         charucoCorners        = new VectorOfPointF();
            VectorOfVectorOfPointF rejectedMarkerCorners = new VectorOfVectorOfPointF();

            DetectorParameters decParameters = DetectorParameters.GetDefault();

            ArucoInvoke.DetectMarkers(image, new Dictionary(dictionary), markerCorners, markerIds, decParameters, rejectedMarkerCorners);

            ArucoInvoke.RefineDetectedMarkers(image, CreateBoard(squaresX, squaresY, squareLength, markerLength, new Dictionary(dictionary)), markerCorners, markerIds, rejectedMarkerCorners, null, null, 10, 3, true, null, decParameters);

            if (markerIds.Size > 0)
            {
                ArucoInvoke.InterpolateCornersCharuco(markerCorners, markerIds, image, CreateBoard(squaresX, squaresY, squareLength, markerLength, new Dictionary(dictionary)), charucoCorners, charucoIds, null, null, 2);
            }

            return(markerIds, markerCorners, charucoIds, charucoCorners);
        }
Example #10
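        // Per-frame handler: detects and refines ArUco markers, draws them, estimates and draws
        // single-marker poses when calibration data is available, and optionally stores the
        // current detections for a later calibration run.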
        private void ProcessFrame(object sender, EventArgs arg)
        {
            if (_capture != null && _capture.Ptr != IntPtr.Zero)
            {
                _capture.Retrieve(_frame, 0);
                _frame.CopyTo(_frameCopy);

                //cameraImageBox.Image = _frame;
                using (VectorOfInt ids = new VectorOfInt())
                    using (VectorOfVectorOfPointF corners = new VectorOfVectorOfPointF())
                        using (VectorOfVectorOfPointF rejected = new VectorOfVectorOfPointF())
                        {
                            //DetectorParameters p = DetectorParameters.GetDefault();
                            ArucoInvoke.DetectMarkers(_frameCopy, ArucoDictionary, corners, ids, _detectorParameters, rejected);

                            if (ids.Size > 0)
                            {
                                ArucoInvoke.RefineDetectedMarkers(_frameCopy, ArucoBoard, corners, ids, rejected, null, null, 10, 3, true, null, _detectorParameters);
                                //cameraButton.Text = "Calibrate camera";
                                this.Invoke((Action) delegate
                                {
                                    useThisFrameButton.Enabled = true;
                                });
                                ArucoInvoke.DrawDetectedMarkers(_frameCopy, corners, ids, new MCvScalar(0, 255, 0));

                                if (!_cameraMatrix.IsEmpty && !_distCoeffs.IsEmpty)
                                {
                                    ArucoInvoke.EstimatePoseSingleMarkers(corners, markersLength, _cameraMatrix, _distCoeffs, rvecs, tvecs);
                                    for (int i = 0; i < ids.Size; i++)
                                    {
                                        using (Mat rvecMat = rvecs.Row(i))
                                            using (Mat tvecMat = tvecs.Row(i))
                                                using (VectorOfDouble rvec = new VectorOfDouble())
                                                    using (VectorOfDouble tvec = new VectorOfDouble())
                                                    {
                                                        double[] values = new double[3];
                                                        rvecMat.CopyTo(values);
                                                        rvec.Push(values);
                                                        tvecMat.CopyTo(values);
                                                        tvec.Push(values);


                                                        ArucoInvoke.DrawAxis(_frameCopy, _cameraMatrix, _distCoeffs, rvec, tvec,
                                                                             markersLength * 0.5f);
                                                    }
                                    }
                                }

                                if (_useThisFrame)
                                {
                                    _allCorners.Push(corners);
                                    _allIds.Push(ids);
                                    _markerCounterPerFrame.Push(new int[] { corners.Size });
                                    _imageSize = _frame.Size;
                                    UpdateMessage(String.Format("Using {0} points", _markerCounterPerFrame.ToArray().Sum()));
                                    _useThisFrame = false;
                                }
                            }
                            else
                            {
                                this.Invoke((Action) delegate
                                {
                                    useThisFrameButton.Enabled = false;
                                });

                                //cameraButton.Text = "Stop Capture";
                            }
                            cameraImageBox.Image = _frameCopy.Clone();
                        }
            }
            else
            {
                UpdateMessage("VideoCapture was not created");
            }
        }
Example #11
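    // Detects ArUco markers, uses markers 0-3 as board corners and 4-7 as object positions,
    // warps the board region to a 512x512 top-down view, and draws the board outline and
    // the markers detected in the warped view for debugging.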
    private void HandleWebcam(object sender, EventArgs e)
    {
        Image <Bgr, byte> origin = new Image <Bgr, byte>(webcam.Width, webcam.Height);

        if (webcam.IsOpened)
        {
            webcam.Retrieve(origin);
        }

        Image <Bgr, byte> markers     = origin.Clone();
        Image <Bgr, byte> output      = origin.Clone();
        Image <Bgr, byte> transformed = new Image <Bgr, byte>(512, 512);

        // Gather markers
        VectorOfInt            ids     = new VectorOfInt();
        VectorOfVectorOfPointF corners = new VectorOfVectorOfPointF();

        ArucoInvoke.DetectMarkers(origin, markerDict, corners, ids, arucoParameters);

        PointF[] wrapCorners = new PointF[4];
        wrapCorners[0] = new PointF(0, 0);
        wrapCorners[1] = new PointF(512, 0);
        wrapCorners[2] = new PointF(512, 512);
        wrapCorners[3] = new PointF(0, 512);
        Mat perspectiveMatrix = CvInvoke.GetPerspectiveTransform(boardCorners, wrapCorners);

        CvInvoke.WarpPerspective(origin, transformed, perspectiveMatrix, new Size(512, 512));

        for (int i = 0; i < ids.Size; i++)
        {
            switch (ids[i])
            {
            case 0:
            case 1:
            case 2:
            case 3:
                boardCorners[ids[i]] = getCentroid(corners[i]);
                break;

            case 4:
            case 5:
            case 6:
            case 7:
                markerPositions[ids[i] - 4] = getCentroid(corners[i]);
                break;
            }
        }
        markerWrapPositions = CvInvoke.PerspectiveTransform(markerPositions, perspectiveMatrix);

        CvInvoke.Line(output, new Point((int)boardCorners[0].X, (int)boardCorners[0].Y), new Point((int)boardCorners[1].X, (int)boardCorners[1].Y), new MCvScalar(0, 0, 255), 2);
        CvInvoke.Line(output, new Point((int)boardCorners[1].X, (int)boardCorners[1].Y), new Point((int)boardCorners[2].X, (int)boardCorners[2].Y), new MCvScalar(0, 0, 255), 2);
        CvInvoke.Line(output, new Point((int)boardCorners[2].X, (int)boardCorners[2].Y), new Point((int)boardCorners[3].X, (int)boardCorners[3].Y), new MCvScalar(0, 0, 255), 2);
        CvInvoke.Line(output, new Point((int)boardCorners[3].X, (int)boardCorners[3].Y), new Point((int)boardCorners[0].X, (int)boardCorners[0].Y), new MCvScalar(0, 0, 255), 2);
        CvInvoke.Line(output, new Point((int)boardCorners[0].X, (int)boardCorners[0].Y), new Point((int)boardCorners[2].X, (int)boardCorners[2].Y), new MCvScalar(0, 0, 255), 2);
        CvInvoke.Line(output, new Point((int)boardCorners[1].X, (int)boardCorners[1].Y), new Point((int)boardCorners[3].X, (int)boardCorners[3].Y), new MCvScalar(0, 0, 255), 2);

        // For graphical debugging purposes
        ArucoInvoke.DetectMarkers(transformed, markerDict, corners, ids, arucoParameters);
        if (ids.Size > 0)
        {
            ArucoInvoke.DrawDetectedMarkers(transformed, corners, ids, new MCvScalar(0, 0, 255));
        }

        // CvInvoke.Imshow("Markers view", markers);
        CvInvoke.Imshow("Output view", output);
        CvInvoke.Imshow("Wrap view", transformed);
        // CvInvoke.Imshow("Shape detection view", shapeDetection);
    }
Example #12
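        // Capture loop: detects ArUco markers, draws single-marker poses when calibration data
        // is available, derives a marker center estimate (markersX, markersY) from the corner
        // coordinates, and displays the undistorted frame.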
        private void ProcessFrame()
        {
            while (_capture != null && _capture.Ptr != IntPtr.Zero)
            {
                _capture.Retrieve(_frame, 0);
                _frame.CopyTo(_frameCopy);

                using (VectorOfInt ids = new VectorOfInt())
                    using (VectorOfVectorOfPointF corners = new VectorOfVectorOfPointF())
                        using (VectorOfVectorOfPointF rejected = new VectorOfVectorOfPointF())
                        {
                            //DetectorParameters p = DetectorParameters.GetDefault();
                            ArucoInvoke.DetectMarkers(_frameCopy, ArucoDictionary, corners, ids, _detectorParameters, rejected);

                            if (ids.Size > 0)
                            {
                                //ArucoInvoke.RefineDetectedMarkers(_frameCopy, ArucoBoard, corners, ids, rejected, null, null, 10, 3, true, null, _detectorParameters);
                                //cameraButton.Text = "Calibrate camera";
                                ArucoInvoke.DrawDetectedMarkers(_frameCopy, corners, ids, new MCvScalar(0, 255, 0));
                                if (!_cameraMatrix.IsEmpty && !_distCoeffs.IsEmpty)
                                {
                                    ArucoInvoke.EstimatePoseSingleMarkers(corners, markersLength, _cameraMatrix, _distCoeffs, rvecs, tvecs);
                                    for (int i = 0; i < ids.Size; i++)
                                    {
                                        using (Mat rvecMat = rvecs.Row(i))
                                            using (Mat tvecMat = tvecs.Row(i))
                                                using (VectorOfDouble rvec = new VectorOfDouble())
                                                    using (VectorOfDouble tvec = new VectorOfDouble())
                                                    {
                                                        double[] values = new double[3];
                                                        rvecMat.CopyTo(values);
                                                        rvec.Push(values);
                                                        tvecMat.CopyTo(values);
                                                        tvec.Push(values);
                                                        ArucoInvoke.DrawAxis(_frameCopy, _cameraMatrix, _distCoeffs, rvec, tvec,
                                                                             markersLength * 0.5f);
                                                    }
                                    }
                                }
                                float counterX = 0, counterY = 0;
                                int   count = corners.Size;
                                for (int i = 0; i < count; ++i)
                                {
                                    using (VectorOfPointF corner = corners[i])
                                    {
                                        PointF[] cor = corner.ToArray();
                                        for (int j = 0; j < cor.Length; j++)
                                        {
                                            //Console.WriteLine(cor[j].X);
                                            counterX += cor[j].X;
                                            counterY += cor[j].Y;
                                        }
                                        markersX = counterX / 4;
                                        markersY = counterY / 4;
                                    }
                                }
                            }
                        }
                CvInvoke.Undistort(_frameCopy, _output, _cameraMatrix, _distCoeffs);
                CvInvoke.Imshow("out", _output);
                CvInvoke.WaitKey(10);
                //Console.WriteLine("markersX is " + markersX);
                // Console.WriteLine("markersY is " + markersY);
            }
            //else
            //{
            Console.WriteLine("VideoCapture was not created");
            //}
        }
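        // Kinect color-frame handler: detects and refines ArUco markers every frameSkip frames,
        // draws marker poses when calibration data is available, accumulates detections while in
        // calibration mode, and runs CalibrateCameraAruco once enough frames have been collected.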
        void rgbReader_FrameArrived(object sender, ColorFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {
                if (frame != null)
                {
                    var width  = frame.FrameDescription.Width;
                    var height = frame.FrameDescription.Height;
                    var bitmap = frame.ToBitmap();
                    var image  = bitmap.ToOpenCVImage <Bgr, byte>().Mat;

                    //do something here with the IImage

                    int frameSkip = 1;
                    // process every frameSkip-th frame (currently every frame)

                    if (++frameCount == frameSkip)
                    {
                        frameCount = 0;
                        using (VectorOfInt ids = new VectorOfInt())
                            using (VectorOfVectorOfPointF corners = new VectorOfVectorOfPointF())
                                using (VectorOfVectorOfPointF rejected = new VectorOfVectorOfPointF())
                                {
                                    ArucoInvoke.DetectMarkers(image, ArucoDictionary, corners, ids, _detectorParameters, rejected);

                                    if (ids.Size > 0)
                                    {
                                        ArucoInvoke.RefineDetectedMarkers(image, ArucoBoard, corners, ids, rejected, null, null, 10, 3, true, null, _detectorParameters);

                                        ArucoInvoke.DrawDetectedMarkers(image, corners, ids, new MCvScalar(0, 255, 0));

                                        if (!_cameraMatrix.IsEmpty && !_distCoeffs.IsEmpty)
                                        {
                                            ArucoInvoke.EstimatePoseSingleMarkers(corners, markersLength, _cameraMatrix, _distCoeffs, rvecs, tvecs);
                                            for (int i = 0; i < ids.Size; i++)
                                            {
                                                using (Mat rvecmat = rvecs.Row(i))
                                                    using (Mat tvecmat = tvecs.Row(i))
                                                        using (VectorOfDouble rvec = new VectorOfDouble())
                                                            using (VectorOfDouble tvec = new VectorOfDouble())
                                                            {
                                                                double[] values = new double[3];
                                                                rvecmat.CopyTo(values);
                                                                rvec.Push(values);
                                                                tvecmat.CopyTo(values);
                                                                tvec.Push(values);

                                                                ArucoInvoke.DrawAxis(image, _cameraMatrix, _distCoeffs, rvec, tvec,
                                                                                     markersLength * 0.5f);
                                                            }
                                            }
                                        }

                                        if (calibrate)
                                        {
                                            _allCorners.Push(corners);
                                            _allIds.Push(ids);
                                            _markerCounterPerFrame.Push(new int[] { corners.Size });
                                            _imageSize  = image.Size;
                                            calibrated += 1;

                                            if (calibrated >= calibrationFreq)
                                            {
                                                calibrate = false;
                                            }
                                        }

                                        int totalPoints = _markerCounterPerFrame.ToArray().Sum();
                                        if (calibrated >= calibrationFreq && totalPoints > 0)
                                        {
                                            ArucoInvoke.CalibrateCameraAruco(_allCorners, _allIds, _markerCounterPerFrame, ArucoBoard, _imageSize,
                                                                             _cameraMatrix, _distCoeffs, null, null, CalibType.Default, new MCvTermCriteria(30, double.Epsilon));

                                            _allCorners.Clear();
                                            _allIds.Clear();
                                            _markerCounterPerFrame.Clear();
                                            _imageSize = System.Drawing.Size.Empty;
                                            calibrated = 0;
                                            Console.WriteLine("Calibrated");
                                        }
                                    }
                                }
                    }
                    //end doing something

                    this.background = image.Bitmap.XNATextureFromBitmap(background);
                    bitmap.Dispose();
                    image.Dispose();
                }
            }
        }
Example #14
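        // AForge new-frame handler: detects and refines ArUco markers, draws them and their poses,
        // updates the tracked position when exactly one marker is visible, and calibrates the
        // camera from frames the user has marked for use.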
        private void VideoSource_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
        {
            if (pictureBoxVideo.Image != null)
            {
                pictureBoxVideo.Image.Dispose();
            }
            if (eventArgs.Frame != null)
            {
                Image <Bgr, Byte> frameimage = eventArgs.Frame.ToImage <Bgr, Byte>().Clone();
                _frameCopy = frameimage.Mat;

                using (VectorOfInt ids = new VectorOfInt())
                    using (VectorOfVectorOfPointF corners = new VectorOfVectorOfPointF())
                        using (VectorOfVectorOfPointF rejected = new VectorOfVectorOfPointF())
                        {
                            //DetectorParameters p = DetectorParameters.GetDefault();
                            ArucoInvoke.DetectMarkers(_frameCopy, ArucoDictionary, corners, ids, _detectorParameters, rejected);

                            if (ids.Size > 0)
                            {
                                ArucoInvoke.RefineDetectedMarkers(_frameCopy, ArucoBoard, corners, ids, rejected, null, null, 10, 3, true, null, _detectorParameters);
                                //cameraButton.Text = "Calibrate camera";
                                this.Invoke((Action) delegate
                                {
                                    useThisFrameButton.Enabled = true;
                                });
                                ArucoInvoke.DrawDetectedMarkers(_frameCopy, corners, ids, new MCvScalar(0, 255, 0));

                                if (!_cameraMatrix.IsEmpty && !_distCoeffs.IsEmpty)
                                {
                                    ArucoInvoke.EstimatePoseSingleMarkers(corners, markersLength, _cameraMatrix, _distCoeffs, rvecs, tvecs);
                                    for (int i = 0; i < ids.Size; i++)
                                    {
                                        using (Mat rvecMat = rvecs.Row(i))
                                            using (Mat tvecMat = tvecs.Row(i))
                                                using (VectorOfDouble rvec = new VectorOfDouble())
                                                    using (VectorOfDouble tvec = new VectorOfDouble())
                                                    {
                                                        double[] values = new double[3];
                                                        rvecMat.CopyTo(values);
                                                        rvec.Push(values);
                                                        tvecMat.CopyTo(values);
                                                        tvec.Push(values);
                                                        if (ids.Size == 1)
                                                        {
                                                            UpdatePosition(tvec, rvec);
                                                        }

                                                        ArucoInvoke.DrawAxis(_frameCopy, _cameraMatrix, _distCoeffs, rvec, tvec,
                                                                             markersLength * 0.5f);
                                                    }
                                    }
                                }

                                if (_useThisFrame)
                                {
                                    _allCorners.Push(corners);
                                    _allIds.Push(ids);
                                    _markerCounterPerFrame.Push(new int[] { corners.Size });
                                    _imageSize = _frameCopy.Size;
                                    UpdateMessage(String.Format("Using {0} points", _markerCounterPerFrame.ToArray().Sum()));
                                    _useThisFrame = false;
                                    CalibrateCamera();
                                }
                            }
                            else
                            {
                                this.Invoke((Action) delegate
                                {
                                    useThisFrameButton.Enabled = false;
                                });

                                //cameraButton.Text = "Stop Capture";
                            }
                            pictureBoxVideo.Image = _frameCopy.ToBitmap();
                        }
            }
        }