Esempio n. 1
0
        /// <summary>
        /// Grabs the next frame from the capture device, searches it for the
        /// chessboard calibration pattern and, when all corners are found,
        /// refines them to sub-pixel accuracy and draws them onto the gray image.
        /// Transitions the calibrator to <c>CornersRecognized</c> when the pattern
        /// is fully detected while waiting for recognition.
        /// </summary>
        public void ProcessFrame()
        {
            // Nothing left to do once the corners are recognized or calibration
            // is finished. Use short-circuiting '||' (the original bitwise '|'
            // evaluated both operands unnecessarily).
            if (this.State == CalibratorState.CornersRecognized || this.State == CalibratorState.Calibrated)
            {
                return;
            }

            this.OriginalImage = m_Capture.QueryFrame();
            // Corner detection operates on a single-channel grayscale image.
            this.GrayImage     = this.OriginalImage.Convert <Gray, Byte>();

            bool foundAllCorners = CameraCalibration.FindChessboardCorners(
                this.GrayImage,
                this.ChessBoard.PatternSize,
                Emgu.CV.CvEnum.CALIB_CB_TYPE.ADAPTIVE_THRESH | Emgu.CV.CvEnum.CALIB_CB_TYPE.FILTER_QUADS,
                out m_FoundCorners);

            // FindCornerSubPix expects one corner array per channel; we have one channel.
            PointF[][] foundPointsForChannels = new PointF[][] { m_FoundCorners };
            if (foundAllCorners)
            {
                // Stop refining after 30 iterations or when movement drops below 0.1 px.
                MCvTermCriteria terminationCriteria;
                terminationCriteria.max_iter = 30;
                terminationCriteria.epsilon  = 0.1;
                terminationCriteria.type     = Emgu.CV.CvEnum.TERMCRIT.CV_TERMCRIT_EPS | Emgu.CV.CvEnum.TERMCRIT.CV_TERMCRIT_ITER;

                // 11x11 search window; (-1,-1) means no dead zone in the middle.
                this.GrayImage.FindCornerSubPix(foundPointsForChannels, new Size(11, 11), new Size(-1, -1), terminationCriteria);

                CameraCalibration.DrawChessboardCorners(this.GrayImage, this.ChessBoard.PatternSize, m_FoundCorners, foundAllCorners);
            }

            // We are done with processing. If needed we flip the images for display purposes only.
            Emgu.CV.CvEnum.FLIP flipType = Emgu.CV.CvEnum.FLIP.NONE;
            if (this.FlipHorizontal)
            {
                flipType = Emgu.CV.CvEnum.FLIP.HORIZONTAL;
            }
            if (this.FlipVertical)
            {
                // Compound assignment is sufficient; the original
                // 'flipType = flipType |= ...' double-assigned the same value.
                flipType |= Emgu.CV.CvEnum.FLIP.VERTICAL;
            }

            this.OriginalImage._Flip(flipType);
            this.GrayImage._Flip(flipType);

            if (foundAllCorners && this.State == CalibratorState.WaitingForCornersRecognition)
            {
                this.State = CalibratorState.CornersRecognized;
            }
        }
Esempio n. 2
0
        /// <summary>
        /// Grabs the next frame, detects and refines the chessboard corners, and —
        /// when the pattern is fully visible and the configured delay between
        /// captures has elapsed — saves the frame to disk as a calibration image,
        /// briefly inverts the displayed image as visual feedback, and raises the
        /// changed event. Stops capturing once <c>CaptureSettings.ImagesCount</c>
        /// images have been collected.
        /// </summary>
        public void ProcessFrame()
        {
            // Guard: no capture device attached yet.
            if (m_Capture == null)
            {
                return;
            }
            this.OriginalImage = m_Capture.QueryFrame();
            // Corner detection operates on a single-channel grayscale image.
            this.GrayImage     = this.OriginalImage.Convert <Gray, Byte>();

            bool foundAllCorners = CameraCalibration.FindChessboardCorners(
                this.GrayImage,
                this.CaptureSettings.ChessBoard.PatternSize,
                Emgu.CV.CvEnum.CALIB_CB_TYPE.ADAPTIVE_THRESH | Emgu.CV.CvEnum.CALIB_CB_TYPE.FILTER_QUADS,
                out m_FoundCorners);

            // FindCornerSubPix expects one corner array per channel; we have one channel.
            PointF[][] foundPointsForChannels = new PointF[][] { m_FoundCorners };
            if (foundAllCorners)
            {
                // Stop refining after 30 iterations or when movement drops below 0.05 px.
                MCvTermCriteria terminationCriteria;
                terminationCriteria.max_iter = 30;
                terminationCriteria.epsilon  = 0.05;
                terminationCriteria.type     = Emgu.CV.CvEnum.TERMCRIT.CV_TERMCRIT_EPS | Emgu.CV.CvEnum.TERMCRIT.CV_TERMCRIT_ITER;

                // 11x11 search window; (-1,-1) means no dead zone in the middle.
                this.GrayImage.FindCornerSubPix(foundPointsForChannels, new Size(11, 11), new Size(-1, -1), terminationCriteria);

                CameraCalibration.DrawChessboardCorners(this.GrayImage, this.CaptureSettings.ChessBoard.PatternSize, m_FoundCorners, foundAllCorners);
            }

            // We are done with processing. If needed we flip the images for display purposes only.
            Emgu.CV.CvEnum.FLIP flipType = Emgu.CV.CvEnum.FLIP.NONE;
            if (this.FlipHorizontal)
            {
                flipType = Emgu.CV.CvEnum.FLIP.HORIZONTAL;
            }
            if (this.FlipVertical)
            {
                // Compound assignment is sufficient; the original
                // 'flipType = flipType |= ...' double-assigned the same value.
                flipType |= Emgu.CV.CvEnum.FLIP.VERTICAL;
            }

            this.OriginalImage._Flip(flipType);
            this.GrayImage._Flip(flipType);

            if (!foundAllCorners)
            {
                return;
            }

            if (this.CurrentCapturedImagesCount >= this.CaptureSettings.ImagesCount)
            {
                // We got already all required images.
                return;
            }

            DateTime utcNow = DateTime.UtcNow;

            if (utcNow.Ticks - m_TimeOfLastCapture.Ticks < this.CaptureSettings.WaitBetweenCaptures.Ticks)
            {
                // We need to wait longer between captures.
                return;
            }

            // We capture the image.
            m_TimeOfLastCapture = utcNow;
            this.CurrentCapturedImagesCount++;
            this.OriginalImage.Save(this.CaptureSettings.GetFilePath(this.CurrentCapturedImagesCount));
            // Invert the displayed frame — presumably a visual "snapshot taken"
            // flash for the user; confirm against the UI that consumes OriginalImage.
            this.OriginalImage = this.OriginalImage.Not();

            RaiseChangedEvent();
        }