// orients the image so that the plane of the sign is parallel to the camera
        Image <Bgr, Byte> orient(Image <Bgr, Byte> i)
        {
            if (annotation_file == "")
            {
                return(i);
            }

            image = i.Clone();
            List <Point> points = (new ReadAnnotationPoints.AnnotationPoints(annotation_file)).Points;

            PointF[] src = new PointF[4];
            PointF[] des = new PointF[4];

            // Canonical diamond (left, top, right and bottom corners) that the
            // sign occupies in the 400x400 output image.
            src[0] = new PointF(30f, 200f);
            src[1] = new PointF(200f, 30f);
            src[2] = new PointF(370f, 200f);
            src[3] = new PointF(200f, 370f);

            // Annotated corner points of the sign in the source image.
            for (int x = 0; x < 4; x++)
            {
                des[x] = points[x];
            }

            HomographyMatrix homography = CameraCalibration.GetPerspectiveTransform(src, des);

            image = image.WarpPerspective(homography, image.Width, image.Height, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR,
                                          Emgu.CV.CvEnum.WARP.CV_WARP_INVERSE_MAP, new Bgr(200, 0, 0));

            image.ROI = new Rectangle(0, 0, 400, 400);

            return(image);
        }
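
A quick sanity check for the recovered mapping, as a minimal sketch: HomographyMatrix.ProjectPoints (the in-place projection used elsewhere in these examples) applied to the canonical diamond should land on the annotated corners. The checkOrientation helper is hypothetical, not part of the original code.

        // Hypothetical helper: verify that the homography maps the canonical
        // diamond onto the annotated sign corners.
        void checkOrientation(HomographyMatrix homography, PointF[] src, List <Point> points)
        {
            PointF[] projected = (PointF[])src.Clone();
            homography.ProjectPoints(projected); // in-place perspective projection

            for (int x = 0; x < 4; x++)
            {
                Console.WriteLine("{0} -> {1} (annotated: {2})", src[x], projected[x], points[x]);
            }
        }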
Example #2
        public void Update(Image <Bgr, byte> nowImage)
        {
            DebugImage = nowImage;
            DerivePitchEdges(nowImage);

            TopLeft     = GetTopLeft();
            TopRight    = GetTopRight();
            BottomLeft  = GetBottomLeft();
            BottomRight = GetBottomRight();

            PointF[] sourcePoints = { TopLeft, TopRight, BottomLeft, BottomRight };
            PointF[] destPoints   =
            {
                new PointF(Instep,              Border),
                new PointF(PitchWidth - Instep, Border),
                new PointF(Instep,              PitchHeight + Border),
                new PointF(PitchWidth - Instep, PitchHeight + Border)
            };

            m_WarpMat    = CameraCalibration.GetPerspectiveTransform(sourcePoints, destPoints);
            m_WarpMatInv = CameraCalibration.GetPerspectiveTransform(destPoints, sourcePoints);

            // Note: the output width is hard-coded to 1205 pixels.
            PerspImage = nowImage.WarpPerspective(m_WarpMat, 1205, (int)(PitchHeight + Border * 2),
                                                  Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC,
                                                  Emgu.CV.CvEnum.WARP.CV_WARP_FILL_OUTLIERS,
                                                  new Bgr(200, 200, 200)).Convert <Bgr, byte>();
            ThresholdedPerspImage = ImageProcess.ThresholdHsv(PerspImage, 22, 89, 33, 240, 40, 250).
                                    ThresholdBinaryInv(new Gray(100), new Gray(255));

            //DerivePolePositions();
        }
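
Because both the forward and inverse warps are stored, results found in the rectified pitch image can be mapped back onto the raw frame. A minimal sketch, assuming the in-place HomographyMatrix.ProjectPoints API; the PitchToImage helper is hypothetical.

        // Hypothetical helper: map a point from rectified pitch coordinates
        // back into raw camera-image coordinates via the inverse warp.
        PointF PitchToImage(PointF pitchPoint)
        {
            PointF[] pts = { pitchPoint };
            m_WarpMatInv.ProjectPoints(pts); // in-place projection
            return(pts[0]);
        }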
Example #3
        public MainForm()
        {
            InitializeComponent();
            m_CameraParameters = CameraParameters.Load(@"..\..\..\..\CalibrationFiles\MicrosoftCinema\Focus14\1280x720\MicrosoftCinemaFocus14_1280x720.txt");

            m_RawImage         = new Image <Bgr, byte>(@"..\..\..\..\CalibrationFiles\MicrosoftCinema\Focus14\1280x720\GroundProjectionCalibration.jpg");
            this.CurrentImage  = m_RawImage.Clone();
            this.BirdsEyeImage = m_RawImage.Clone();

            InitializeUndistortMap(m_RawImage);

            Undistort(m_RawImage, this.CurrentImage);

            this.ChessBoard = new ChessBoard(8, 10);
            PointF[] foundCorners = CollectImageCorners();

            DrawFoundCorners(this.CurrentImage, foundCorners);

            // We pick four corners for perspective transform
            PointF[] outerCorners = new PointF[4];
            outerCorners[0] = foundCorners[0];
            outerCorners[1] = foundCorners[this.ChessBoard.PatternSize.Width - 1];
            outerCorners[2] = foundCorners[this.ChessBoard.PatternSize.Width * this.ChessBoard.PatternSize.Height - this.ChessBoard.PatternSize.Width];
            outerCorners[3] = foundCorners[this.ChessBoard.PatternSize.Width * this.ChessBoard.PatternSize.Height - 1];
            DrawOuterCorners(this.CurrentImage, outerCorners);

            float side;
            float bottom;
            float centerX;

            side   = 25.0f;
            bottom = 310.0f;

            PointF[] physicalPointsForCalculation = new PointF[4];
            physicalPointsForCalculation[0] = new PointF(-3 * side, bottom + 8 * side);
            physicalPointsForCalculation[1] = new PointF(+3 * side, bottom + 8 * side);
            physicalPointsForCalculation[2] = new PointF(-3 * side, bottom);
            physicalPointsForCalculation[3] = new PointF(+3 * side, bottom);

            m_BirdsEyeViewTransformationForCalculation = CameraCalibration.GetPerspectiveTransform(outerCorners, physicalPointsForCalculation);
            HomographyMatrixSupport.Save(m_BirdsEyeViewTransformationForCalculation, "BirdsEyeViewTransformationForCalculation.txt");

            side    = 8f;
            bottom  = 700.0f;
            centerX = (float)m_CameraParameters.Intrinsic.Cx;

            PointF[] physicalPointsForUI = new PointF[4];
            physicalPointsForUI[0] = new PointF(-3 * side + centerX, bottom - 8 * side);
            physicalPointsForUI[1] = new PointF(+3 * side + centerX, bottom - 8 * side);
            physicalPointsForUI[2] = new PointF(-3 * side + centerX, bottom);
            physicalPointsForUI[3] = new PointF(+3 * side + centerX, bottom);

            m_BirdsEyeViewTransformationForUI = CameraCalibration.GetPerspectiveTransform(outerCorners, physicalPointsForUI);
            HomographyMatrixSupport.Save(m_BirdsEyeViewTransformationForUI, "BirdsEyeViewTransformationForUI.txt");

            //m_BirdsEyeViewTransformationForCalculation.ProjectPoints(outerCorners);

            CreateAndDrawBirdsEyeView();
        }
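
CreateAndDrawBirdsEyeView itself is not part of this listing; the sketch below is a hypothetical reconstruction, reusing the WarpPerspective overload seen in the other examples to render the bird's-eye view with the UI homography.

        // Hypothetical sketch of CreateAndDrawBirdsEyeView; the real body is not shown.
        private void CreateAndDrawBirdsEyeView()
        {
            this.BirdsEyeImage = this.CurrentImage.WarpPerspective(
                m_BirdsEyeViewTransformationForUI,
                Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR,
                Emgu.CV.CvEnum.WARP.CV_WARP_FILL_OUTLIERS,
                new Bgr(0, 0, 0));
        }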
Example #4
        public void homographie(List <IntPoint> LimiteTerain, bool imgCol)
        {
            /* AForge alternative => works, but slow
             * UnImgReel = UnmanagedImage.FromManagedImage(imgReel);
             *
             * // Replace the image with the field detected inside it
             * if (LimiteTerain.Count == 4)
             * {
             *   QuadrilateralTransformation quadrilateralTransformation = new QuadrilateralTransformation(LimiteTerain, UnImgReel.Width, UnImgReel.Height);
             *   UnImgReel = quadrilateralTransformation.Apply(UnImgReel);
             * }
             */

            if (LimiteTerain.Count == 4)
            {
                int wid = max(LimiteTerain[0].X, LimiteTerain[1].X, LimiteTerain[2].X, LimiteTerain[3].X) - min(LimiteTerain[0].X, LimiteTerain[1].X, LimiteTerain[2].X, LimiteTerain[3].X);
                int hei = max(LimiteTerain[0].Y, LimiteTerain[1].Y, LimiteTerain[2].Y, LimiteTerain[3].Y) - min(LimiteTerain[0].Y, LimiteTerain[1].Y, LimiteTerain[2].Y, LimiteTerain[3].Y);

                Image <Bgr, Byte> a    = new Image <Bgr, byte>(wid, hei);
                PointF[]          pts1 = new PointF[4];
                PointF[]          pts2 = new PointF[4];

                // Corner correspondence: LimiteTerain is assumed to run clockwise
                // (top-left, top-right, bottom-right, bottom-left), so field corners
                // 2 and 3 swap places to pair with the rectangle corners below.
                pts1[0] = new PointF(0, 0);
                pts1[1] = new PointF(wid, 0);
                pts1[2] = new PointF(0, hei);
                pts1[3] = new PointF(wid, hei);

                pts2[0] = new PointF(LimiteTerain[0].X, LimiteTerain[0].Y);
                pts2[1] = new PointF(LimiteTerain[1].X, LimiteTerain[1].Y);
                pts2[2] = new PointF(LimiteTerain[3].X, LimiteTerain[3].Y);
                pts2[3] = new PointF(LimiteTerain[2].X, LimiteTerain[2].Y);

                homography = CameraCalibration.GetPerspectiveTransform(pts2, pts1);
                MCvScalar s = new MCvScalar(0, 0, 0);

                //CvInvoke.cvFindHomography(matSource, matDest, homography, Emgu.CV.CvEnum.HOMOGRAPHY_METHOD.DEFAULT, 3.0, maskMat);
                CvInvoke.cvWarpPerspective(imgRecu, a, homography, (int)Emgu.CV.CvEnum.INTER.CV_INTER_NN, s);
                // CvInvoke.cvWarpAffine(imgRecu, a, homography, (int)Emgu.CV.CvEnum.INTER.CV_INTER_NN, s);

                imgRecu   = a;
                UnImgReel = UnmanagedImage.FromManagedImage(a.ToBitmap());
            }
            else
            {
                UnImgReel = UnmanagedImage.FromManagedImage(imgReel);
            }


            if (imgCol)
            {
                ImgColor = UnImgReel.Clone();
            }
        }
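
The four-argument min and max helpers used above are not included in the listing; a minimal sketch of what they presumably look like:

        // Hypothetical helpers assumed by homographie above.
        static int min(int a, int b, int c, int d)
        {
            return(Math.Min(Math.Min(a, b), Math.Min(c, d)));
        }

        static int max(int a, int b, int c, int d)
        {
            return(Math.Max(Math.Max(a, b), Math.Max(c, d)));
        }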
Example #5
        /// <summary>
        /// Recover the homography matrix using RANSAC. If the matrix cannot be recovered, null is returned.
        /// </summary>
        /// <param name="matchedFeatures">The matched features; only the first ModelFeature will be considered</param>
        /// <returns>The homography matrix; if it cannot be found, null is returned</returns>
        public static HomographyMatrix GetHomographyMatrixFromMatchedFeatures(MatchedSURFFeature[] matchedFeatures)
        {
            if (matchedFeatures.Length < 4)
            {
                return(null);
            }

            HomographyMatrix homography;

            if (matchedFeatures.Length < _randsacRequiredMatch)
            { // Too few points for RANSAC; use 4 points only
                PointF[] pts1 = new PointF[4];
                PointF[] pts2 = new PointF[4];
                for (int i = 0; i < 4; i++)
                {
                    pts1[i] = matchedFeatures[i].SimilarFeatures[0].Feature.Point.pt;
                    pts2[i] = matchedFeatures[i].ObservedFeature.Point.pt;
                }
                homography = CameraCalibration.GetPerspectiveTransform(pts1, pts2);
            }
            else
            {
                // use RANSAC to find the homography matrix
                PointF[] pts1 = new PointF[matchedFeatures.Length];
                PointF[] pts2 = new PointF[matchedFeatures.Length];
                for (int i = 0; i < matchedFeatures.Length; i++)
                {
                    pts1[i] = matchedFeatures[i].SimilarFeatures[0].Feature.Point.pt;
                    pts2[i] = matchedFeatures[i].ObservedFeature.Point.pt;
                }

                homography = CameraCalibration.FindHomography(
                    pts1, //points on the model image
                    pts2, //points on the observed image
                    CvEnum.HOMOGRAPHY_METHOD.RANSAC,
                    3);
                if (homography == null)
                {
                    return(null);
                }
            }

            if (homography.IsValid(10))
            {
                return(homography);
            }
            else
            {
                homography.Dispose();
                return(null);
            }
        }
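
A minimal usage sketch (OutlineModel and its modelSize parameter are illustrative, not part of the original): recover the matrix from SURF matches, then project the model image's corners into the observed image to outline the detected object.

        // Hypothetical usage: outline the model image inside the observed image.
        public static PointF[] OutlineModel(MatchedSURFFeature[] matchedFeatures, Size modelSize)
        {
            HomographyMatrix homography = GetHomographyMatrixFromMatchedFeatures(matchedFeatures);
            if (homography == null)
            {
                return(null);
            }

            PointF[] corners =
            {
                new PointF(0, 0),
                new PointF(modelSize.Width, 0),
                new PointF(modelSize.Width, modelSize.Height),
                new PointF(0, modelSize.Height)
            };
            homography.ProjectPoints(corners); // in-place projection into observed-image coordinates
            return(corners);
        }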
        /// <summary>
        /// Return a perspective crop of the image, defined by 4 points.
        /// </summary>
        /// <param name="Image">Input image</param>
        /// <param name="OutImageSize">Output image size.</param>
        /// <param name="CropPoints">Four source points (top-left, top-right, bottom-right, bottom-left) outlining the region to crop.</param>
        /// <returns>The cropped, perspective-corrected image.</returns>
        public static Image <Bgr, Byte> GetPerspectiveFlatImage(Image <Bgr, Byte> Image, Size OutImageSize, PointF[] CropPoints)
        {
            // Destination points: the corners of the output image.
            PointF[] destinationPoints = new PointF[4];
            destinationPoints[0] = new PointF(0, 0);
            destinationPoints[1] = new PointF(OutImageSize.Width, 0);
            destinationPoints[2] = new PointF(OutImageSize.Width, OutImageSize.Height);
            destinationPoints[3] = new PointF(0, OutImageSize.Height);

            // Generate warp matrix.
            HomographyMatrix myWarpMatrix = CameraCalibration.GetPerspectiveTransform(CropPoints, destinationPoints);

            // Generate image.
            Image <Bgr, byte> newImage = Image.WarpPerspective(myWarpMatrix, Emgu.CV.CvEnum.INTER.CV_INTER_NN, Emgu.CV.CvEnum.WARP.CV_WARP_FILL_OUTLIERS, new Bgr(20, 20, 20));

            newImage.ROI = new Rectangle(new Point(0, 0), OutImageSize);

            // Return the result image.
            return(newImage);
        }
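
A minimal usage sketch (the file name and corner coordinates are hypothetical): flatten a skewed quadrilateral region of a photo into a 400x300 image. Note that CropPoints must be ordered top-left, top-right, bottom-right, bottom-left to match the destination points above.

        // Hypothetical usage of GetPerspectiveFlatImage.
        public static Image <Bgr, Byte> CropDemo()
        {
            Image <Bgr, Byte> photo = new Image <Bgr, Byte>("document.jpg");
            PointF[] corners =
            {
                new PointF(120, 80),   // top-left
                new PointF(520, 95),   // top-right
                new PointF(540, 400),  // bottom-right
                new PointF(100, 380)   // bottom-left
            };

            return(GetPerspectiveFlatImage(photo, new Size(400, 300), corners));
        }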