        /// <summary>
        /// Computes the pattern pose from the matched 2D/3D point correspondences using the PnP algorithm
        /// </summary>
        /// <param name="pattern">Pattern providing the 3D reference points</param>
        /// <param name="calibration">Camera intrinsics and distortion coefficients</param>
        public void ComputePose(Pattern pattern, CameraCalibrationInfo calibration)
        {
            VectorOfFloat rotationVector32f    = new VectorOfFloat(); // rotation vector converted to 32-bit floats
            VectorOfFloat translationVector32f = new VectorOfFloat(); // translation vector converted to 32-bit floats
            Mat           rotationVector       = new Mat();           // Rodrigues rotation vector estimated by SolvePnP
            Mat           translationVector    = new Mat();           // translation vector estimated by SolvePnP

            var px1 = pattern.Points3d.ToArray();                                  // 3D reference points on the pattern
            var px2 = Array.ConvertAll<Point, PointF>(_points2d.ToArray(), p => p); // matched 2D image points, as PointF for SolvePnP

            CvInvoke.SolvePnP(px1, px2, calibration.Intrinsic, calibration.Distortion, rotationVector, translationVector);

            //var p1 = new Matrix<double>(rotationVector.Rows, rotationVector.Cols, rotationVector.Ptr);
            //var p2 = new Matrix<double>(translationVector.Rows, translationVector.Cols, translationVector.Ptr);

            rotationVector.ConvertTo(rotationVector32f, DepthType.Cv32F);
            translationVector.ConvertTo(translationVector32f, DepthType.Cv32F);

            Matrix<double> rotationMat = new Matrix<double>(3, 3);

            // Convert the Rodrigues rotation vector into a 3x3 rotation matrix
            CvInvoke.Rodrigues(rotationVector32f, rotationMat);

            // Copy to transformation matrix
            for (int col = 0; col < 3; col++)
            {
                for (int row = 0; row < 3; row++)
                {
                    _pose3d.SetRotationMatrixValue(row, col, (float)rotationMat[row, col]); // Copy rotation component
                }
                _pose3d.SetTranslationVectorValue(col, translationVector32f[col]);          // Copy translation component
            }

            // SolvePnP finds the camera location w.r.t. the marker pose; invert it to get the marker pose w.r.t. the camera.
            _pose3d = _pose3d.GetInverted();
        }
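
        // Hedged sketch (not part of the original class; the name and signature are assumptions):
        // for a rigid transform, the inversion done by GetInverted above presumably amounts to
        // R' = R^T and t' = -R^T * t, illustrated here on a plain 3x3 Matrix<double> and a
        // translation array.
        private static void InvertRigidTransform(Matrix<double> rotation, double[] translation,
                                                 out Matrix<double> rotationInv, out double[] translationInv)
        {
            rotationInv    = rotation.Transpose(); // R' = R^T (rotation matrices are orthonormal)
            translationInv = new double[3];
            for (int row = 0; row < 3; row++)
            {
                double sum = 0;
                for (int col = 0; col < 3; col++)
                {
                    sum += rotationInv[row, col] * translation[col];
                }
                translationInv[row] = -sum;        // t' = -R^T * t
            }
        }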
        private Matrix4 BuildProjectionMatrix(CameraCalibrationInfo calibration, int screen_width, int screen_height)
        {
            float nearPlane = 0.01f;  // Near clipping distance
            float farPlane  = 100.0f; // Far clipping distance

            // Camera intrinsics
            double fX = calibration.Fx; // Focal length along the x axis
            double fY = calibration.Fy; // Focal length along the y axis (typically close to fX)
            double cX = calibration.Cx; // Principal point x
            double cY = calibration.Cy; // Principal point y

            var projectionMatrix = new Matrix4(
                (float)(-2.0 * fX / screen_width),
                0.0f,
                0.0f,
                0.0f,
                //----------
                0.0f,
                (float)(2.0f * fY / screen_height),
                0.0f,
                0.0f,
                //----------
                (float)(2.0f * cX / screen_width - 1.0f),
                (float)(2.0f * cY / screen_height - 1.0f),
                -(farPlane + nearPlane) / (farPlane - nearPlane),
                -1.0f,
                //----------
                0.0f,
                0.0f,
                -2.0f * farPlane * nearPlane / (farPlane - nearPlane),
                0.0f
                );

            return(projectionMatrix);
        }
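
        // Hedged sanity check (illustration only, not part of the original class): recompute the
        // scale and offset terms of the projection matrix for an assumed 640x480 frame and the
        // calibration constants used in Run() below, showing how the intrinsics map to
        // normalized device coordinates.
        private static void PrintProjectionTerms()
        {
            double fX = 560.764656335266, fY = 562.763179958161; // focal lengths (pixels)
            double cX = 295.849138757436, cY = 255.022208986073; // principal point (pixels)
            int w = 640, h = 480;                                // assumed frame size

            Console.WriteLine(-2.0 * fX / w);       // x scale  ~ -1.752
            Console.WriteLine(2.0 * fY / h);        // y scale  ~  2.345
            Console.WriteLine(2.0 * cX / w - 1.0);  // x offset ~ -0.0755
            Console.WriteLine(2.0 * cY / h - 1.0);  // y offset ~  0.0626
        }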
        public GameWindow(CameraCalibrationInfo calibration, Mat img)
            // Set the window resolution, title, and default behaviour,
            // and ask for an OpenGL 3.0 forward-compatible context
            : base(img.Width, img.Height, GraphicsMode.Default, "OpenTK Intro",
                   GameWindowFlags.Default, DisplayDevice.Default,
                   3, 0, GraphicsContextFlags.ForwardCompatible)
        {
            _calibration     = calibration;
            _backgroundImage = img;

            // init capture buffer
            _captureBuffer = new ConcurrentQueue <ProcessedFrame>();

            // Producer task: grab frames from the capture source and fill the buffer for the render loop
            Task.Run(() =>
            {
                int counter             = 0;
                Transformation lastPose = new Transformation();
                while (true)
                {
                    if (Capture != null)
                    {
                        var frame = Capture.QueryFrame();
                        if (frame == null)
                        {
                            break;
                        }
                        if (counter % 8 == 0)
                        {
                            // Run the full pattern detection and pose estimation on every 8th frame only
                            var processedFrame = ProcessFrame(frame);
                            lastPose           = processedFrame.PatternPose;
                            _captureBuffer.Enqueue(processedFrame);
                        }
                        else
                        {
                            // For the frames in between, reuse the last estimated pose
                            _captureBuffer.Enqueue(new ProcessedFrame {
                                PatternPose = lastPose, IsPatternPresent = true, Image = frame
                            });
                        }

                        counter++;

                        // Start rendering once enough frames have been buffered
                        if (_captureBuffer.Count > 50)
                        {
                            _render = true;
                        }
                    }
                }
            });

            Console.WriteLine("gl version: " + GL.GetString(StringName.Version));
        }
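
        // Hedged sketch (illustration only; the real OnRenderFrame/OnUpdateFrame overrides are not
        // shown in this excerpt): once _render is set, a render tick could dequeue one buffered
        // frame at a time so playback stays roughly in step with the FPS passed to Run(fps) below.
        private bool TryGetNextFrame(out ProcessedFrame frame)
        {
            frame = default(ProcessedFrame);
            return _render && _captureBuffer.TryDequeue(out frame);
        }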
        public static void Run(string path, string patternPath, SourceType type)
        {
            FeaturesUtils.Init();

            var calibration = new CameraCalibrationInfo(560.764656335266f, 562.763179958161f, 295.849138757436f, 255.022208986073f); // camera intrinsics

            var patternImage    = CvInvoke.Imread(patternPath, Emgu.CV.CvEnum.LoadImageType.Unchanged);
            var patternDetector = new PatternDetector(patternImage);

            if (type == SourceType.Image)
            {
                var image = CvInvoke.Imread(path, Emgu.CV.CvEnum.LoadImageType.Unchanged);
                ShowWindow(image, patternImage, patternDetector, calibration);
            }
            else if (type == SourceType.Video)
            {
                var capture = new Capture(path);
                var image   = capture.QueryFrame();
                ShowWindow(image, patternImage, patternDetector, calibration, capture);
            }
        }
        private static void ShowWindow(Mat img, Mat patternImage, PatternDetector patternDetector, CameraCalibrationInfo calibration, Capture capture = null)
        {
            double fps = capture != null ? capture.GetCaptureProperty(CapProp.Fps) : 30; // fall back to 30 FPS when no capture source is given

            using (var window = new GameWindow(calibration, img))
            {
                window.PatternDetector = patternDetector;
                window.Pattern         = patternImage;
                window.Capture         = capture;
                window.Run(fps);
            }
        }
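
        // Hedged usage sketch: the entry point below and the file paths are assumptions,
        // not part of the original source.
        public static void Main()
        {
            // Track a printed marker in a recorded video and render the augmented view
            Run("scene.mp4", "pattern.png", SourceType.Video);

            // Alternatively, run the pipeline on a single still image:
            // Run("scene.png", "pattern.png", SourceType.Image);
        }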