示例#1
0
    /// <summary>
    ///   Unity per-frame tick: services the ROS bridge, and while running publishes a
    ///   test CameraInfoMsg every <c>pollRate</c> seconds via a countdown timer.
    /// </summary>
    void Update()
    {
        ros.Render();

        if (!_running)
        {
            return;
        }

        timer -= Time.deltaTime;

        if (timer <= 0)
        {
            timer = pollRate;

            // Build the test CameraInfo message. (Previously a set of unrelated message
            // objects — Pose, Twist, Path, Bool, String, etc. — was constructed here and
            // immediately discarded; only the fields below feed the published message.)
            HeaderMsg           header  = new HeaderMsg(1, new TimeMsg(1, 1), "0");
            RegionOfInterestMsg roi     = new RegionOfInterestMsg(0, 1, 2, 3, true);
            CameraInfoMsg       caminfo = new CameraInfoMsg(header, 100, 200, "plumb_bob", new double[5], new double[9], new double[9], new double[12], 14, 123, roi);

            _genericPub.PublishData(caminfo);
        }
    }
示例#2
0
        /// <summary>
        ///   Builds a new texture of the same size as <paramref name="tex"/> where each output
        ///   pixel (x, y) is sampled from the source at the location produced by
        ///   FindPixelProjection using the camera's projection matrix p and inverted
        ///   intrinsic matrix k.
        /// </summary>
        /// <param name="tex">Source texture to sample from.</param>
        /// <param name="cameraInfo">Supplies the flat 3x3 intrinsic matrix k and projection matrix p.</param>
        /// <returns>A newly-allocated Texture2D with the projection applied (Apply already called).</returns>
        public static Texture2D ApplyCameraInfoProjection(this Texture2D tex, CameraInfoMsg cameraInfo)
        {
            var result = new Texture2D(tex.width, tex.height);

            // Reshape the row-major flat k array into jagged form so it can be inverted.
            var k = cameraInfo.k;
            double[][] intrinsics =
            {
                new[] { k[0], k[1], k[2] },
                new[] { k[3], k[4], k[5] },
                new[] { k[6], k[7], k[8] },
            };
            var intrinsicsInverse = MatrixExtensions.MatrixInverse(intrinsics);

            for (int x = 0; x < tex.width; x++)
            {
                for (int y = 0; y < tex.height; y++)
                {
                    var projected = FindPixelProjection(x, y, cameraInfo.p, intrinsicsInverse);
                    result.SetPixel(x, y, tex.GetPixel((int)projected.x, (int)projected.y));
                }
            }

            result.Apply();
            return result;
        }
示例#3
0
 /// <summary>
 ///   Creates the rosbridge websocket connection plus the camera-info publisher and
 ///   subscriber pair, connects, services the bridge once, and starts the coroutine
 ///   that waits for the connection to come up.
 /// </summary>
 /// <param name="host">Websocket URL of the rosbridge server (defaults to the previous hard-coded address).</param>
 /// <param name="port">Websocket port of the rosbridge server.</param>
 private void Initialise(string host = "ws://192.168.255.40", int port = 9090)
 {
     ros         = new ROSBridgeWebSocketConnection(host, port, "Test");
     _genericPub = new ROSGenericPublisher(ros, "/raspicam_node/camera_info", CameraInfoMsg.GetMessageType());
     _genericSub = new ROSGenericSubscriber <CameraInfoMsg>(ros, "/raspicam_node/camera_info", CameraInfoMsg.GetMessageType(), msg => new CameraInfoMsg(msg));
     _genericSub.OnDataReceived += OnDataReceived;
     ros.Connect();
     ros.Render();
     StartCoroutine(WaitForRunning());
 }
示例#4
0
    /// <summary>
    ///   Renders the camera into the off-screen render texture, reads the pixels back into
    ///   _texture2D, then publishes the compressed image and its matching CameraInfo message,
    ///   incrementing the sequence id once per publish cycle.
    /// </summary>
    protected override void PublishSensorData()
    {
        // Redirect the camera to the off-screen target and render one frame.
        _camera.targetTexture = _renderTexture;
        _camera.Render();
        // ReadPixels copies from RenderTexture.active, so it must point at our target first.
        RenderTexture.active = _renderTexture;
        _texture2D.ReadPixels(_rect, 0, 0);
        // Restore state: detach the camera and clear the active render target.
        _camera.targetTexture = null;
        RenderTexture.active  = null;


        // NOTE(review): the header timestamp is always (0, 0) while _sequenceId advances —
        // confirm whether downstream subscribers need a real clock value here.
        HeaderMsg           header  = new HeaderMsg(_sequenceId, new TimeMsg(0, 0), "raspicam");
        // All-zero ROI with do_rectify = false.
        RegionOfInterestMsg roi     = new RegionOfInterestMsg(0, 0, 0, 0, false);
        // NOTE(review): height is passed before width — verify this matches the
        // CameraInfoMsg constructor's parameter order.
        CameraInfoMsg       camInfo = new CameraInfoMsg(header, (uint)_resolutionHeight, (uint)_resolutionWidth, "plumb_bob", Distortion, CameraMatrix, Rectification, Projection, 0, 0, roi);

        _cameraSensorPublisher.PublishData(_texture2D, _cameraQualityLevel, _sequenceId);
        _cameraInfoPublisher.PublishData(camInfo);

        _sequenceId++;
    }
示例#5
0
        /// <summary>
        ///   Projects every pixel coordinate of the image described by <paramref name="cameraInfo"/>
        ///   through FindPixelProjection (projection matrix p, inverted intrinsics k), producing
        ///   one PointMsg per pixel.
        /// </summary>
        /// <param name="cameraInfo">Supplies the image width/height and the k and p matrices.</param>
        /// <returns>Array of projected points, iterated width-major (all heights for x = 0, then x = 1, ...).</returns>
        public static PointMsg[] GetPixelsInWorld(this CameraInfoMsg cameraInfo)
        {
            // Reshape the row-major flat k array into jagged form so it can be inverted.
            var k = cameraInfo.k;
            double[][] intrinsics =
            {
                new[] { k[0], k[1], k[2] },
                new[] { k[3], k[4], k[5] },
                new[] { k[6], k[7], k[8] },
            };
            var intrinsicsInverse = MatrixExtensions.MatrixInverse(intrinsics);

            var points = new List <PointMsg>();
            for (int x = 0; x < cameraInfo.width; x++)
            {
                for (int y = 0; y < cameraInfo.height; y++)
                {
                    points.Add(FindPixelProjection(x, y, cameraInfo.p, intrinsicsInverse));
                }
            }

            return points.ToArray();
        }
        /**
         * <summary>
         *   <para>Using a Unity Camera and a provided HeaderMsg, generate a corresponding CameraInfoMsg,
         *  see http://docs.ros.org/en/noetic/api/sensor_msgs/html/msg/CameraInfo.html for more information.</para>
         * </summary>
         *
         * <param name="unityCamera">The camera used to generate the message.</param>
         * <param name="header">The header component of this message.</param>
         * <param name="horizontalCameraOffsetDistanceMeters">Meters, For use with stereo cameras,
         * Typically the offset of the right camera from the left.</param>
         * <param name="integerResolutionTolerance">The CameraInfoMsg stores resolution as a uint. However in Unity the
         * pixelRect that represents the resolution is a Rect allowing for floating point precision resolution.
         * This parameter will cause debug warnings if after converting a resolution to unsigned integers
         * the difference between the new value and the original is greater than integerResolutionTolerance.
         * If you want to bypass this warning, set the value to 1.0f</param>
         *
         * <exception cref="NotImplementedException">Thrown for resolution scaling or a non-zero lens shift,
         * neither of which is supported yet.</exception>
         *
         * Not Yet Supported, but might be worth supporting:
         * -> Camera Resolution Scaling.
         * -> Non Zero Vertical Camera Offset.
         * -> Lens Shift.
         *
         * Not Yet Supported:
         * -> Distortion Models.
         * -> Rectification Matrix (Other than the identity).
         * -> Axis Skew.
         *
         */
        public static CameraInfoMsg ConstructCameraInfoMessage(Camera unityCamera, HeaderMsg header,
                                                               float horizontalCameraOffsetDistanceMeters = 0.0f, float integerResolutionTolerance = 0.01f)
        {
            var cameraInfo = new CameraInfoMsg {
                header = header
            };

            Rect pixelRect        = unityCamera.pixelRect;
            var  resolutionWidth  = (uint)pixelRect.width;
            var  resolutionHeight = (uint)pixelRect.height;

            //Check whether the resolution is an integer value, if not, raise a warning.
            //Note: While the resolution of a screen or a render texture is always an integer value,
            //      one can change the rendering region within the screen / texture using the
            //      viewport rect. It is possible that this region will be a non-integer resolution.
            //      since the resolution of the CameraInfo message is stored as a uint,
            //      non-integer values are not supported
            WarnIfNonIntegerResolution(unityCamera, pixelRect.width, resolutionWidth, integerResolutionTolerance, "width");
            WarnIfNonIntegerResolution(unityCamera, pixelRect.height, resolutionHeight, integerResolutionTolerance, "height");

            if (resolutionWidth != unityCamera.scaledPixelWidth || resolutionHeight != unityCamera.scaledPixelHeight)
            {
                //Check for resolution scaling (Not Implemented). TODO - Implement.
                throw new NotImplementedException(
                          $"Unable to construct CameraInfoMsg for camera with name {unityCamera.gameObject.name}, " +
                          $"Resolution scaling is not yet supported.");
            }

            if (unityCamera.lensShift != Vector2.zero)
            {
                throw new NotImplementedException(
                          $"Unable to construct CameraInfoMsg for camera with name {unityCamera.gameObject.name}, " +
                          "Lens shift is not yet supported.");
            }

            cameraInfo.width  = resolutionWidth;
            cameraInfo.height = resolutionHeight;

            //Focal center currently assumes zero lens shift.
            //Focal center x.
            double cX = resolutionWidth / 2.0;
            //Focal center y.
            double cY = resolutionHeight / 2.0;

            //Get the vertical field of view of the camera taking into account any physical camera settings.
            float verticalFieldOfView = GetVerticalFieldOfView(unityCamera);

            //Sources
            //http://paulbourke.net/miscellaneous/lens/
            //http://ksimek.github.io/2013/06/18/calibrated-cameras-and-gluperspective/
            //Rearranging the equation for verticalFieldOfView given a focal length, determine the focal length in pixels.
            double focalLengthInPixels =
                (resolutionHeight / 2.0) / Math.Tan((Mathf.Deg2Rad * verticalFieldOfView) / 2.0);

            //As this is a perfect pinhole camera, the fx = fy = f
            //Source http://ksimek.github.io/2013/08/13/intrinsic/
            //Focal Length (x)
            double fX = focalLengthInPixels;
            //Focal Length (y)
            double fY = focalLengthInPixels;

            //Source: http://docs.ros.org/en/noetic/api/sensor_msgs/html/msg/CameraInfo.html
            //For a single camera, tX = tY = 0.
            //For a stereo camera, assuming Tz = 0, Ty = 0 and Tx = -fx' * B (for the second camera)
            double baseline = horizontalCameraOffsetDistanceMeters;

            double tX = -fX * baseline;
            double tY = 0.0;

            //Axis Skew, Assuming none.
            double s = 0.0;

            //http://ksimek.github.io/2013/08/13/intrinsic/
            cameraInfo.k = new double[]
            {
                fX, s, cX,
                0, fY, cY,
                0, 0, 1
            };

            //The distortion parameters, size depending on the distortion model.
            //For "plumb_bob", the 5 parameters are: (k1, k2, t1, t2, k3).
            //No distortion means d = {k1, k2, t1, t2, k3} = {0, 0, 0, 0, 0}
            cameraInfo.distortion_model = k_PlumbBobDistortionModel;
            cameraInfo.d = new double[]
            {
                0.0, //k1
                0.0, //k2
                0.0, //t1
                0.0, //t2
                0.0  //k3
            };

            //Rectification matrix (stereo cameras only)
            //A rotation matrix aligning the camera coordinate system to the ideal
            //stereo image plane so that epipolar lines in both stereo images are
            //parallel.
            cameraInfo.r = new double[]
            {
                1, 0, 0,
                0, 1, 0,
                0, 0, 1
            };


            //Projection/camera matrix
            //     [fx'  0  cx' Tx]
            // P = [ 0  fy' cy' Ty]
            //     [ 0   0   1   0]
            cameraInfo.p = new double[]
            {
                fX, 0, cX, tX,
                0, fY, cY, tY,
                0, 0, 1, 0
            };

            //We're not worrying about binning...
            cameraInfo.binning_x = 0;
            cameraInfo.binning_y = 0;

            cameraInfo.roi = new RegionOfInterestMsg(); //Just the default values here.

            return(cameraInfo);
        }

        //Shared warning used for the width and height checks above: casting the floating-point
        //viewport dimension to a uint truncates it; if that discards more than the allowed
        //tolerance, log a warning (the emitted message is identical to the previous inline code).
        private static void WarnIfNonIntegerResolution(Camera unityCamera, float rawDimension, uint truncatedDimension,
                                                       float tolerance, string dimensionName)
        {
            if ((rawDimension - (float)truncatedDimension) > tolerance)
            {
                Debug.LogWarning($"CameraInfoMsg for camera with name {unityCamera.gameObject.name}, " +
                                 $"Resolution {dimensionName} is not an integer: {rawDimension}. Adjust the viewport rect.");
            }
        }
示例#7
0
    /// <summary>
    ///   Allocates the off-screen render target plus the CPU-side texture and read-back rect,
    ///   creates the camera-info and compressed-image publishers on the given bridge, then
    ///   delegates to the base initialisation.
    /// </summary>
    /// <param name="rosBridge">Bridge connection the publishers are registered on.</param>
    public override void Initialise(ROSBridgeWebSocketConnection rosBridge)
    {
        // GPU render target and the CPU-side texture/rect used to read it back.
        _renderTexture = new RenderTexture(_resolutionWidth, _resolutionHeight, 24);
        _rect          = new Rect(0, 0, _resolutionWidth, _resolutionHeight);
        _texture2D     = new Texture2D(_resolutionWidth, _resolutionHeight, TextureFormat.RGB24, false);

        const string cameraInfoTopic = "/raspicam_node/camera_info";
        _cameraInfoPublisher   = new ROSGenericPublisher(rosBridge, cameraInfoTopic, CameraInfoMsg.GetMessageType());
        _cameraSensorPublisher = new ROSCameraSensorPublisher(rosBridge, "/raspicam_node/image/compressed");

        base.Initialise(rosBridge);
    }