Example #1
        /// <summary>
        /// insert a stereo ray into the grid
        /// </summary>
        /// <param name="ray">stereo ray</param>
        /// <param name="sensormodel">sensor model for each grid level</param>
        /// <param name="left_camera_location">left stereo camera position and orientation</param>
        /// <param name="right_camera_location">right stereo camera position and orientation</param>
        /// <param name="localiseOnly">whether we are mapping or localising</param>
        /// <returns>localisation matching score</returns>
        public float Insert(
            evidenceRay ray,
            stereoModel[] sensormodel,
            pos3D left_camera_location,
            pos3D right_camera_location,
            bool localiseOnly)
        {
            float matchingScore = occupancygridBase.NO_OCCUPANCY_EVIDENCE;

            switch (grid_type)
            {
            case TYPE_SIMPLE:
            {
                for (int grid_level = 0; grid_level < grid.Length; grid_level++)
                {
                    rayModelLookup      sensormodel_lookup = sensormodel[grid_level].ray_model;
                    occupancygridSimple grd = (occupancygridSimple)grid[grid_level];
                    float score             = grd.Insert(ray, sensormodel_lookup, left_camera_location, right_camera_location, localiseOnly);

                    if (score != occupancygridBase.NO_OCCUPANCY_EVIDENCE)
                    {
                        if (matchingScore == occupancygridBase.NO_OCCUPANCY_EVIDENCE)
                        {
                            matchingScore = 0;
                        }
                        matchingScore += grid_weight[grid_level] * score;
                    }
                }
                break;
            }
            }
            return(matchingScore);
        }
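A minimal standalone sketch (not from this codebase) of the weighted-score accumulation used above, with hypothetical per-level scores, weights and sentinel value; the real sentinel is occupancygridBase.NO_OCCUPANCY_EVIDENCE.

using System;

class WeightedScoreSketch
{
    // hypothetical sentinel standing in for occupancygridBase.NO_OCCUPANCY_EVIDENCE
    const float NO_OCCUPANCY_EVIDENCE = 999;

    static void Main()
    {
        float[] level_score = { 0.8f, NO_OCCUPANCY_EVIDENCE, 0.4f }; // per grid level (illustrative)
        float[] grid_weight = { 0.5f, 0.3f, 0.2f };

        float matchingScore = NO_OCCUPANCY_EVIDENCE;
        for (int grid_level = 0; grid_level < level_score.Length; grid_level++)
        {
            float score = level_score[grid_level];
            if (score != NO_OCCUPANCY_EVIDENCE)
            {
                // the first valid score switches the result from "no evidence" to zero
                if (matchingScore == NO_OCCUPANCY_EVIDENCE) matchingScore = 0;
                matchingScore += grid_weight[grid_level] * score;
            }
        }
        Console.WriteLine(matchingScore); // 0.5*0.8 + 0.2*0.4 ≈ 0.48
    }
}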
Example #2
        /// <summary>
        /// insert a stereo ray into the grid
        /// </summary>
        /// <param name="ray">stereo ray</param>
        /// <param name="origin">pose from which this observation was made</param>
        /// <param name="sensormodel">sensor model for each grid level</param>
        /// <param name="left_camera_location">left stereo camera position and orientation</param>
        /// <param name="right_camera_location">right stereo camera position and orientation</param>
        /// <param name="localiseOnly">whether we are mapping or localising</param>
        /// <returns>localisation matching score</returns>
        public float Insert(
            evidenceRay ray,
            particlePose origin,
            stereoModel[] sensormodel,
            pos3D left_camera_location,
            pos3D right_camera_location,
            bool localiseOnly)
        {
            float matchingScore = 0;

            switch (grid_type)
            {
            case TYPE_MULTI_HYPOTHESIS:
            {
                for (int grid_level = 0; grid_level < grid.Length; grid_level++)
                {
                    rayModelLookup sensormodel_lookup = sensormodel[grid_level].ray_model;
                    occupancygridMultiHypothesis grd  = (occupancygridMultiHypothesis)grid[grid_level];
                    matchingScore += grid_weight[grid_level] * grd.Insert(ray, origin, sensormodel_lookup, left_camera_location, right_camera_location, localiseOnly);
                }
                break;
            }
            }
            return(matchingScore);
        }
Example #3
        /// <summary>
        /// update the robot's map
        /// </summary>
        /// <param name="state">robot state</param>
        private static void Update(ThreadMappingState state)
        {
            // object used for taking benchmark timings
            stopwatch clock = new stopwatch();

            clock.Start();

            // update all current poses with the observed rays
            state.motion.LocalGrid = state.grid;
            state.motion.AddObservation(state.stereo_rays, false);

            clock.Stop();
            state.benchmark_observation_update = clock.time_elapsed_mS;

            // what's the relative position of the robot inside the grid?
            pos3D relative_position = new pos3D(state.pose.x - state.grid.x, state.pose.y - state.grid.y, 0);
            relative_position.pan = state.pose.pan - state.grid.pan;

            clock.Start();

            // garbage collect dead occupancy hypotheses
            state.grid.GarbageCollect();

            clock.Stop();
            state.benchmark_garbage_collection = clock.time_elapsed_mS;
        }
Example #4
        public void show(Byte[] img, int img_width, int img_height, int scale)
        {
            if (viewpoints.Count > 0)
            {
                // find the centre of the path
                pos3D centre = pathCentre();

                // insert the viewpoints
                int prev_x = 0;
                int prev_y = 0;
                for (int v = 0; v < viewpoints.Count; v++)
                {
                    viewpoint view = (viewpoint)viewpoints[v];
                    float     x    = view.odometry_position.x - centre.x;
                    float     y    = view.odometry_position.y - centre.y;

                    int screen_x = (int)(x * scale / 10000.0f * img_width / 640) + (img_width / 2);
                    int screen_y = (int)(y * scale / 10000.0f * img_height / 480) + (img_height / 2);

                    if (v > 0)
                    {
                        util.drawLine(img, img_width, img_height, screen_x, screen_y, prev_x, prev_y, 0, 255, 0, v * 4 / viewpoints.Count, false);
                    }

                    prev_x = screen_x;
                    prev_y = screen_y;
                }
            }
        }
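The screen mapping in show() scales millimetre offsets from the path centre by scale/10000, normalises them against a 640x480 reference resolution, and shifts them to the image centre. A small standalone sketch with hypothetical values:

using System;

class PathToScreenSketch
{
    static void Main()
    {
        int img_width = 640, img_height = 480, scale = 100; // illustrative values
        float x = 2500, y = -1000;                          // mm offsets from the path centre

        // same expression as used in show()
        int screen_x = (int)(x * scale / 10000.0f * img_width / 640) + (img_width / 2);
        int screen_y = (int)(y * scale / 10000.0f * img_height / 480) + (img_height / 2);

        Console.WriteLine(screen_x + "," + screen_y); // 345,230
    }
}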
Example #5
        /// <summary>
        /// update the robot's map
        /// </summary>
        /// <param name="state">robot state</param>
        private static void Update(ThreadMappingState state)
        {
            // object used for taking benchmark timings
            stopwatch clock = new stopwatch();

            clock.Start();

            // update all current poses with the observed rays
            state.motion.LocalGrid = state.grid;
            state.motion.AddObservation(state.stereo_rays, false);            

            clock.Stop();
            state.benchmark_observation_update = clock.time_elapsed_mS;

            // what's the relative position of the robot inside the grid?
            pos3D relative_position = new pos3D(state.pose.x - state.grid.x, state.pose.y - state.grid.y, 0);
            relative_position.pan = state.pose.pan - state.grid.pan;

            clock.Start();

            // garbage collect dead occupancy hypotheses
            state.grid.GarbageCollect();

            clock.Stop();
            state.benchmark_garbage_collection = clock.time_elapsed_mS;
        }
Example #6
		public void SubtractPoints()
		{
		    pos3D[] point = new pos3D[3];
		    for (int i = 0; i < 2; i++) point[i] = new pos3D(0,0,0);
		    
		    point[0].x = 10;
		    point[0].y = 20;
		    point[0].z = 30;
		    point[0].pan = 1.4f;
		    point[0].tilt = 0.01f;
		    point[0].roll = 0.002f;

   		    point[1].x = 40;
		    point[1].y = 50;
		    point[1].z = 60;
		    point[1].pan = 2.1f;
		    point[1].tilt = 0.03f;
		    point[1].roll = 0.001f;
		    
		    point[2] = point[0].subtract(point[1]);
		    
		    Assert.AreEqual(10 - 40, point[2].x, "x");
		    Assert.AreEqual(20 - 50, point[2].y, "y");
		    Assert.AreEqual(30 - 60, point[2].z, "z");
		    Assert.AreEqual(1.4f - 2.1f, point[2].pan, "pan");
		    Assert.AreEqual(0.01f - 0.03f, point[2].tilt, "tilt");
		    Assert.AreEqual(0.002f - 0.001f, point[2].roll, "roll");
		}
Example #7
        /// <summary>
        /// translate and rotate the ray by the given values
        /// </summary>
        /// <param name="r">object containing the desired translation and rotation</param>
        public void translateRotate(pos3D r)
        {
            //take a note of where the ray was observed from
            //in egocentric coordinates.  This will help to
            //restrict localisation searching
            observedFrom      = new pos3D(r.x, r.y, r.z);
            observedFrom.pan  = r.pan;
            observedFrom.tilt = r.tilt;

            //centre = centre.rotate(r.pan, r.tilt, r.roll);
            for (int v = 0; v < 2; v++)
            {
                vertices[v] = vertices[v].rotate(r.pan, r.tilt, r.roll);
            }

            //store the XY plane pan angle, which may later be used
            //to reduce the searching during localisation
            pan_angle  = vertices[0].new_pan_angle;
            pan_index  = (int)(pan_angle * pan_steps / (2 * 3.1415927f));
            start_dist = vertices[0].dist_xy;
            length     = vertices[1].dist_xy - start_dist;

            //centre = centre.translate(r.x, r.y, r.z);
            for (int v = 0; v < 2; v++)
            {
                vertices[v] = vertices[v].translate(r.x, r.y, r.z);
            }
        }
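The pan index stored above buckets the XY-plane pan angle into discrete bins over the 0..2*pi range, which later narrows the localisation search. A tiny sketch with a hypothetical pan_steps value:

using System;

class PanIndexSketch
{
    static void Main()
    {
        int pan_steps   = 100;  // hypothetical number of bins
        float pan_angle = 1.0f; // radians

        int pan_index = (int)(pan_angle * pan_steps / (2 * 3.1415927f));
        Console.WriteLine(pan_index); // 15, i.e. roughly 16% of the way around
    }
}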
Example #8
		public void AddPoints()
		{
		    pos3D[] point = new pos3D[3];
		    for (int i = 0; i < 2; i++) point[i] = new pos3D(0,0,0);
		    
		    point[0].x = 10;
		    point[0].y = 20;
		    point[0].z = 30;
		    point[0].pan = 1.4f;
		    point[0].tilt = 0.01f;
		    point[0].roll = 0.002f;

   		    point[1].x = 40;
		    point[1].y = 50;
		    point[1].z = 60;
		    point[1].pan = 2.1f;
		    point[1].tilt = 0.03f;
		    point[1].roll = 0.001f;
		    
		    point[2] = point[0].add(point[1]);
		    
		    Assert.AreEqual(10 + 40, point[2].x, "x");
		    Assert.AreEqual(20 + 50, point[2].y, "y");
		    Assert.AreEqual(30 + 60, point[2].z, "z");
		    Assert.AreEqual(1.4f + 2.1f, point[2].pan, "pan");
		    Assert.AreEqual(0.01f + 0.03f, point[2].tilt, "tilt");
		    Assert.AreEqual(0.002f + 0.001f, point[2].roll, "roll");
		}
Example #9
 public pos3D subtract(pos3D other)
 {
     pos3D sum = new pos3D(x - other.x, y - other.y, z - other.z);
     sum.pan = pan - other.pan;
     sum.tilt = tilt - other.tilt;
     sum.roll = roll - other.roll;
     return (sum);
 }
Example #10
 public pos3D add(pos3D other)
 {
     pos3D sum = new pos3D(x + other.x, y + other.y, z + other.z);
     sum.pan = pan + other.pan;
     sum.tilt = tilt + other.tilt;
     sum.roll = roll + other.roll;
     return (sum);
 }
Example #11
 /// <summary>
 /// sets the position and orientation of the robot's head
 /// </summary>
 /// <param name="p">
 /// A <see cref="pos3D"/>
 /// </param>
 public void SetHeadPositionOrientation(pos3D p)
 {
     head_centroid_x = p.x;
     head_centroid_y = p.y;
     head_centroid_z = p.z;
     head_pan        = p.pan;
     head_tilt       = p.tilt;
     head_roll       = p.roll;
 }
Example #12
        public pos3D add(pos3D other)
        {
            pos3D sum = new pos3D(x + other.x, y + other.y, z + other.z);

            sum.pan  = pan + other.pan;
            sum.tilt = tilt + other.tilt;
            sum.roll = roll + other.roll;
            return(sum);
        }
Example #13
        public pos3D Subtract(pos3D p)
        {
            pos3D new_p = new pos3D(x - p.x, y - p.y, z - p.z);

            new_p.pan  = pan - p.pan;
            new_p.tilt = tilt - p.tilt;
            new_p.roll = roll - p.roll;
            return(new_p);
        }
Example #14
 /// <summary>
 /// copy the position and orientation from another pos3D
 /// </summary>
 /// <param name="other">position to copy from</param>
 public void copyFrom(pos3D other)
 {
     x    = other.x;
     y    = other.y;
     z    = other.z;
     pan  = other.pan;
     tilt = other.tilt;
     roll = other.roll;
 }
Example #15
        /// <summary>
        /// return a translated version of the point
        /// </summary>
        /// <param name="x">x translation</param>
        /// <param name="y">y translation</param>
        /// <param name="z">z translation</param>
        /// <returns>translated position</returns>
        public pos3D translate(float x, float y, float z)
        {
            pos3D result = new pos3D(this.x + x, this.y + y, this.z + z);

            result.pan  = pan;
            result.tilt = tilt;
            result.roll = roll;
            return(result);
        }
Example #16
        public pos3D subtract(pos3D other)
        {
            pos3D sum = new pos3D(x - other.x, y - other.y, z - other.z);

            sum.pan  = pan - other.pan;
            sum.tilt = tilt - other.tilt;
            sum.roll = roll - other.roll;
            return(sum);
        }
Example #17
        public pos3D rotate_old(float pan, float tilt, float roll)
        {
            float hyp;
            pos3D rotated = new pos3D(x,y,z);

            // roll            
            //if (roll != 0)
            {
                hyp = (float)Math.Sqrt((rotated.x * rotated.x) + (rotated.z * rotated.z));
                if (hyp > 0)
                {
                    float roll_angle = (float)Math.Acos(rotated.z / hyp);
                    if (rotated.x < 0) roll_angle = (float)(Math.PI * 2) - roll_angle;
                    float new_roll_angle = roll + roll_angle;
                    rotated.x = hyp * (float)Math.Sin(new_roll_angle);
                    rotated.z = hyp * (float)Math.Cos(new_roll_angle);
                }
            }
            if (tilt != 0)
            {
                // tilt
                hyp = (float)Math.Sqrt((rotated.y * rotated.y) + (rotated.z * rotated.z));
                if (hyp > 0)
                {
                    float tilt_angle = (float)Math.Acos(rotated.y / hyp);
                    if (rotated.z < 0) tilt_angle = (float)(Math.PI * 2) - tilt_angle;
                    float new_tilt_angle = tilt + tilt_angle;
                    rotated.y = hyp * (float)Math.Sin(new_tilt_angle);
                    rotated.z = hyp * (float)Math.Cos(new_tilt_angle);
                }
            }

            //if (pan != 0)
            {
                // pan
                hyp = (float)Math.Sqrt((rotated.x * rotated.x) + (rotated.y * rotated.y));
                if (hyp > 0)
                {
                    float pan_angle = (float)Math.Acos(rotated.y / hyp);
                    if (rotated.x < 0) pan_angle = (float)(Math.PI * 2) - pan_angle;
                    rotated.new_pan_angle = pan - pan_angle;
                    rotated.dist_xy = hyp;
                    rotated.x = hyp * (float)Math.Sin(rotated.new_pan_angle);
                    rotated.y = hyp * (float)Math.Cos(rotated.new_pan_angle);
                }
            }
            rotated.pan = this.pan + pan;
            rotated.tilt = this.tilt + tilt;
            rotated.roll = this.roll + roll;
            return (rotated);
        }
Example #18
        /// <summary>
        /// localise and return offset values
        /// </summary>
        /// <param name="stereo_features">disparities for each stereo camera (x,y,disparity)</param>
        /// <param name="stereo_features_colour">colour for each disparity</param>
        /// <param name="stereo_features_uncertainties">uncertainty for each disparity</param>
        /// <param name="debug_mapping_filename">optional filename used for saving debugging info</param>
        /// <param name="known_offset_x_mm">ideal x offset, if known</param>
        /// <param name="known_offset_y_mm">ideal y offset, if known</param>
        /// <param name="offset_x_mm">returned x offset</param>
        /// <param name="offset_y_mm">returned y offset</param>
        /// <param name="offset_z_mm">returned z offset</param>
        /// <param name="offset_pan_radians">returned pan</param>
        /// <param name="offset_tilt_radians">returned tilt</param>
        /// <param name="offset_roll_radians">returned roll</param>
        /// <returns>true if the localisation was valid</returns>
        public bool Localise(
            float[][] stereo_features,
            byte[][,] stereo_features_colour,
            float[][] stereo_features_uncertainties,
            string debug_mapping_filename,
            float known_offset_x_mm,
            float known_offset_y_mm,
            ref float offset_x_mm,
            ref float offset_y_mm,
            ref float offset_z_mm,
            ref float offset_pan_radians,
            ref float offset_tilt_radians,
            ref float offset_roll_radians)
        {
            int   overall_img_width  = 640;
            int   overall_img_height = 480;
            bool  valid_localisation = true;
            pos3D pose_offset        = new pos3D(0, 0, 0);
            bool  buffer_transition  = false;

            float matching_score = buffer.Localise(
                robot_geometry,
                stereo_features,
                stereo_features_colour,
                stereo_features_uncertainties,
                rnd,
                ref pose_offset,
                ref buffer_transition,
                debug_mapping_filename,
                known_offset_x_mm,
                known_offset_y_mm,
                overall_map_filename,
                ref overall_map_img,
                overall_img_width,
                overall_img_height,
                overall_map_dimension_mm,
                overall_map_centre_x_mm,
                overall_map_centre_y_mm);

            if (matching_score == occupancygridBase.NO_OCCUPANCY_EVIDENCE)
            {
                valid_localisation = false;
            }

            offset_x_mm         = pose_offset.x;
            offset_y_mm         = pose_offset.y;
            offset_z_mm         = pose_offset.z;
            offset_pan_radians  = pose_offset.pan;
            offset_tilt_radians = pose_offset.tilt;
            offset_roll_radians = pose_offset.roll;

            return(valid_localisation);
        }
Example #19
        public void testLocalisation(robot rob,
                                     int localisation_track_index,
                                     int localisation_test_index,
                                     occupancygridMultiResolution grid,
                                     int ray_thickness,
                                     int no_of_trial_poses,
                                     bool pruneSurvey,
                                     int survey_diameter_mm,
                                     int randomSeed,
                                     int pruneThreshold,
                                     float survey_angular_offset,
                                     float momentum,
                                     ref float error_x,
                                     ref float error_y,
                                     ref float error_pan,
                                     pos3D estimatedPos)
        {
            robotPath pathLocalisation = getLocalisationTrack(localisation_track_index);
            robotPath pathMap          = getMappingTrack(localisation_track_index);

            viewpoint view_localisation = pathLocalisation.getViewpoint(localisation_test_index);
            viewpoint view_map          = pathMap.getViewpoint(localisation_test_index);
            float     max_score         = 0;
            // position estimate from odometry, which positions the robot within the grid
            // as an initial estimate from which to search
            pos3D     local_odometry_position = view_map.odometry_position.subtract(pathMap.pathCentre());
            ArrayList survey_results          = rob.sensorModelLocalisation.surveyXYP(view_localisation, grid, survey_diameter_mm, survey_diameter_mm,
                                                                                      no_of_trial_poses, ray_thickness, pruneSurvey, randomSeed,
                                                                                      pruneThreshold, survey_angular_offset,
                                                                                      local_odometry_position, momentum,
                                                                                      ref max_score);
            float peak_x   = 0;
            float peak_y   = 0;
            float peak_pan = 0;

            rob.sensorModelLocalisation.SurveyPeak(survey_results, ref peak_x, ref peak_y);
            //float[] peak = rob.sensorModel.surveyPan(view_map, view_localisation, separation_tollerance, ray_thickness, peak_x, peak_y);
            float[] peak = rob.sensorModelLocalisation.surveyPan(view_localisation, grid, ray_thickness, peak_x, peak_y);
            peak_pan = rob.sensorModelLocalisation.SurveyPeakPan(peak);

            float half_track_length = 1000;

            float dx = view_localisation.odometry_position.x - view_map.odometry_position.x;
            float dy = view_localisation.odometry_position.y - (view_map.odometry_position.y - half_track_length);
            float dp = view_localisation.odometry_position.pan - view_map.odometry_position.pan;

            error_x   = dx + peak_x;
            error_y   = dy + peak_y;
            error_pan = dp + peak_pan;
            error_pan = error_pan / (float)Math.PI * 180;

            estimatedPos.x   = view_map.odometry_position.x - peak_x;
            estimatedPos.y   = view_map.odometry_position.y - peak_y;
            estimatedPos.pan = view_map.odometry_position.pan - peak_pan;
        }
Example #20
        /// <summary>
        /// rotates this position, using Y forward convention
        /// </summary>
        /// <param name="pan">pan angle in radians</param>
        /// <param name="tilt">tilt angle in radians</param>
        /// <param name="roll">roll angle in radians</param>
        /// <returns>rotated position</returns>
        public pos3D rotate(float pan, float tilt, float roll)
        {
            float x2 = x;
            float y2 = y;
            float z2 = z;

            float x3 = x;
            float y3 = y;
            float z3 = z;

            // Rotation about the y axis
            if (roll != 0)
            {
                float roll2 = roll + (float)Math.PI;
                x3 = (float)((Math.Cos(roll2) * x2) + (Math.Sin(roll2) * z2));
                z3 = (float)(-(Math.Sin(roll) * x2) + (Math.Cos(roll) * z2));
                x2 = x3;
                z2 = z3;
            }

            // Rotation about the x axis
            if (tilt != 0)
            {
                float tilt2 = tilt;
                z3 = (float)((Math.Cos(tilt2) * y2) - (Math.Sin(tilt2) * z2));
                y3 = (float)((Math.Sin(tilt2) * y2) + (Math.Cos(tilt2) * z2));
                y2 = y3;
                z2 = z3;
            }

            // Rotation about the z axis:
            if (pan != 0)
            {
                float pan2 = pan + (float)Math.PI;
                x3 = (float)((Math.Cos(pan2) * x2) - (Math.Sin(pan2) * y2));
                y3 = (float)((Math.Sin(pan) * x2) + (Math.Cos(pan) * y2));
            }

            pos3D rotated = new pos3D(x3, y3, z3);

            rotated.pan  = this.pan + pan;
            rotated.tilt = this.tilt + tilt;
            rotated.roll = this.roll + roll;
            return(rotated);
        }
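A simplified sketch (not the project's implementation) of the Y-forward pan convention that the RotatePoint test in a later example checks: a point on the +Y axis rotated by a positive pan moves towards +X. Tilt and roll are ignored here, and the full method above handles them with its own sign conventions.

using System;

class PanConventionSketch
{
    static void Main()
    {
        float x = 0, y = 100;
        float pan = 30 * (float)Math.PI / 180; // 30 degrees

        // compass-style rotation about the vertical axis
        float x2 = (float)(x * Math.Cos(pan) + y * Math.Sin(pan));
        float y2 = (float)(-x * Math.Sin(pan) + y * Math.Cos(pan));

        Console.WriteLine(Math.Round(x2) + "," + Math.Round(y2)); // 50,87
    }
}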
Example #21
        public void CreateStereoCameras(
            int no_of_stereo_cameras,
            float cam_baseline_mm,
            float dist_from_centre_of_tilt_mm,
            int cam_image_width,
            int cam_image_height,
            float cam_FOV_degrees,
            float head_diameter_mm,
            float default_head_orientation_degrees)
        {
            this.head_diameter_mm = head_diameter_mm;
            pose = new pos3D[no_of_stereo_cameras];
            for (int i = 0; i < no_of_stereo_cameras; i++)
            {
                pose[i] = new pos3D(0, 0, 0);
            }

            baseline_mm              = new float[no_of_stereo_cameras];
            image_width              = new int[no_of_stereo_cameras];
            image_height             = new int[no_of_stereo_cameras];
            FOV_degrees              = new float[no_of_stereo_cameras];
            stereo_camera_position_x = new float[no_of_stereo_cameras];
            stereo_camera_position_y = new float[no_of_stereo_cameras];
            stereo_camera_position_z = new float[no_of_stereo_cameras];
            stereo_camera_pan        = new float[no_of_stereo_cameras];
            stereo_camera_tilt       = new float[no_of_stereo_cameras];
            stereo_camera_roll       = new float[no_of_stereo_cameras];
            left_camera_location     = new pos3D[no_of_stereo_cameras];
            right_camera_location    = new pos3D[no_of_stereo_cameras];

            for (int cam = 0; cam < no_of_stereo_cameras; cam++)
            {
                float cam_orientation = cam * (float)Math.PI * 2 / no_of_stereo_cameras;
                cam_orientation += default_head_orientation_degrees * (float)Math.PI / 180.0f;
                stereo_camera_position_x[cam] = head_diameter_mm * 0.5f * (float)Math.Sin(cam_orientation);
                stereo_camera_position_y[cam] = head_diameter_mm * 0.5f * (float)Math.Cos(cam_orientation);
                stereo_camera_position_z[cam] = dist_from_centre_of_tilt_mm;
                stereo_camera_pan[cam]        = cam_orientation;

                baseline_mm[cam]  = cam_baseline_mm;
                image_width[cam]  = cam_image_width;
                image_height[cam] = cam_image_height;
                FOV_degrees[cam]  = cam_FOV_degrees;
            }
        }
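The placement loop above spaces the stereo cameras evenly around a circular head, with each camera's pan equal to its placement angle. A standalone sketch with hypothetical head dimensions:

using System;

class HeadCameraLayoutSketch
{
    static void Main()
    {
        int no_of_stereo_cameras = 4; // illustrative values
        float head_diameter_mm = 100;
        float default_head_orientation_degrees = 0;

        for (int cam = 0; cam < no_of_stereo_cameras; cam++)
        {
            float cam_orientation = cam * (float)Math.PI * 2 / no_of_stereo_cameras;
            cam_orientation += default_head_orientation_degrees * (float)Math.PI / 180.0f;
            float x = head_diameter_mm * 0.5f * (float)Math.Sin(cam_orientation);
            float y = head_diameter_mm * 0.5f * (float)Math.Cos(cam_orientation);
            Console.WriteLine(cam + ": x=" + (int)Math.Round(x) +
                              " y=" + (int)Math.Round(y) +
                              " pan=" + (int)Math.Round(cam_orientation * 180 / Math.PI) + " deg");
        }
        // four cameras land at (0,50), (50,0), (0,-50), (-50,0) with pans of 0, 90, 180, 270 degrees
    }
}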
Example #22
        // find the centre of the path
        public pos3D pathCentre()
        {
            float centre_x = 0;
            float centre_y = 0;

            for (int v = 0; v < viewpoints.Count; v++)
            {
                viewpoint view = (viewpoint)viewpoints[v];
                centre_x += view.odometry_position.x;
                centre_y += view.odometry_position.y;
            }
            centre_x /= viewpoints.Count;
            centre_y /= viewpoints.Count;

            pos3D centre = new pos3D(centre_x, centre_y, 0);

            return(centre);
        }
Example #23
        /// <summary>
        /// calculate the position of the robot's head and cameras for this pose
        /// </summary>
        /// <param name="rob">robot object</param>
        /// <param name="head_location">location of the centre of the head</param>
        /// <param name="camera_centre_location">location of the centre of each stereo camera</param>
        /// <param name="left_camera_location">location of the left camera within each stereo camera</param>
        /// <param name="right_camera_location">location of the right camera within each stereo camera</param>
        protected void calculateCameraPositions(
            robot rob,
            ref pos3D head_location,
            ref pos3D[] camera_centre_location,
            ref pos3D[] left_camera_location,
            ref pos3D[] right_camera_location)
        {
            // calculate the position of the centre of the head relative to
            // the centre of rotation of the robot's body
            pos3D head_centroid = new pos3D(-(rob.BodyWidth_mm / 2) + rob.head.x,
                                            -(rob.BodyLength_mm / 2) + rob.head.y,
                                            rob.head.z);

            // location of the centre of the head on the grid map
            // adjusted for the robot pose and the head pan and tilt angle.
            // Note that the positions and orientations of individual cameras
            // on the head have already been accounted for within stereoModel.createObservation
            pos3D head_locn = head_centroid.rotate(rob.head.pan + pan, rob.head.tilt, 0);

            head_locn = head_locn.translate(x, y, 0);
            head_location.copyFrom(head_locn);

            for (int cam = 0; cam < rob.head.no_of_stereo_cameras; cam++)
            {
                // calculate the position of the centre of the stereo camera
                // (baseline centre point)
                pos3D camera_centre_locn = new pos3D(
                    rob.head.calibration[cam].positionOrientation.x,
                    rob.head.calibration[cam].positionOrientation.y,
                    rob.head.calibration[cam].positionOrientation.z);
                camera_centre_locn = camera_centre_locn.rotate(
                    rob.head.calibration[cam].positionOrientation.pan + rob.head.pan + pan,
                    rob.head.calibration[cam].positionOrientation.tilt,
                    rob.head.calibration[cam].positionOrientation.roll);
                camera_centre_location[cam] = camera_centre_locn.translate(
                    head_location.x, head_location.y, head_location.z);

                // where are the left and right cameras?
                // we need to know this for the origins of the vacancy models
                float half_baseline_length = rob.head.calibration[cam].baseline / 2;
                pos3D left_camera_locn     = new pos3D(-half_baseline_length, 0, 0);
                left_camera_locn = left_camera_locn.rotate(
                    rob.head.calibration[cam].positionOrientation.pan + rob.head.pan + pan,
                    rob.head.calibration[cam].positionOrientation.tilt,
                    rob.head.calibration[cam].positionOrientation.roll);
                pos3D right_camera_locn = new pos3D(-left_camera_locn.x, -left_camera_locn.y, -left_camera_locn.z);
                left_camera_location[cam]      = left_camera_locn.translate(camera_centre_location[cam].x, camera_centre_location[cam].y, camera_centre_location[cam].z);
                right_camera_location[cam]     = right_camera_locn.translate(camera_centre_location[cam].x, camera_centre_location[cam].y, camera_centre_location[cam].z);
                right_camera_location[cam].pan = left_camera_location[cam].pan;
            }
        }
Example #24
        public scanMatching[] scanmatch;         // simple scan matching

        /// <summary>
        /// constructor
        /// </summary>
        /// <param name="no_of_stereo_cameras">
        /// A <see cref="System.Int32"/>
        /// The number of stereo cameras
        /// </param>
        public stereoHead(int no_of_stereo_cameras) : base(0, 0, 0)
        {
            this.no_of_stereo_cameras = no_of_stereo_cameras;

            // create feature lists
            features = new stereoFeatures[no_of_stereo_cameras];

            // store filenames of the images for each camera
            imageFilename = new String[no_of_stereo_cameras * 2];

            // create the cameras
            cameraPosition = new pos3D[no_of_stereo_cameras];

            // calibration data
            calibration = new calibrationStereo[no_of_stereo_cameras];

            // create objects for each stereo camera
            for (int cam = 0; cam < no_of_stereo_cameras; cam++)
            {
                calibration[cam]    = new calibrationStereo();
                cameraPosition[cam] = new pos3D(0, 0, 0);
                features[cam]       = null; // new stereoFeatures();
            }

            // sensor models
            sensormodel = new rayModelLookup[no_of_stereo_cameras];

            //scan matching
            scanmatch = new scanMatching[no_of_stereo_cameras];

            /*
             * if (no_of_cameras == 4) initQuadCam();
             * if (no_of_cameras == 2) initDualCam();
             * if (no_of_cameras == 1) initSingleStereoCamera(false);
             */
        }
Example #25
        public scanMatching[] scanmatch;         // simple scan matching

		/// <summary>
		/// constructor
		/// </summary>
		/// <param name="no_of_stereo_cameras">
		/// A <see cref="System.Int32"/>
		/// The number of stereo cameras
		/// </param>
        public stereoHead(int no_of_stereo_cameras) : base(0,0,0)
        {
            this.no_of_stereo_cameras = no_of_stereo_cameras;
            
            // create feature lists
            features = new stereoFeatures[no_of_stereo_cameras];
            
            // store filenames of the images for each camera
            imageFilename = new String[no_of_stereo_cameras * 2];
            
            // create the cameras
            cameraPosition = new pos3D[no_of_stereo_cameras];
            
            // calibration data
            calibration = new calibrationStereo[no_of_stereo_cameras];
            
            // create objects for each stereo camera
            for (int cam = 0; cam < no_of_stereo_cameras; cam++)
            {
                calibration[cam] = new calibrationStereo();
                cameraPosition[cam] = new pos3D(0, 0, 0);
                features[cam] = null; // new stereoFeatures();
            }

            // sensor models
            sensormodel = new rayModelLookup[no_of_stereo_cameras];

            //scan matching
            scanmatch = new scanMatching[no_of_stereo_cameras];

            /*
            if (no_of_cameras == 4) initQuadCam();
            if (no_of_cameras == 2) initDualCam();
            if (no_of_cameras == 1) initSingleStereoCamera(false);
             */
        }
Example #26
        /// <summary>
        /// Mapping
        /// </summary>
        /// <param name="body_width_mm">width of the robot body in millimetres</param>
        /// <param name="body_length_mm">length of the robot body in millimetres</param>
        /// <param name="body_centre_of_rotation_x">x centre of rotation of the robot, relative to the top left corner</param>
        /// <param name="body_centre_of_rotation_y">y centre of rotation of the robot, relative to the top left corner</param>
        /// <param name="body_centre_of_rotation_z">z centre of rotation of the robot, relative to the top left corner</param>
        /// <param name="head_centroid_x">head centroid x position in millimetres relative to the top left corner of the body</param>
        /// <param name="head_centroid_y">head centroid y position in millimetres relative to the top left corner of the body</param>
        /// <param name="head_centroid_z">head centroid z position in millimetres relative to the top left corner of the body</param>
        /// <param name="head_pan">head pan angle in radians</param>
        /// <param name="head_tilt">head tilt angle in radians</param>
        /// <param name="head_roll">head roll angle in radians</param>
        /// <param name="stereo_camera_index">index of the stereo camera</param>
        /// <param name="baseline_mm">stereo camera baseline in millimetres</param>
        /// <param name="stereo_camera_position_x">stereo camera x position in millimetres relative to the head centroid</param>
        /// <param name="stereo_camera_position_y">stereo camera y position in millimetres relative to the head centroid</param>
        /// <param name="stereo_camera_position_z">stereo camera z position in millimetres relative to the head centroid</param>
        /// <param name="stereo_camera_pan">stereo camera pan in radians relative to the head</param>
        /// <param name="stereo_camera_tilt">stereo camera tilt in radians relative to the head</param>
        /// <param name="stereo_camera_roll">stereo camera roll in radians relative to the head</param>
        /// <param name="image_width">image width for each stereo camera</param>
        /// <param name="image_height">image height for each stereo camera</param>
        /// <param name="FOV_degrees">field of view for each stereo camera in degrees</param>
        /// <param name="stereo_features">stereo features (disparities) for each stereo camera</param>
        /// <param name="stereo_features_colour">stereo feature colours for each stereo camera</param>
        /// <param name="stereo_features_uncertainties">stereo feature uncertainties (priors) for each stereo camera</param>
        /// <param name="sensormodel">sensor model for each stereo camera</param>
        /// <param name="robot_pose">current estimated position and orientation of the robot's centre of rotation</param>
        protected void Map(
		    float body_width_mm,
		    float body_length_mm,
		    float body_centre_of_rotation_x,
		    float body_centre_of_rotation_y,
		    float body_centre_of_rotation_z,
		    float head_centroid_x,
		    float head_centroid_y,
		    float head_centroid_z,
		    float head_pan,
		    float head_tilt,
		    float head_roll,
		    int stereo_camera_index,
		    float baseline_mm,
		    float stereo_camera_position_x,
		    float stereo_camera_position_y,
		    float stereo_camera_position_z,
		    float stereo_camera_pan,
		    float stereo_camera_tilt,
		    float stereo_camera_roll,
            int image_width,
            int image_height,
            float FOV_degrees,
		    float[] stereo_features,
		    byte[,] stereo_features_colour,
		    float[] stereo_features_uncertainties,
            stereoModel[][] sensormodel,
            pos3D robot_pose)
        {        
		    Parallel.For(0, 2, delegate(int i)
		    {			
                pos3D left_camera_location = null;
                pos3D right_camera_location = null;
                
                buffer[i].Map(
		            body_width_mm,
		            body_length_mm,
		            body_centre_of_rotation_x,
		            body_centre_of_rotation_y,
		            body_centre_of_rotation_z,
		            head_centroid_x,
		            head_centroid_y,
		            head_centroid_z,
		            head_pan,
		            head_tilt,
		            head_roll,
				    stereo_camera_index,
		            baseline_mm,
		            stereo_camera_position_x,
		            stereo_camera_position_y,
		            stereo_camera_position_z,
		            stereo_camera_pan,
		            stereo_camera_tilt,
		            stereo_camera_roll,
                    image_width,
                    image_height,
                    FOV_degrees,
		            stereo_features,
		            stereo_features_colour,
		            stereo_features_uncertainties,
                    sensormodel,
                    ref left_camera_location,
                    ref right_camera_location,
                    robot_pose);
            });
        }
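A minimal standalone sketch (not project code) of the Parallel.For pattern used above: both grid buffers receive the same observation concurrently, and each iteration writes only to its own buffer index, so no locking is needed.

using System;
using System.Threading.Tasks;

class ParallelBufferSketch
{
    static void Main()
    {
        float[] buffer_score = new float[2]; // stand-in for the two grid buffers

        Parallel.For(0, 2, delegate(int i)
        {
            // each iteration touches only element i
            buffer_score[i] = 0.5f * (i + 1);
        });

        Console.WriteLine(buffer_score[0] + " " + buffer_score[1]); // 0.5 1
    }
}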
Example #27
        /// <summary>
        /// Localisation
        /// </summary>
        /// <param name="body_width_mm">width of the robot body in millimetres</param>
        /// <param name="body_length_mm">length of the robot body in millimetres</param>
        /// <param name="body_centre_of_rotation_x">x centre of rotation of the robot, relative to the top left corner</param>
        /// <param name="body_centre_of_rotation_y">y centre of rotation of the robot, relative to the top left corner</param>
        /// <param name="body_centre_of_rotation_z">z centre of rotation of the robot, relative to the top left corner</param>
        /// <param name="head_centroid_x">head centroid x position in millimetres relative to the top left corner of the body</param>
        /// <param name="head_centroid_y">head centroid y position in millimetres relative to the top left corner of the body</param>
        /// <param name="head_centroid_z">head centroid z position in millimetres relative to the top left corner of the body</param>
        /// <param name="head_pan">head pan angle in radians</param>
        /// <param name="head_tilt">head tilt angle in radians</param>
        /// <param name="head_roll">head roll angle in radians</param>
        /// <param name="baseline_mm">stereo camera baseline in millimetres</param>
        /// <param name="stereo_camera_position_x">stereo camera x position in millimetres relative to the head centroid</param>
        /// <param name="stereo_camera_position_y">stereo camera y position in millimetres relative to the head centroid</param>
        /// <param name="stereo_camera_position_z">stereo camera z position in millimetres relative to the head centroid</param>
        /// <param name="stereo_camera_pan">stereo camera pan in radians relative to the head</param>
        /// <param name="stereo_camera_tilt">stereo camera tilt in radians relative to the head</param>
        /// <param name="stereo_camera_roll">stereo camera roll in radians relative to the head</param>
        /// <param name="image_width">image width for each stereo camera</param>
        /// <param name="image_height">image height for each stereo camera</param>
        /// <param name="FOV_degrees">field of view for each stereo camera in degrees</param>
        /// <param name="stereo_features">stereo features (disparities) for each stereo camera</param>
        /// <param name="stereo_features_colour">stereo feature colours for each stereo camera</param>
        /// <param name="stereo_features_uncertainties">stereo feature uncertainties (priors) for each stereo camera</param>
        /// <param name="sensormodel">sensor model for each stereo camera</param>
        /// <param name="left_camera_location">returned position and orientation of the left camera on each stereo camera</param>
        /// <param name="right_camera_location">returned position and orientation of the right camera on each stereo camera</param>
        /// <param name="no_of_samples">number of sample poses</param>
        /// <param name="sampling_radius_major_mm">major radius for samples, in the direction of robot movement</param>
        /// <param name="sampling_radius_minor_mm">minor radius for samples, perpendicular to the direction of robot movement</param>
        /// <param name="robot_pose">current estimated position and orientation of the robot's centre of rotation, for each stereo camera observation</param>
        /// <param name="max_orientation_variance">maximum variance in orientation in radians, used to create sample poses</param>
        /// <param name="max_tilt_variance">maximum variance in tilt angle in radians, used to create sample poses</param>
        /// <param name="max_roll_variance">maximum variance in roll angle in radians, used to create sample poses</param>
        /// <param name="poses">list of poses tried</param>
        /// <param name="pose_score">list of pose matching scores</param>
        /// <param name="pose_offset">offset of the best pose from the current one</param>
		/// <param name="rnd">random number generator</param>
        /// <param name="buffer_transition">have we transitioned to the next grid buffer?</param>
        /// <param name="debug_mapping_filename">optional filename used for saving debugging info</param>
        /// <param name="known_best_pose_x_mm">x coordinate of the known best pose, if available</param>
        /// <param name="known_best_pose_y_mm">y coordinate of the known best pose, if available</param>
        /// <param name="overall_map_filename">filename to save an overall map of the path</param>
        /// <param name="overall_map_img">overall map image data</param>
        /// <param name="overall_img_width">width of the overall map image</param>
        /// <param name="overall_img_height">height of the overall map image</param>
        /// <param name="overall_map_dimension_mm">dimension of the overall map in millimetres</param>
        /// <param name="overall_map_centre_x_mm">centre x position of the overall map in millimetres</param>
        /// <param name="overall_map_centre_y_mm">centre y position of the overall map in millimetres</param>
        /// <returns>best localisation matching score</returns>
        protected float Localise(
		    float body_width_mm,
		    float body_length_mm,
		    float body_centre_of_rotation_x,
		    float body_centre_of_rotation_y,
		    float body_centre_of_rotation_z,
		    float head_centroid_x,
		    float head_centroid_y,
		    float head_centroid_z,
		    float head_pan,
		    float head_tilt,
		    float head_roll,
		    float[] baseline_mm,
		    float[] stereo_camera_position_x,
		    float[] stereo_camera_position_y,
		    float[] stereo_camera_position_z,
		    float[] stereo_camera_pan,
		    float[] stereo_camera_tilt,
		    float[] stereo_camera_roll,
            int[] image_width,
            int[] image_height,
            float[] FOV_degrees,
		    float[][] stereo_features,
		    byte[][,] stereo_features_colour,
		    float[][] stereo_features_uncertainties,
            stereoModel[][] sensormodel,
            ref pos3D[] left_camera_location,
            ref pos3D[] right_camera_location,
            int no_of_samples,
            float sampling_radius_major_mm,
            float sampling_radius_minor_mm,
            pos3D[] robot_pose,
            float max_orientation_variance,
            float max_tilt_variance,
            float max_roll_variance,
            List<pos3D> poses,
            List<float> pose_score,
		    Random rnd,
            ref pos3D pose_offset,
            ref bool buffer_transition,
            string debug_mapping_filename,
            float known_best_pose_x_mm,
            float known_best_pose_y_mm,
		    string overall_map_filename,
		    ref byte[] overall_map_img,
		    int overall_img_width,
		    int overall_img_height,
		    int overall_map_dimension_mm,
		    int overall_map_centre_x_mm,
		    int overall_map_centre_y_mm)		                         
        {
            bool update_map = false;

            // move to the next grid
            buffer_transition = MoveToNextLocalGrid(
                ref current_grid_index,
                ref current_disparity_index,
                robot_pose[0],
                buffer,
                ref current_buffer_index,
                grid_centres,
                ref update_map,
                debug_mapping_filename,
		        overall_map_filename,
		        ref overall_map_img,
		        overall_img_width,
		        overall_img_height,			                                        
		        overall_map_dimension_mm,
		        overall_map_centre_x_mm,
		        overall_map_centre_y_mm);
            
            // create the map if necessary
            if (update_map)
            {
                UpdateMap(
		            body_width_mm,
		            body_length_mm,
		            body_centre_of_rotation_x,
		            body_centre_of_rotation_y,
		            body_centre_of_rotation_z,
		            head_centroid_x,
		            head_centroid_y,
		            head_centroid_z,
		            baseline_mm,
		            stereo_camera_position_x,
		            stereo_camera_position_y,
		            stereo_camera_position_z,
		            stereo_camera_pan,
		            stereo_camera_tilt,
		            stereo_camera_roll,
                    image_width,
                    image_height,
                    FOV_degrees,
                    sensormodel);
            }

            int img_poses_width = 640;
            int img_poses_height = 480;
            byte[] img_poses = null;
            if ((debug_mapping_filename != null) &&
                (debug_mapping_filename != ""))
            {
                img_poses = new byte[img_poses_width * img_poses_height * 3];
            }
        
            // localise within the currently active grid
            float matching_score = 
	            buffer[current_buffer_index].Localise(
			        body_width_mm,
			        body_length_mm,
			        body_centre_of_rotation_x,
			        body_centre_of_rotation_y,
			        body_centre_of_rotation_z,
			        head_centroid_x,
			        head_centroid_y,
			        head_centroid_z,
			        head_pan,
			        head_tilt,
			        head_roll,
			        baseline_mm,
			        stereo_camera_position_x,
			        stereo_camera_position_y,
			        stereo_camera_position_z,
			        stereo_camera_pan,
			        stereo_camera_tilt,
			        stereo_camera_roll,
	                image_width,
	                image_height,
	                FOV_degrees,
			        stereo_features,
			        stereo_features_colour,
			        stereo_features_uncertainties,
	                sensormodel,
	                ref left_camera_location,
	                ref right_camera_location,
	                no_of_samples,
	                sampling_radius_major_mm,
	                sampling_radius_minor_mm,
	                robot_pose,
	                max_orientation_variance,
	                max_tilt_variance,
	                max_roll_variance,
	                poses,
	                pose_score,
			        rnd,
	                ref pose_offset,
                    img_poses,
                    img_poses_width,
                    img_poses_height,
                    known_best_pose_x_mm,
                    known_best_pose_y_mm);
			
			if (matching_score != occupancygridBase.NO_OCCUPANCY_EVIDENCE)
			{	        
		        // add this to the list of localisations                
		        localisations.Add(robot_pose[0].x + pose_offset.x);
		        localisations.Add(robot_pose[0].y + pose_offset.y);
		        localisations.Add(robot_pose[0].z + pose_offset.z);
		        localisations.Add(pose_offset.pan);
		        localisations.Add(matching_score);
	
	            if ((debug_mapping_filename != null) &&
	                (debug_mapping_filename != ""))
	            {
	                string[] str = debug_mapping_filename.Split('.');
	                string poses_filename = str[0] + "_gridcells.gif";
	                Bitmap poses_bmp = new Bitmap(img_poses_width, img_poses_height, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
	                BitmapArrayConversions.updatebitmap_unsafe(img_poses, poses_bmp);
	                poses_bmp.Save(poses_filename, System.Drawing.Imaging.ImageFormat.Gif);
	            }
			}
	        
	        return(matching_score);
        }
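When a localisation succeeds, the code above appends five values per result (x, y, z, pan, matching score) to a flat list, so entry i starts at index i * 5. A small sketch with hypothetical values:

using System;
using System.Collections.Generic;

class LocalisationLogSketch
{
    static void Main()
    {
        List<float> localisations = new List<float>();

        // one hypothetical localisation result
        localisations.Add(1200);  // x mm
        localisations.Add(-350);  // y mm
        localisations.Add(0);     // z mm
        localisations.Add(0.12f); // pan radians
        localisations.Add(0.83f); // matching score

        int entry = 0;
        Console.WriteLine(localisations[entry * 5 + 4]); // 0.83
    }
}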
Example #28
        /// <summary>
        /// Calculate the position of the robot's head and cameras for this pose.
        /// The positions returned are relative to the robot's centre of rotation.
        /// </summary>
        /// <param name="body_width_mm">width of the robot body in millimetres</param>
        /// <param name="body_length_mm">length of the robot body in millimetres</param>
        /// <param name="body_centre_of_rotation_x">x centre of rotation of the robot, relative to the top left corner</param>
        /// <param name="body_centre_of_rotation_y">y centre of rotation of the robot, relative to the top left corner</param>
        /// <param name="body_centre_of_rotation_z">z centre of rotation of the robot, relative to the top left corner</param>
        /// <param name="head_centroid_x">head centroid x position in millimetres relative to the top left corner of the body</param>
        /// <param name="head_centroid_y">head centroid y position in millimetres relative to the top left corner of the body</param>
        /// <param name="head_centroid_z">head centroid z position in millimetres relative to the top left corner of the body</param>
        /// <param name="head_pan">head pan angle in radians</param>
        /// <param name="head_tilt">head tilt angle in radians</param>
        /// <param name="head_roll">head roll angle in radians</param>
        /// <param name="baseline_mm">stereo camera baseline in millimetres</param>
        /// <param name="stereo_camera_position_x">stereo camera x position in millimetres relative to the head centroid</param>
        /// <param name="stereo_camera_position_y">stereo camera y position in millimetres relative to the head centroid</param>
        /// <param name="stereo_camera_position_z">stereo camera z position in millimetres relative to the head centroid</param>
        /// <param name="stereo_camera_pan">stereo camera pan in radians relative to the head</param>
        /// <param name="stereo_camera_tilt">stereo camera tilt in radians relative to the head</param>
        /// <param name="stereo_camera_roll">stereo camera roll in radians relative to the head</param>
        /// <param name="head_location">returned location/orientation of the robot head</param>
        /// <param name="camera_centre_location">returned stereo camera centre position/orientation</param>
        /// <param name="left_camera_location">returned left camera position/orientation</param>
        /// <param name="right_camera_location">returned right camera position/orientation</param>
        public static void calculateCameraPositions(
		    float body_width_mm,
		    float body_length_mm,
		    float body_centre_of_rotation_x,
		    float body_centre_of_rotation_y,
		    float body_centre_of_rotation_z,
		    float head_centroid_x,
		    float head_centroid_y,
		    float head_centroid_z,
		    float head_pan,
		    float head_tilt,
		    float head_roll,
		    float baseline_mm,
		    float stereo_camera_position_x,
		    float stereo_camera_position_y,
		    float stereo_camera_position_z,
		    float stereo_camera_pan,
		    float stereo_camera_tilt,
		    float stereo_camera_roll,
            ref pos3D head_location,
            ref pos3D camera_centre_location,
            ref pos3D left_camera_location,
            ref pos3D right_camera_location)
        {
            // calculate the position of the centre of the head relative to 
            // the centre of rotation of the robot's body
            pos3D head_centroid = 
				new pos3D(
				    -(body_width_mm * 0.5f) + (body_centre_of_rotation_x - (body_width_mm * 0.5f)) + head_centroid_x,
                    -(body_length_mm * 0.5f) + (body_centre_of_rotation_y - (body_length_mm * 0.5f)) + head_centroid_y,
                    head_centroid_z);

            // location of the centre of the head
            // adjusted for the robot pose and the head pan and tilt angle.
            // Note that the positions and orientations of individual cameras
            // on the head have already been accounted for within stereoModel.createObservation
            pos3D head_locn = 
				head_centroid.rotate(
				    head_pan, head_tilt, 0);
            head_location.copyFrom(head_locn);

            // calculate the position of the centre of the stereo camera
            // (baseline centre point)
            pos3D camera_centre_locn = 
				new pos3D(
				    stereo_camera_position_x, 
				    stereo_camera_position_y, 
				    stereo_camera_position_z);
			
			// rotate the stereo camera	    	    
            camera_centre_locn = 
                camera_centre_locn.rotate(
                    stereo_camera_pan + head_pan, 
                    stereo_camera_tilt + head_tilt, 
                    stereo_camera_roll + head_roll);
            
            // move the stereo camera relative to the head position
            camera_centre_location = 
                camera_centre_locn.translate(
                    head_location.x, 
                    head_location.y, 
                    head_location.z);

            // where are the left and right cameras?
            // we need to know this for the origins of the vacancy models
            float half_baseline_length = baseline_mm * 0.5f;
            pos3D left_camera_locn = new pos3D(-half_baseline_length, 0, 0);
            left_camera_locn = left_camera_locn.rotate(stereo_camera_pan + head_pan, stereo_camera_tilt + head_tilt, stereo_camera_roll + head_roll);            
            pos3D right_camera_locn = new pos3D(-left_camera_locn.x, -left_camera_locn.y, -left_camera_locn.z);
            left_camera_location = left_camera_locn.translate(camera_centre_location.x, camera_centre_location.y, camera_centre_location.z);
            right_camera_location = right_camera_locn.translate(camera_centre_location.x, camera_centre_location.y, camera_centre_location.z);
            right_camera_location.pan = left_camera_location.pan;
        }
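For the unrotated case, the baseline step above places the left camera half a baseline to one side of the stereo camera centre, mirrors it for the right camera, and translates both to the camera centre; the project code additionally rotates these offsets by the combined camera/head pan, tilt and roll. A minimal sketch with hypothetical positions:

using System;

class BaselineSketch
{
    static void Main()
    {
        float baseline_mm = 120;
        float centre_x = 500, centre_y = 250, centre_z = 900; // hypothetical camera centre

        float half_baseline = baseline_mm * 0.5f;
        float left_x = -half_baseline, left_y = 0, left_z = 0;
        float right_x = -left_x, right_y = -left_y, right_z = -left_z;

        Console.WriteLine("left : " + (centre_x + left_x) + "," + (centre_y + left_y) + "," + (centre_z + left_z));
        Console.WriteLine("right: " + (centre_x + right_x) + "," + (centre_y + right_y) + "," + (centre_z + right_z));
        // left : 440,250,900
        // right: 560,250,900
    }
}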
Example #29
		public void RotatePoint()
		{
		    pos3D point = new pos3D(0, 100, 0);
		    point.pan = DegreesToRadians(10);
		    
		    pos3D rotated = point.rotate(DegreesToRadians(30), 0, 0);
            int pan = (int)Math.Round(RadiansToDegrees(rotated.pan));		    		    		    		    
		    Assert.AreEqual(40, pan, "pan positive");
		    Assert.AreEqual(50, (int)Math.Round(rotated.x), "pan positive x");
		    Assert.AreEqual(87, (int)Math.Round(rotated.y), "pan positive y");

   		    rotated = point.rotate(DegreesToRadians(-30), 0, 0);
            pan = (int)Math.Round(RadiansToDegrees(rotated.pan));
		    Assert.AreEqual(-20, pan, "pan negative");
		    Assert.AreEqual(-50, (int)Math.Round(rotated.x), "pan negative x");
		    Assert.AreEqual(87, (int)Math.Round(rotated.y), "pan negative y");
		    
		    point.pan = 0;
		    point.tilt = DegreesToRadians(5);
   		    pos3D tilted = point.rotate(0, DegreesToRadians(30), 0);
            int tilt = (int)Math.Round(RadiansToDegrees(tilted.tilt));
		    Assert.AreEqual(35, tilt, "tilt positive");

   		    point.pan = 0;
   		    point.tilt = 0;
		    point.roll = DegreesToRadians(2);
   		    pos3D rolled = point.rotate(0, 0, DegreesToRadians(-20));
            int roll = (int)Math.Round(RadiansToDegrees(rolled.roll));
		    Assert.AreEqual(-18, roll, "roll negative");

		    //Console.WriteLine("x = " + rotated.x.ToString());
		    //Console.WriteLine("y = " + rotated.y.ToString());
		}
Example #30
        /// <summary>
        /// Returns positions of grid cells corresponding to a moire grid
        /// produced by the interference of a pair of hexagonal grids (theta waves)
        /// </summary>
        /// <param name="sampling_radius_major_mm">radius of the major axis of the bounding ellipse</param>
        /// <param name="sampling_radius_minor_mm">radius of the minor axis of the bounding ellipse</param>
        /// <param name="first_grid_spacing">spacing of the first hexagonal grid (theta wavelength 1)</param>
        /// <param name="second_grid_spacing">spacing of the second hexagonal grid (theta wavelength 2)</param>
        /// <param name="first_grid_rotation_degrees">rotation of the first grid (theta field 1)</param>
        /// <param name="second_grid_rotation_degrees">rotation of the second grid (theta field 2)</param>
        /// <param name="dimension_x_cells">number of grid cells in the x axis</param>
        /// <param name="dimension_y_cells">number of grid cells in the y axis</param>
        /// <param name="img">image data</param>
        /// <param name="img_width">image width</param>
        /// <param name="img_height">image height</param>
        public static void ShowMoireGrid(
            float sampling_radius_major_mm,
            float sampling_radius_minor_mm,
            float first_grid_spacing,
            float second_grid_spacing,
            float first_grid_rotation_degrees,
            float second_grid_rotation_degrees,
            int dimension_x_cells,
            int dimension_y_cells,
            float scaling_factor,
            byte[] img,
            int img_width,
            int img_height)
        {
            float[,,] grid1 = new float[dimension_x_cells, dimension_y_cells, 2];
            float[,,] grid2 = new float[dimension_x_cells, dimension_y_cells, 2];
            float rotation   = 0;
            float min_x      = float.MaxValue;
            float max_x      = float.MinValue;
            float min_y      = float.MaxValue;
            float max_y      = float.MinValue;
            float radius_sqr = sampling_radius_minor_mm * sampling_radius_minor_mm;

            for (int grd = 0; grd < 2; grd++)
            {
                float spacing = first_grid_spacing;
                if (grd == 1)
                {
                    spacing = second_grid_spacing;
                }

                float half_grid_spacing = spacing / 2;
                float half_x            = spacing * dimension_x_cells / 2;
                float half_y            = spacing * dimension_y_cells / 2;

                if (grd == 0)
                {
                    rotation = first_grid_rotation_degrees * (float)Math.PI / 180;
                }
                else
                {
                    rotation = second_grid_rotation_degrees * (float)Math.PI / 180;
                }

                for (int cell_x = 0; cell_x < dimension_x_cells; cell_x++)
                {
                    for (int cell_y = 0; cell_y < dimension_y_cells; cell_y++)
                    {
                        float x = (cell_x * spacing) - half_x;
                        if (cell_y % 2 == 0)
                        {
                            x += half_grid_spacing;
                        }
                        float y = (cell_y * spacing) - half_y;
                        x *= scaling_factor;
                        y *= scaling_factor;
                        pos3D p = new pos3D(x, y, 0);
                        p = p.rotate(rotation, 0, 0);
                        if (grd == 0)
                        {
                            grid1[cell_x, cell_y, 0] = p.x;
                            grid1[cell_x, cell_y, 1] = p.y;
                        }
                        else
                        {
                            grid2[cell_x, cell_y, 0] = p.x;
                            grid2[cell_x, cell_y, 1] = p.y;
                        }
                        if (p.x < min_x)
                        {
                            min_x = p.x;
                        }
                        if (p.y < min_y)
                        {
                            min_y = p.y;
                        }
                        if (p.x > max_x)
                        {
                            max_x = p.x;
                        }
                        if (p.y > max_y)
                        {
                            max_y = p.y;
                        }
                    }
                }
            }

            for (int i = (img_width * img_height * 3) - 1; i >= 0; i--)
            {
                img[i] = 0;
            }

            int radius = img_width / (dimension_x_cells * 350 / 100);

            if (radius < 2)
            {
                radius = 2;
            }
            int r, g, b;

            for (int grd = 0; grd < 2; grd++)
            {
                float[,,] grid = grid1;
                r = 5;
                g = -1;
                b = -1;
                if (grd == 1)
                {
                    grid = grid2;
                    r    = -1;
                    g    = 5;
                    b    = -1;
                }

                for (int cell_x = 0; cell_x < dimension_x_cells; cell_x++)
                {
                    for (int cell_y = 0; cell_y < dimension_y_cells; cell_y++)
                    {
                        int x = ((img_width - img_height) / 2) + (int)((grid[cell_x, cell_y, 0] - min_x) * img_height / (max_x - min_x));
                        int y = (int)((grid[cell_x, cell_y, 1] - min_y) * img_height / (max_y - min_y));

                        float dx = grid[cell_x, cell_y, 0];
                        float dy = (grid[cell_x, cell_y, 1] * sampling_radius_minor_mm) / sampling_radius_major_mm;

                        float dist = dx * dx + dy * dy;
                        if (dist <= radius_sqr)
                        {
                            drawing.drawSpotOverlay(img, img_width, img_height, x, y, radius, r, g, b);
                        }
                    }
                }

                for (int i = (img_width * img_height * 3) - 3; i >= 2; i -= 3)
                {
                    if ((img[i + 2] > 0) && (img[i + 1] > 0))
                    {
                        img[i + 2] = 255;
                        img[i + 1] = 255;
                    }
                }
            }
        }
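A minimal sketch of driving ShowMoireGrid directly. The class name gridCells is an assumption based on the CreateMoireGrid/FindBestPose calls later in this listing, and the grid parameters are arbitrary illustration values; the method clears the supplied RGB buffer itself before drawing:

        int img_width = 640, img_height = 480;
        byte[] img = new byte[img_width * img_height * 3];  // RGB, 3 bytes per pixel

        // two slightly different hexagonal grids produce the moire interference pattern
        gridCells.ShowMoireGrid(
            200.0f,    // sampling_radius_major_mm
            100.0f,    // sampling_radius_minor_mm
            10.0f,     // first_grid_spacing
            11.0f,     // second_grid_spacing
            0.0f,      // first_grid_rotation_degrees
            10.0f,     // second_grid_rotation_degrees
            50, 50,    // dimension_x_cells, dimension_y_cells
            0.3f,      // scaling_factor
            img, img_width, img_height);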
Example #31
        /// <summary>
        /// Show a diagram of the robot in this pose
        /// This is useful for checking that the positions of cameras have
        /// been calculated correctly
        /// </summary>
        /// <param name="robot">robot object</param>
        /// <param name="img">image as a byte array</param>
        /// <param name="width">width of the image</param>
        /// <param name="height">height of the image</param>
        /// <param name="clearBackground">clear the background before drawing</param>
        /// <param name="min_x_mm">top left x coordinate</param>
        /// <param name="min_y_mm">top left y coordinate</param>
        /// <param name="max_x_mm">bottom right x coordinate</param>
        /// <param name="max_y_mm">bottom right y coordinate</param>
        /// <param name="showFieldOfView">whether to show the field of view of each camera</param>
        public void Show(robot rob,
                         byte[] img, int width, int height, bool clearBackground,
                         int min_x_mm, int min_y_mm,
                         int max_x_mm, int max_y_mm, int line_width,
                         bool showFieldOfView)
        {
            if (clearBackground)
            {
                for (int i = 0; i < width * height * 3; i++)
                {
                    img[i] = 255;
                }
            }

            // get the positions of the head and cameras for this pose
            pos3D head_location = new pos3D(0, 0, 0);

            pos3D[] camera_centre_location = new pos3D[rob.head.no_of_stereo_cameras];
            pos3D[] left_camera_location   = new pos3D[rob.head.no_of_stereo_cameras];
            pos3D[] right_camera_location  = new pos3D[rob.head.no_of_stereo_cameras];
            calculateCameraPositions(rob, ref head_location,
                                     ref camera_centre_location,
                                     ref left_camera_location,
                                     ref right_camera_location);

            int w = max_x_mm - min_x_mm;
            int h = max_y_mm - min_y_mm;

            // draw the body
            int xx   = (int)((x - min_x_mm) * width / w);
            int yy   = (int)((y - min_y_mm) * height / h);
            int wdth = (int)(rob.BodyWidth_mm * width / w);
            int hght = (int)(rob.BodyLength_mm * height / h);

            drawing.drawBox(img, width, height, xx, yy, wdth, hght, pan, 0, 255, 0, line_width);

            // draw the head
            xx = (int)((head_location.x - min_x_mm) * width / w);
            yy = (int)((head_location.y - min_y_mm) * height / h);
            int radius = (int)(rob.HeadSize_mm * width / w);

            drawing.drawBox(img, width, height, xx, yy, radius, radius, head_location.pan, 0, 255, 0, line_width);

            // draw the cameras
            for (int cam = 0; cam < rob.head.no_of_stereo_cameras; cam++)
            {
                // draw the left camera
                int xx1 = (int)((left_camera_location[cam].x - min_x_mm) * width / w);
                int yy1 = (int)((left_camera_location[cam].y - min_y_mm) * height / h);
                wdth = (int)((rob.head.calibration[cam].baseline / 4) * width / w);
                hght = (int)((rob.head.calibration[cam].baseline / 12) * height / h);
                if (hght < 1)
                {
                    hght = 1;
                }
                drawing.drawBox(img, width, height, xx1, yy1, wdth, hght, left_camera_location[cam].pan + (float)(Math.PI / 2), 0, 255, 0, line_width);

                // draw the right camera
                int xx2 = (int)((right_camera_location[cam].x - min_x_mm) * width / w);
                int yy2 = (int)((right_camera_location[cam].y - min_y_mm) * height / h);
                drawing.drawBox(img, width, height, xx2, yy2, wdth, hght, right_camera_location[cam].pan + (float)(Math.PI / 2), 0, 255, 0, line_width);

                if (showFieldOfView)
                {
                    float half_FOV = rob.head.calibration[cam].leftcam.camera_FOV_degrees * (float)Math.PI / 360.0f;
                    int   xx_ray   = xx1 + (int)(width * Math.Sin(left_camera_location[cam].pan + half_FOV));
                    int   yy_ray   = yy1 + (int)(width * Math.Cos(left_camera_location[cam].pan + half_FOV));
                    drawing.drawLine(img, width, height, xx1, yy1, xx_ray, yy_ray, 200, 200, 255, 0, false);
                    xx_ray = xx1 + (int)(width * Math.Sin(left_camera_location[cam].pan - half_FOV));
                    yy_ray = yy1 + (int)(width * Math.Cos(left_camera_location[cam].pan - half_FOV));
                    drawing.drawLine(img, width, height, xx1, yy1, xx_ray, yy_ray, 200, 200, 255, 0, false);
                    xx_ray = xx2 + (int)(width * Math.Sin(right_camera_location[cam].pan + half_FOV));
                    yy_ray = yy2 + (int)(width * Math.Cos(right_camera_location[cam].pan + half_FOV));
                    drawing.drawLine(img, width, height, xx2, yy2, xx_ray, yy_ray, 200, 200, 255, 0, false);
                    xx_ray = xx2 + (int)(width * Math.Sin(right_camera_location[cam].pan - half_FOV));
                    yy_ray = yy2 + (int)(width * Math.Cos(right_camera_location[cam].pan - half_FOV));
                    drawing.drawLine(img, width, height, xx2, yy2, xx_ray, yy_ray, 200, 200, 255, 0, false);
                }
            }
        }
Example #32
        /// <summary>
        /// insert a stereo ray into the grid
        /// </summary>
        /// <param name="ray">stereo ray</param>
        /// <param name="sensormodel">sensor model for each grid level</param>
        /// <param name="left_camera_location">left stereo camera position and orientation</param>
        /// <param name="right_camera_location">right stereo camera position and orientation</param>
        /// <param name="localiseOnly">whether we are mapping or localising</param>
        /// <returns>localisation matching score</returns>
        public float Insert(
            evidenceRay ray,
		    stereoModel[] sensormodel,             
            pos3D left_camera_location,
            pos3D right_camera_location,
            bool localiseOnly)
        {
            float matchingScore = occupancygridBase.NO_OCCUPANCY_EVIDENCE;
            switch (grid_type)
            {
                case TYPE_SIMPLE:
                {
                    for (int grid_level = 0; grid_level < grid.Length; grid_level++)
                    {
					    rayModelLookup sensormodel_lookup = sensormodel[grid_level].ray_model;
                        occupancygridSimple grd = (occupancygridSimple)grid[grid_level];
                        float score = grd.Insert(ray, sensormodel_lookup, left_camera_location, right_camera_location, localiseOnly);

                        if (score != occupancygridBase.NO_OCCUPANCY_EVIDENCE)
                        {
                            if (matchingScore == occupancygridBase.NO_OCCUPANCY_EVIDENCE) matchingScore = 0;
                            matchingScore += grid_weight[grid_level] * score;
                        }
                    }
                    break;
                }                
            }
            return (matchingScore);
        }
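A minimal sketch of how a caller treats the returned score; NO_OCCUPANCY_EVIDENCE is a sentinel rather than a zero score, so it must be filtered out before accumulating (the ray, sensor models and camera locations here are assumed to be prepared as in the Map and Localise examples):

        float total_score = 0;
        float score = Insert(ray, sensormodel, left_camera_location, right_camera_location, true);
        if (score != occupancygridBase.NO_OCCUPANCY_EVIDENCE)
        {
            // only accumulate when the grid actually returned occupancy evidence
            total_score += score;
        }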
Example #33
        /// <summary>
        /// Mapping
        /// </summary>
        /// <param name="body_width_mm">width of the robot body in millimetres</param>
        /// <param name="body_length_mm">length of the robot body in millimetres</param>
        /// <param name="body_centre_of_rotation_x">x centre of rotation of the robot, relative to the top left corner</param>
        /// <param name="body_centre_of_rotation_y">y centre of rotation of the robot, relative to the top left corner</param>
        /// <param name="body_centre_of_rotation_z">z centre of rotation of the robot, relative to the top left corner</param>
        /// <param name="head_centroid_x">head centroid x position in millimetres relative to the top left corner of the body</param>
        /// <param name="head_centroid_y">head centroid y position in millimetres relative to the top left corner of the body</param>
        /// <param name="head_centroid_z">head centroid z position in millimetres relative to the top left corner of the body</param>
        /// <param name="head_pan">head pan angle in radians</param>
        /// <param name="head_tilt">head tilt angle in radians</param>
        /// <param name="head_roll">head roll angle in radians</param>
        /// <param name="baseline_mm">stereo camera baseline in millimetres</param>
        /// <param name="stereo_camera_position_x">stereo camera x position in millimetres relative to the head centroid</param>
        /// <param name="stereo_camera_position_y">stereo camera y position in millimetres relative to the head centroid</param>
        /// <param name="stereo_camera_position_z">stereo camera z position in millimetres relative to the head centroid</param>
        /// <param name="stereo_camera_pan">stereo camera pan in radians relative to the head</param>
        /// <param name="stereo_camera_tilt">stereo camera tilt in radians relative to the head</param>
        /// <param name="stereo_camera_roll">stereo camera roll in radians relative to the head</param>
        /// <param name="image_width">image width for each stereo camera</param>
        /// <param name="image_height">image height for each stereo camera</param>
        /// <param name="FOV_degrees">field of view for each stereo camera in degrees</param>
        /// <param name="stereo_features">stereo features (disparities) for each stereo camera</param>
        /// <param name="stereo_features_colour">stereo feature colours for each stereo camera</param>
        /// <param name="stereo_features_uncertainties">stereo feature uncertainties (priors) for each stereo camera</param>
        /// <param name="sensormodel">sensor model for each grid level</param>
        /// <param name="left_camera_location">returned position and orientation of the left camera on each stereo camera</param>
        /// <param name="right_camera_location">returned position and orientation of the right camera on each stereo camera</param>
        /// <param name="robot_pose">current estimated position and orientation of the robots centre of rotation</param>
        public void Map(
		    float body_width_mm,
		    float body_length_mm,
		    float body_centre_of_rotation_x,
		    float body_centre_of_rotation_y,
		    float body_centre_of_rotation_z,
		    float head_centroid_x,
		    float head_centroid_y,
		    float head_centroid_z,
		    float head_pan,
		    float head_tilt,
		    float head_roll,
		    int stereo_camera_index,
		    float baseline_mm,
		    float stereo_camera_position_x,
		    float stereo_camera_position_y,
		    float stereo_camera_position_z,
		    float stereo_camera_pan,
		    float stereo_camera_tilt,
		    float stereo_camera_roll,
            int image_width,
            int image_height,
            float FOV_degrees,
		    float[] stereo_features,
		    byte[,] stereo_features_colour,
		    float[] stereo_features_uncertainties,
            stereoModel[][] sensormodel,
            ref pos3D left_camera_location,
            ref pos3D right_camera_location,
            pos3D robot_pose)
        {
            // positions of the left and right cameras relative to the robot's centre of rotation
            pos3D head_location = new pos3D(0,0,0);
            pos3D camera_centre_location = null;
            pos3D relative_left_cam = null;
            pos3D relative_right_cam = null;
			
            occupancygridBase.calculateCameraPositions(
	            body_width_mm,
	            body_length_mm,
	            body_centre_of_rotation_x,
	            body_centre_of_rotation_y,
	            body_centre_of_rotation_z,
	            head_centroid_x,
	            head_centroid_y,
	            head_centroid_z,
	            head_pan,
	            head_tilt,
	            head_roll,
	            baseline_mm,
	            stereo_camera_position_x,
	            stereo_camera_position_y,
	            stereo_camera_position_z,
	            stereo_camera_pan,
	            stereo_camera_tilt,
	            stereo_camera_roll,
                ref head_location,
                ref camera_centre_location,
                ref relative_left_cam,
                ref relative_right_cam);
                                       
            left_camera_location = relative_left_cam.translate(robot_pose.x, robot_pose.y, robot_pose.z);
            right_camera_location = relative_right_cam.translate(robot_pose.x, robot_pose.y, robot_pose.z);

            pos3D stereo_camera_centre = new pos3D(0, 0, 0);

            // update the grid
			
            // centre position between the left and right cameras
            stereo_camera_centre.x = left_camera_location.x + ((right_camera_location.x - left_camera_location.x) * 0.5f);
            stereo_camera_centre.y = left_camera_location.y + ((right_camera_location.y - left_camera_location.y) * 0.5f);
            stereo_camera_centre.z = left_camera_location.z + ((right_camera_location.z - left_camera_location.z) * 0.5f);
            stereo_camera_centre.pan = robot_pose.pan + head_pan + stereo_camera_pan;
            stereo_camera_centre.tilt = robot_pose.tilt + head_tilt + stereo_camera_tilt;
            stereo_camera_centre.roll = robot_pose.roll + head_roll + stereo_camera_roll;

            // create a set of stereo rays as observed from this pose
            List<evidenceRay> rays = sensormodel[stereo_camera_index][0].createObservation(
                stereo_camera_centre,
                baseline_mm,
                image_width,
                image_height,
                FOV_degrees,
                stereo_features,
                stereo_features_colour,
                stereo_features_uncertainties,
                true);

		    // insert rays into the occupancy grid
            for (int r = 0; r < rays.Count; r++)
            {
                Insert(rays[r], sensormodel[stereo_camera_index], left_camera_location, right_camera_location, false);
            }
        }
Example #34
        /// <summary>
        /// returns a score indicating how closely matched the two viewpoints are
        /// </summary>
        /// <param name="other"></param>
        /// <param name="intersects"></param>
        /// <returns></returns>
        public float matchingScore(viewpoint other, float separation_tollerance, int ray_thickness, ArrayList intersects)
        {
            float separation;
            float score = 0;
            pos3D intersectPos = new pos3D(0, 0, 0);

            if (ray_thickness < 1) ray_thickness = 1;

            for (int cam1 = 0; cam1 < rays.Length; cam1++)
            {
                for (int ry1 = 0; ry1 < rays[cam1].Count; ry1++)
                {
                    evidenceRay ray1 = (evidenceRay)rays[cam1][ry1];
                    int pan_index1 = ray1.pan_index - 1;
                    int pan_index2 = ray1.pan_index;
                    int pan_index3 = ray1.pan_index + 1;
                    if (pan_index1 < 0) pan_index1 = evidenceRay.pan_steps - 1;
                    if (pan_index3 >= evidenceRay.pan_steps) pan_index3 = 0;

                    int cam2 = cam1;
                    //for (int cam2 = 0; cam2 < other.rays.Length; cam2++)
                    {
                        for (int ry2 = 0; ry2 < other.rays[cam2].Count; ry2++)
                        {
                            evidenceRay ray2 = (evidenceRay)other.rays[cam2][ry2];
                            int pan_index4 = ray2.pan_index;
                            if ((pan_index1 == pan_index4) ||
                                (pan_index2 == pan_index4) ||
                                (pan_index3 == pan_index4))
                            {
                                //do these rays intersect
                                separation = 999;
                                if (stereoModel.raysOverlap(ray1, ray2, intersectPos, separation_tollerance, ray_thickness, ref separation))
                                {
                                    float p1 = ray1.probability(intersectPos.x, intersectPos.y);
                                    float p2 = ray2.probability(intersectPos.x, intersectPos.y);
                                    float prob = (p1 * p2) + ((1.0f - p1) * (1.0f - p2));

                                    if (intersects != null)
                                    {
                                        // add the intersection to the list
                                        intersects.Add(intersectPos);
                                        intersectPos = new pos3D(0, 0, 0);
                                    }

                                    //increment the matching score
                                    score += 1.0f / (1.0f + ((1.0f - prob) * separation * separation));
                                    //score += 1.0f / (1.0f + (separation * separation) + (ray2.length * ray1.length / 2));
                                    //score += 1.0f / (1.0f + (separation * separation));
                                }
                            }
                        }
                    }

                }
            }
            return (score);
        }
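For intuition, each intersecting ray pair in the loop above contributes more when the two occupancy probabilities agree and the rays pass close together. A standalone worked example with illustrative numbers:

        using System;

        class RayMatchExample
        {
            static void Main()
            {
                float p1 = 0.8f, p2 = 0.7f;      // occupancy probabilities at the intersection
                float separation = 2.0f;         // separation between the rays at closest approach

                // agreement term: high when both rays agree (both occupied or both empty)
                float prob = (p1 * p2) + ((1.0f - p1) * (1.0f - p2));

                // contribution falls off with disagreement and with squared separation
                float contribution = 1.0f / (1.0f + ((1.0f - prob) * separation * separation));

                Console.WriteLine(contribution.ToString("F3")); // prints 0.397
            }
        }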
Example #35
        /// <summary>
        /// Returns positions of grid cells corresponding to a moire grid
        /// produced by the interference of a pair of hexagonal grids (theta waves)
        /// </summary>
        /// <param name="first_grid_spacing">spacing of the first hexagonal grid (theta wavelength 1)</param>
        /// <param name="second_grid_spacing">spacing of the second hexagonal grid (theta wavelength 2)</param>
        /// <param name="phase_major_degrees">phase precession in the direction of motion in degrees</param>
        /// <param name="phase_minor_degrees">phase precession perpendicular to the direction of motion in degrees</param>
        /// <param name="first_grid_rotation_degrees">rotation of the first grid (theta field 1)</param>
        /// <param name="second_grid_rotation_degrees">rotation of the second grid (theta field 2)</param>
        /// <param name="dimension_x_cells">number of grid cells in the x axis</param>
        /// <param name="dimension_y_cells">number of grid cells in the y axis</param>
        /// <param name="scaling_factor">scaling factor (k)</param>
        /// <param name="cells">returned grid cell positions</param>
        public static void CreateMoireGrid(
            float sampling_radius_major_mm,
            float sampling_radius_minor_mm,
            float first_grid_spacing,
            float second_grid_spacing,
            float first_grid_rotation_degrees,
            float second_grid_rotation_degrees,
            int dimension_x_cells,
            int dimension_y_cells,
            float scaling_factor,
            ref List <pos3D> cells)
        {
            cells.Clear();
            float scaling = 0;
            float orientation = 0;
            float dx, dy;

            // compute scaling and orientation of the grid
            MoireGrid(
                first_grid_spacing,
                second_grid_spacing,
                first_grid_rotation_degrees,
                second_grid_rotation_degrees,
                scaling_factor,
                ref scaling,
                ref orientation);

            float moire_grid_spacing = first_grid_spacing * scaling;
            float half_grid_spacing  = moire_grid_spacing / 2;

            float radius_sqr = sampling_radius_minor_mm * sampling_radius_minor_mm;

            Console.WriteLine("moire_grid_spacing: " + moire_grid_spacing.ToString());
            Console.WriteLine("radius_sqr: " + radius_sqr.ToString());

            float half_x = dimension_x_cells * moire_grid_spacing / 2.0f;
            float half_y = dimension_y_cells * moire_grid_spacing / 2.0f;

            for (int cell_x = 0; cell_x < dimension_x_cells; cell_x++)
            {
                for (int cell_y = 0; cell_y < dimension_y_cells; cell_y++)
                {
                    float x = (cell_x * moire_grid_spacing) - half_x;
                    if (cell_y % 2 == 0)
                    {
                        x += half_grid_spacing;
                    }
                    float y         = (cell_y * moire_grid_spacing) - half_y;
                    pos3D grid_cell = new pos3D(x, y, 0);
                    grid_cell = grid_cell.rotate(-orientation, 0, 0);

                    dx = grid_cell.x;
                    dy = (grid_cell.y * sampling_radius_minor_mm) / sampling_radius_major_mm;

                    float dist = dx * dx + dy * dy;
                    if (dist <= radius_sqr)
                    {
                        cells.Add(grid_cell);
                    }
                }
            }
        }
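A minimal usage sketch for this overload, assuming the static gridCells class used by the pose-sampling overload later in this listing; the spacings, rotations and cell counts are arbitrary illustration values:

        List<pos3D> cells = new List<pos3D>();

        gridCells.CreateMoireGrid(
            200.0f,    // sampling_radius_major_mm
            100.0f,    // sampling_radius_minor_mm
            10.0f,     // first_grid_spacing (theta wavelength 1)
            11.0f,     // second_grid_spacing (theta wavelength 2)
            0.0f,      // first_grid_rotation_degrees
            10.0f,     // second_grid_rotation_degrees
            50, 50,    // dimension_x_cells, dimension_y_cells
            0.3f,      // scaling_factor
            ref cells);

        Console.WriteLine("sample positions within the ellipse: " + cells.Count.ToString());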
Example #36
        // find the centre of the path
        public pos3D pathCentre()
        {            
            float centre_x = 0;
            float centre_y = 0;
            for (int v = 0; v < viewpoints.Count; v++)
            {
                viewpoint view = (viewpoint)viewpoints[v];
                centre_x += view.odometry_position.x;
                centre_y += view.odometry_position.y;
            }
            centre_x /= viewpoints.Count;
            centre_y /= viewpoints.Count;

            pos3D centre = new pos3D(centre_x, centre_y, 0);
            return (centre);
        }
Example #37
        public static void CreateMoireGrid(
            float sampling_radius_major_mm,
            float sampling_radius_minor_mm,
            int no_of_poses,
            float pan,
            float tilt,
            float roll,
            float max_orientation_variance,
            float max_tilt_variance,
            float max_roll_variance,
            Random rnd,
            ref List <pos3D> cells,
            byte[] img_basic,
            byte[] img_moire,
            int img_width,
            int img_height)
        {
            float first_grid_spacing           = sampling_radius_minor_mm / (float)Math.Sqrt(no_of_poses);
            float second_grid_spacing          = first_grid_spacing * 1.1f;
            float first_grid_rotation_degrees  = 0;
            float second_grid_rotation_degrees = 10;
            float scaling_factor = 0.3f;

            int dimension_x_cells = 50;
            int dimension_y_cells = 50;

            int cells_percent = 0;

            cells.Clear();
            int tries = 0;

            while (((cells_percent < 90) || (cells_percent > 110)) &&
                   (tries < 10))
            {
                CreateMoireGrid(
                    sampling_radius_major_mm,
                    sampling_radius_minor_mm,
                    first_grid_spacing,
                    second_grid_spacing,
                    first_grid_rotation_degrees,
                    second_grid_rotation_degrees,
                    dimension_x_cells,
                    dimension_y_cells,
                    scaling_factor,
                    ref cells);

                cells_percent = cells.Count * 100 / no_of_poses;

                if (cells_percent < 90)
                {
                    scaling_factor *= 0.9f;
                }
                if (cells_percent > 110)
                {
                    scaling_factor *= 1.1f;
                }
                tries++;

                Console.WriteLine("Cells = " + cells.Count.ToString());
            }

            for (int i = cells.Count - 1; i >= 0; i--)
            {
                pos3D sample_pose = cells[i];
                if (i % 2 == 0)
                {
                    sample_pose.pan = pan + ((float)rnd.NextDouble() * max_orientation_variance);
                }
                else
                {
                    sample_pose.pan = pan - ((float)rnd.NextDouble() * max_orientation_variance);
                }
                sample_pose.tilt = tilt + (((float)rnd.NextDouble() - 0.5f) * 2 * max_tilt_variance);
                sample_pose.roll = roll + (((float)rnd.NextDouble() - 0.5f) * 2 * max_roll_variance);
            }

            // create an image showing the results
            if (img_basic != null)
            {
                ShowMoireGrid(
                    sampling_radius_major_mm,
                    sampling_radius_minor_mm,
                    first_grid_spacing,
                    second_grid_spacing,
                    first_grid_rotation_degrees,
                    second_grid_rotation_degrees,
                    dimension_x_cells * 2,
                    dimension_y_cells * 2,
                    scaling_factor,
                    img_moire,
                    img_width,
                    img_height);

                float max_radius = sampling_radius_major_mm * 0.025f;
                for (int i = img_basic.Length - 1; i >= 0; i--)
                {
                    img_basic[i] = 0;
                }
                int tx = -(int)(sampling_radius_major_mm);
                int ty = -(int)(sampling_radius_major_mm);
                int bx = (int)(sampling_radius_major_mm);
                int by = (int)(sampling_radius_major_mm);

                for (int i = cells.Count - 1; i >= 0; i--)
                {
                    pos3D p      = cells[i];
                    int   x      = ((img_width - img_height) / 2) + (int)((p.x - tx) * img_height / (sampling_radius_major_mm * 2));
                    int   y      = (int)((p.y - ty) * img_height / (sampling_radius_major_mm * 2));
                    int   radius = (int)(max_radius * img_width / (sampling_radius_major_mm * 2));
                    int   r      = (int)(((p.pan - pan) - (-max_orientation_variance)) * 255 / (max_orientation_variance * 2));
                    int   g      = 255 - r;
                    int   b      = 0;
                    drawing.drawSpot(img_basic, img_width, img_height, x, y, radius, r, g, b);
                }
            }
        }
Example #38
        /// <summary>
        /// given a set of poses and their matching scores return the best pose
        /// </summary>
        /// <param name="poses">list of poses which have been evaluated</param>
        /// <param name="scores">localisation matching score for each pose</param>
        /// <param name="best_pose">returned best pose</param>
        /// <param name="sampling_radius_major_mm">major axis length</param>
        /// <param name="img">optional image data</param>
        /// <param name="img_width">image width</param>
        /// <param name="img_height">image height</param>
        static public void FindBestPose(
            List <pos3D> poses,
            List <float> scores,
            ref pos3D best_pose,
            float sampling_radius_major_mm,
            byte[] img_poses, byte[] img_graph,
            int img_width,
            int img_height,
            float known_best_pose_x,
            float known_best_pose_y)
        {
            float peak_radius = sampling_radius_major_mm * 0.5f;
            float max_score   = float.MinValue;
            float min_score   = float.MaxValue;
            float peak_x      = 0;
            float peak_y      = 0;
            float peak_z      = 0;

            for (int i = 0; i < scores.Count; i++)
            {
                if (scores[i] < min_score)
                {
                    min_score = scores[i];
                }
                if (scores[i] > max_score)
                {
                    max_score = scores[i];
                    peak_x    = poses[i].x;
                    peak_y    = poses[i].y;
                    peak_z    = poses[i].z;
                }
            }

            float score_range    = max_score - min_score;
            float minimum_score  = min_score + (score_range * 0.5f);
            float minimum_score2 = min_score + (score_range * 0.8f);

            //float minimum_score = 0.98f;
            //float minimum_score2 = 0.99f;

            if (best_pose == null)
            {
                best_pose = new pos3D(0, 0, 0);
            }
            best_pose.x    = 0;
            best_pose.y    = 0;
            best_pose.z    = 0;
            best_pose.pan  = 0;
            best_pose.tilt = 0;
            best_pose.roll = 0;
            float hits = 0;

            if (score_range > 0)
            {
                for (int i = 0; i < poses.Count; i++)
                {
                    if (scores[i] > minimum_score2)
                    {
                        float dx = poses[i].x - peak_x;
                        float dy = poses[i].y - peak_y;
                        float dz = poses[i].z - peak_z;
                        //if (Math.Abs(dx) < peak_radius)
                        {
                            //if (Math.Abs(dy) < peak_radius)
                            {
                                //if (Math.Abs(dz) < peak_radius)
                                {
                                    float score = (scores[i] - min_score) / score_range;
                                    score          *= score;
                                    best_pose.x    += poses[i].x * score;
                                    best_pose.y    += poses[i].y * score;
                                    best_pose.z    += poses[i].z * score;
                                    best_pose.pan  += poses[i].pan * score;
                                    best_pose.tilt += poses[i].tilt * score;
                                    best_pose.roll += poses[i].roll * score;
                                    hits           += score;
                                }
                            }
                        }
                    }
                }
            }
            if (hits > 0)
            {
                best_pose.x    /= hits;
                best_pose.y    /= hits;
                best_pose.z    /= hits;
                best_pose.pan  /= hits;
                best_pose.tilt /= hits;
                best_pose.roll /= hits;
            }

            float grad = 1;

            if (Math.Abs(best_pose.x) > 0.0001f)
            {
                grad = best_pose.y / best_pose.x;
            }
            float d1 = (float)Math.Sqrt(best_pose.x * best_pose.x + best_pose.y * best_pose.y);

            if (d1 < 0.001f)
            {
                d1 = 0.001f;
            }
            float origin_x = best_pose.x * sampling_radius_major_mm * 2 / d1;       //sampling_radius_major_mm*best_pose.x/Math.Abs(best_pose.x);
            float origin_y = best_pose.y * sampling_radius_major_mm * 2 / d1;

            List <float> points = new List <float>();
            float        points_min_distance = float.MaxValue;
            float        points_max_distance = float.MinValue;
            float        dxx = best_pose.y;
            float        dyy = best_pose.x;
            float        dist_to_best_pose = (float)Math.Sqrt(dxx * dxx + dyy * dyy);
            float        max_score2        = 0;

            for (int i = 0; i < poses.Count; i++)
            {
                if (scores[i] > minimum_score)
                {
                    float ix = 0;
                    float iy = 0;
                    geometry.intersection(
                        0, 0, best_pose.x, best_pose.y,
                        poses[i].x - dxx, poses[i].y - dyy, poses[i].x, poses[i].y,
                        ref ix, ref iy);

                    float dxx2  = ix - origin_x;
                    float dyy2  = iy - origin_y;
                    float dist  = (float)Math.Sqrt(dxx2 * dxx2 + dyy2 * dyy2);
                    float score = scores[i];
                    points.Add(dist);
                    points.Add(score);
                    if (score > max_score2)
                    {
                        max_score2 = score;
                    }
                    if (dist < points_min_distance)
                    {
                        points_min_distance = dist;
                    }
                    if (dist > points_max_distance)
                    {
                        points_max_distance = dist;
                    }
                }
            }


            // create an image showing the results
            if ((img_poses != null) && (score_range > 0))
            {
                float max_radius = img_width / 50; // sampling_radius_major_mm * 0.1f;
                for (int i = img_poses.Length - 1; i >= 0; i--)
                {
                    img_poses[i] = 0;
                    if (img_graph != null)
                    {
                        img_graph[i] = 255;
                    }
                }
                int tx = -(int)(sampling_radius_major_mm);
                int ty = -(int)(sampling_radius_major_mm);
                int bx = (int)(sampling_radius_major_mm);
                int by = (int)(sampling_radius_major_mm);

                int origin_x2 = (int)((origin_x - tx) * img_width / (sampling_radius_major_mm * 2));
                int origin_y2 = img_height - 1 - (int)((origin_y - ty) * img_height / (sampling_radius_major_mm * 2));
                int origin_x3 = (int)((0 - tx) * img_width / (sampling_radius_major_mm * 2));
                int origin_y3 = img_height - 1 - (int)((0 - ty) * img_height / (sampling_radius_major_mm * 2));
                drawing.drawLine(img_poses, img_width, img_height, origin_x3, origin_y3, origin_x2, origin_y2, 255, 255, 0, 1, false);

                for (int i = 0; i < poses.Count; i++)
                {
                    pos3D p           = poses[i];
                    float score       = scores[i];
                    int   x           = (int)((p.x - tx) * img_width / (sampling_radius_major_mm * 2));
                    int   y           = img_height - 1 - (int)((p.y - ty) * img_height / (sampling_radius_major_mm * 2));
                    int   radius      = (int)((score - min_score) * max_radius / score_range);
                    byte  intensity_r = (byte)((score - min_score) * 255 / score_range);
                    byte  intensity_g = intensity_r;
                    byte  intensity_b = intensity_r;
                    if (score < minimum_score)
                    {
                        intensity_r = 0;
                        intensity_g = 0;
                    }
                    if ((score >= minimum_score) &&
                        (score < max_score - ((max_score - minimum_score) * 0.5f)))
                    {
                        intensity_g = 0;
                    }
                    drawing.drawSpot(img_poses, img_width, img_height, x, y, radius, intensity_r, intensity_g, intensity_b);
                }

                int best_x = (int)((best_pose.x - tx) * img_width / (sampling_radius_major_mm * 2));
                int best_y = img_height - 1 - (int)((best_pose.y - ty) * img_height / (sampling_radius_major_mm * 2));
                drawing.drawCross(img_poses, img_width, img_height, best_x, best_y, (int)max_radius, 255, 0, 0, 1);

                int known_best_x = (int)((known_best_pose_x - tx) * img_width / (sampling_radius_major_mm * 2));
                int known_best_y = img_height - 1 - (int)((known_best_pose_y - ty) * img_height / (sampling_radius_major_mm * 2));
                drawing.drawCross(img_poses, img_width, img_height, known_best_x, known_best_y, (int)max_radius, 0, 255, 0, 1);

                if (img_graph != null)
                {
                    // draw the graph
                    for (int i = 0; i < points.Count; i += 2)
                    {
                        int graph_x = (int)((points[i] - points_min_distance) * (img_width - 1) / (points_max_distance - points_min_distance));
                        int graph_y = img_height - 1 - ((int)(points[i + 1] * (img_height - 1) / (max_score2 * 1.1f)));
                        drawing.drawCross(img_graph, img_width, img_height, graph_x, graph_y, 3, 0, 0, 0, 0);
                    }
                }
            }
        }
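A minimal sketch of calling FindBestPose on a handful of already-scored poses, using the gridCells class name shown in the Localise example below; both image buffers are optional, so null is passed and the image dimensions are unused:

        List<pos3D> poses = new List<pos3D>();
        List<float> scores = new List<float>();

        // three illustrative sample poses and their localisation matching scores
        poses.Add(new pos3D(0, 0, 0));    scores.Add(0.2f);
        poses.Add(new pos3D(50, 20, 0));  scores.Add(0.9f);
        poses.Add(new pos3D(60, 25, 0));  scores.Add(0.8f);

        pos3D best_pose = null;
        gridCells.FindBestPose(
            poses, scores, ref best_pose,
            200.0f,      // sampling_radius_major_mm
            null, null,  // img_poses, img_graph (no images)
            0, 0,        // img_width, img_height
            0, 0);       // known_best_pose_x, known_best_pose_y

        Console.WriteLine("best pose: " + best_pose.x.ToString() + ", " + best_pose.y.ToString());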
Example #39
		public void EvidenceRayRotation()
		{
			int debug_img_width = 640;
			int debug_img_height = 480;
		    byte[] debug_img = new byte[debug_img_width * debug_img_height * 3];
			for (int i = (debug_img_width * debug_img_height * 3)-1; i >= 0; i--)
				debug_img[i] = 255;
			Bitmap bmp = new Bitmap(debug_img_width, debug_img_height, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
			
			int cellSize_mm = 32;
		    int image_width = 320;
		    int image_height = 240;
			
			Console.WriteLine("Creating sensor models");			
			stereoModel inverseSensorModel = new stereoModel();
			inverseSensorModel.createLookupTable(cellSize_mm, image_width, image_height);
			
            // create a ray
			float FOV_horizontal = 78 * (float)Math.PI / 180.0f;
			inverseSensorModel.FOV_horizontal = FOV_horizontal;
			inverseSensorModel.FOV_vertical = FOV_horizontal * image_height / image_width;
            evidenceRay ray = 
			    inverseSensorModel.createRay(
				    image_width/2, image_height/2, 4, 
					0, 255, 255, 255);
			
			Assert.AreNotEqual(null, ray, "No ray was created");
			Assert.AreNotEqual(null, ray.vertices, "No ray vertices were created");
			
			pos3D[] start_vertices = (pos3D[])ray.vertices.Clone();
			
			Console.WriteLine("x,y,z:  " + start_vertices[0].x.ToString() + ", " + start_vertices[0].y.ToString() + ", " + start_vertices[0].z.ToString());
			for (int i = 0; i <  ray.vertices.Length; i++)
			{
				int j = i + 1;
				if (j == ray.vertices.Length) j = 0;
				int x0 = (debug_img_width/2) + (int)ray.vertices[i].x/50;
				int y0 = (debug_img_height/2) + (int)ray.vertices[i].y/50;
				int x1 = (debug_img_width/2) + (int)ray.vertices[j].x/50;
				int y1 = (debug_img_height/2) + (int)ray.vertices[j].y/50;
				drawing.drawLine(debug_img, debug_img_width, debug_img_height, x0,y0,x1,y1,0,255,0,0,false);
			}
			
			float angle_degrees = 30;
			float angle_radians = angle_degrees / 180.0f * (float)Math.PI;
			pos3D rotation = new pos3D(0, 0, 0);
			rotation.pan = angle_radians; // pan is specified in radians
			ray.translateRotate(rotation);
			
			Console.WriteLine("x,y,z:  " + ray.vertices[0].x.ToString() + ", " + ray.vertices[0].y.ToString() + ", " + ray.vertices[0].z.ToString());
			for (int i = 0; i <  ray.vertices.Length; i++)
			{
				int j = i + 1;
				if (j == ray.vertices.Length) j = 0;
				int x0 = (debug_img_width/2) + (int)ray.vertices[i].x/50;
				int y0 = (debug_img_height/2) + (int)ray.vertices[i].y/50;
				int x1 = (debug_img_width/2) + (int)ray.vertices[j].x/50;
				int y1 = (debug_img_height/2) + (int)ray.vertices[j].y/50;
				drawing.drawLine(debug_img, debug_img_width, debug_img_height, x0,y0,x1,y1,255,0,0,0,false);
			}

			BitmapArrayConversions.updatebitmap_unsafe(debug_img, bmp);
			bmp.Save("tests_occupancygrid_simple_EvidenceRayRotation.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);			
		}
Example #40
        /// <summary>
        /// Localisation
        /// </summary>
        /// <param name="body_width_mm">width of the robot body in millimetres</param>
        /// <param name="body_length_mm">length of the robot body in millimetres</param>
        /// <param name="body_centre_of_rotation_x">x centre of rotation of the robot, relative to the top left corner</param>
        /// <param name="body_centre_of_rotation_y">y centre of rotation of the robot, relative to the top left corner</param>
        /// <param name="body_centre_of_rotation_z">z centre of rotation of the robot, relative to the top left corner</param>
        /// <param name="head_centroid_x">head centroid x position in millimetres relative to the top left corner of the body</param>
        /// <param name="head_centroid_y">head centroid y position in millimetres relative to the top left corner of the body</param>
        /// <param name="head_centroid_z">head centroid z position in millimetres relative to the top left corner of the body</param>
        /// <param name="head_pan">head pan angle in radians</param>
        /// <param name="head_tilt">head tilt angle in radians</param>
        /// <param name="head_roll">head roll angle in radians</param>
        /// <param name="baseline_mm">stereo camera baseline in millimetres</param>
        /// <param name="stereo_camera_position_x">stereo camera x position in millimetres relative to the head centroid</param>
        /// <param name="stereo_camera_position_y">stereo camera y position in millimetres relative to the head centroid</param>
        /// <param name="stereo_camera_position_z">stereo camera z position in millimetres relative to the head centroid</param>
        /// <param name="stereo_camera_pan">stereo camera pan in radians relative to the head</param>
        /// <param name="stereo_camera_tilt">stereo camera tilt in radians relative to the head</param>
        /// <param name="stereo_camera_roll">stereo camera roll in radians relative to the head</param>
        /// <param name="image_width">image width for each stereo camera</param>
        /// <param name="image_height">image height for each stereo camera</param>
        /// <param name="FOV_degrees">field of view for each stereo camera in degrees</param>
        /// <param name="stereo_features">stereo features (disparities) for each stereo camera</param>
        /// <param name="stereo_features_colour">stereo feature colours for each stereo camera</param>
        /// <param name="stereo_features_uncertainties">stereo feature uncertainties (priors) for each stereo camera</param>
        /// <param name="sensormodel">sensor model for each stereo camera</param>
        /// <param name="left_camera_location">returned position and orientation of the left camera on each stereo camera</param>
        /// <param name="right_camera_location">returned position and orientation of the right camera on each stereo camera</param>
        /// <param name="no_of_samples">number of sample poses</param>
        /// <param name="sampling_radius_major_mm">major radius for samples, in the direction of robot movement</param>
        /// <param name="sampling_radius_minor_mm">minor radius for samples, perpendicular to the direction of robot movement</param>
        /// <param name="robot_pose">current estimated position and orientation of the robots centre of rotation, from which each stereo camera took its observations</param>
        /// <param name="max_orientation_variance">maximum variance in orientation in radians, used to create sample poses</param>
        /// <param name="max_tilt_variance">maximum variance in tilt angle in radians, used to create sample poses</param>
        /// <param name="max_roll_variance">maximum variance in roll angle in radians, used to create sample poses</param>
        /// <param name="poses">list of poses tried</param>
        /// <param name="pose_score">list of pose matching scores</param>
        /// <param name="pose_offset">offset of the best pose from the current one</param>
        /// <param name="rnd">random number generator</param>
        /// <param name="pose_offset">returned pose offset</param>
        /// <param name="img_poses">optional image within which to show poses</param>
        /// <param name="img_poses_width">width of the poses image</param>
        /// <param name="img_poses_height">height of the poses image</param>
        /// <returns>best localisation matching score</returns>
        public float Localise(
		    float body_width_mm,
		    float body_length_mm,
		    float body_centre_of_rotation_x,
		    float body_centre_of_rotation_y,
		    float body_centre_of_rotation_z,
		    float head_centroid_x,
		    float head_centroid_y,
		    float head_centroid_z,
		    float head_pan,
		    float head_tilt,
		    float head_roll,
		    float[] baseline_mm,
		    float[] stereo_camera_position_x,
		    float[] stereo_camera_position_y,
		    float[] stereo_camera_position_z,
		    float[] stereo_camera_pan,
		    float[] stereo_camera_tilt,
		    float[] stereo_camera_roll,
            int[] image_width,
            int[] image_height,
            float[] FOV_degrees,
		    float[][] stereo_features,
		    byte[][,] stereo_features_colour,
		    float[][] stereo_features_uncertainties,
            stereoModel[][] sensormodel,
            ref pos3D[] left_camera_location,
            ref pos3D[] right_camera_location,
            int no_of_samples,
            float sampling_radius_major_mm,
            float sampling_radius_minor_mm,
            pos3D[] robot_pose,
            float max_orientation_variance,
            float max_tilt_variance,
            float max_roll_variance,
            List<pos3D> poses,
            List<float> pose_score,
		    Random rnd,
            ref pos3D pose_offset,
            byte[] img_poses,
            int img_poses_width,
            int img_poses_height,
            float known_best_pose_x_mm,
            float known_best_pose_y_mm)
        {
            float best_matching_score = float.MinValue;

            poses.Clear();
            pose_score.Clear();
            gridCells.CreateMoireGrid(
                sampling_radius_major_mm,
                sampling_radius_minor_mm,
                no_of_samples,
                robot_pose[0].pan,
                robot_pose[0].tilt,
                robot_pose[0].roll,
                max_orientation_variance,
                max_tilt_variance,
                max_roll_variance,
                rnd,
                ref poses,
                null, null, 0, 0);

            // positions of the left and right cameras relative to the robot's centre of rotation
            pos3D head_location = new pos3D(0, 0, 0);
            left_camera_location = new pos3D[baseline_mm.Length];
            right_camera_location = new pos3D[baseline_mm.Length];
            pos3D[] camera_centre_location = new pos3D[baseline_mm.Length];
            pos3D[] relative_left_cam = new pos3D[baseline_mm.Length];
            pos3D[] relative_right_cam = new pos3D[baseline_mm.Length];
            for (int cam = 0; cam < baseline_mm.Length; cam++)
            {
                occupancygridBase.calculateCameraPositions(
                    body_width_mm,
                    body_length_mm,
                    body_centre_of_rotation_x,
                    body_centre_of_rotation_y,
                    body_centre_of_rotation_z,
                    head_centroid_x,
                    head_centroid_y,
                    head_centroid_z,
                    head_pan,
                    head_tilt,
                    head_roll,
                    baseline_mm[cam],
                    stereo_camera_position_x[cam],
                    stereo_camera_position_y[cam],
                    stereo_camera_position_z[cam],
                    stereo_camera_pan[cam],
                    stereo_camera_tilt[cam],
                    stereo_camera_roll[cam],
                    ref head_location,
                    ref camera_centre_location[cam],
                    ref relative_left_cam[cam],
                    ref relative_right_cam[cam]);

                left_camera_location[cam] = relative_left_cam[cam].translate(robot_pose[cam].x, robot_pose[cam].y, robot_pose[cam].z);
                right_camera_location[cam] = relative_right_cam[cam].translate(robot_pose[cam].x, robot_pose[cam].y, robot_pose[cam].z);
            }

            pose_score.Clear();
            for (int p = 0; p < poses.Count; p++)
            {
                pose_score.Add(0);
            }

            int no_of_zero_probabilities = 0;

            // try a number of random poses
            // we can do this in parallel
            Parallel.For(0, poses.Count, delegate(int i)
            //for (int i = 0; i < poses.Count; i++)
            {
                pos3D sample_pose = poses[i];

                float matching_score = 0;

                for (int cam = 0; cam < baseline_mm.Length; cam++)
                {
                    // update the position of the left camera for this pose
                    pos3D sample_pose_left_cam = relative_left_cam[cam].add(sample_pose);
                    sample_pose_left_cam.pan = 0;
                    sample_pose_left_cam.tilt = 0;
                    sample_pose_left_cam.roll = 0;
                    sample_pose_left_cam = sample_pose_left_cam.rotate(sample_pose.pan, sample_pose.tilt, sample_pose.roll);
                    sample_pose_left_cam.x += robot_pose[cam].x;
                    sample_pose_left_cam.y += robot_pose[cam].y;
                    sample_pose_left_cam.z += robot_pose[cam].z;

                    // update the position of the right camera for this pose
                    pos3D sample_pose_right_cam = relative_right_cam[cam].add(sample_pose);
                    sample_pose_right_cam.pan = 0;
                    sample_pose_right_cam.tilt = 0;
                    sample_pose_right_cam.roll = 0;
                    sample_pose_right_cam = sample_pose_right_cam.rotate(sample_pose.pan, sample_pose.tilt, sample_pose.roll);
                    sample_pose_right_cam.x += robot_pose[cam].x;
                    sample_pose_right_cam.y += robot_pose[cam].y;
                    sample_pose_right_cam.z += robot_pose[cam].z;

                    // centre position between the left and right cameras
                    pos3D stereo_camera_centre = 
                        new pos3D(
                            sample_pose_left_cam.x + ((sample_pose_right_cam.x - sample_pose_left_cam.x) * 0.5f),
                            sample_pose_left_cam.y + ((sample_pose_right_cam.y - sample_pose_left_cam.y) * 0.5f),
                            sample_pose_left_cam.z + ((sample_pose_right_cam.z - sample_pose_left_cam.z) * 0.5f));

                    stereo_camera_centre.pan = head_pan + stereo_camera_pan[cam] + sample_pose.pan;
                    stereo_camera_centre.tilt = head_tilt + stereo_camera_tilt[cam] + sample_pose.tilt;
                    stereo_camera_centre.roll = head_roll + stereo_camera_roll[cam] + sample_pose.roll;

                    // create a set of stereo rays as observed from this pose
                    List<evidenceRay> rays = sensormodel[cam][0].createObservation(
                        stereo_camera_centre,
                        baseline_mm[cam],
                        image_width[cam],
                        image_height[cam],
                        FOV_degrees[cam],
                        stereo_features[cam],
                        stereo_features_colour[cam],
                        stereo_features_uncertainties[cam],
                        true);

                    // insert rays into the occupancy grid
                    for (int r = 0; r < rays.Count; r++)
                    {
                        float score = Insert(rays[r], sensormodel[cam], sample_pose_left_cam, sample_pose_right_cam, true);
                        if (score != occupancygridBase.NO_OCCUPANCY_EVIDENCE)
                        {
                            if (matching_score == occupancygridBase.NO_OCCUPANCY_EVIDENCE) matching_score = 0;
                            matching_score += score;
                        }
                    }
                }

                // make the pose orientation relative to the robot's current heading
                sample_pose.pan -= robot_pose[0].pan;
                sample_pose.tilt -= robot_pose[0].tilt;
                sample_pose.roll -= robot_pose[0].roll;

                if (Math.Abs(sample_pose.pan) > max_orientation_variance)
                {
                    Console.WriteLine("Pose variance out of range");
                }

                if (matching_score != occupancygridBase.NO_OCCUPANCY_EVIDENCE)
                {
                    float prob = probabilities.LogOddsToProbability(matching_score);
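                    // note: no_of_zero_probabilities is shared across Parallel.For threads;
                    // Interlocked.Increment(ref no_of_zero_probabilities) would avoid a potential race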
                    if (prob == 0) no_of_zero_probabilities++;
                    pose_score[i] = prob;
                    //pose_score[i] = matching_score;
                }
            } );

            if (no_of_zero_probabilities == poses.Count)
            {
                Console.WriteLine("Localisation failure");
                pose_offset = new pos3D(0, 0, 0);
                best_matching_score = occupancygridBase.NO_OCCUPANCY_EVIDENCE;
            }
            else
            {
                // locate the best possible pose
                pos3D best_robot_pose = new pos3D(0, 0, 0);
                gridCells.FindBestPose(poses, pose_score, ref best_robot_pose, sampling_radius_major_mm, img_poses, null, img_poses_width, img_poses_height, known_best_pose_x_mm, known_best_pose_y_mm);

                // rotate the best pose to the robot's current orientation
                // this becomes an offset, which may be used for course correction
                pose_offset = best_robot_pose.rotate(robot_pose[0].pan, robot_pose[0].tilt, robot_pose[0].roll);

                // orientation relative to the current heading
                pose_offset.pan = best_robot_pose.pan;
                pose_offset.tilt = best_robot_pose.tilt;
                pose_offset.roll = best_robot_pose.roll;

                // range checks                        
                if (Math.Abs(pose_offset.pan) > max_orientation_variance) Console.WriteLine("pose_offset pan out of range");
                if (Math.Abs(pose_offset.tilt) > max_tilt_variance) Console.WriteLine("pose_offset tilt out of range");
                if (Math.Abs(pose_offset.roll) > max_roll_variance) Console.WriteLine("pose_offset roll out of range");
            }

            return (best_matching_score);
        }
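The per-ray scoring above treats occupancygridBase.NO_OCCUPANCY_EVIDENCE as a sentinel meaning "no evidence", not as a numeric score. A minimal self-contained sketch of that accumulation rule; the sentinel value used here is an assumption chosen only for illustration:

using System;

class ScoreAccumulationSketch
{
    // assumed sentinel, standing in for occupancygridBase.NO_OCCUPANCY_EVIDENCE
    const float NO_OCCUPANCY_EVIDENCE = 99999999.0f;

    // accumulate per-ray localisation scores, skipping rays that produced no evidence
    static float Accumulate(float[] ray_scores)
    {
        float matching_score = NO_OCCUPANCY_EVIDENCE;
        foreach (float score in ray_scores)
        {
            if (score == NO_OCCUPANCY_EVIDENCE) continue;            // ray produced no evidence
            if (matching_score == NO_OCCUPANCY_EVIDENCE) matching_score = 0;
            matching_score += score;
        }
        return matching_score;
    }

    static void Main()
    {
        float[] scores = { 0.5f, NO_OCCUPANCY_EVIDENCE, -0.2f };
        Console.WriteLine(Accumulate(scores));                        // ~0.3
    }
}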
Example #41
        /// <summary>
        /// returns a score indicating how closely matched the two viewpoints are
        /// </summary>
        /// <param name="other"></param>
        /// <param name="intersects"></param>
        /// <returns></returns>
        public float matchingScore(viewpoint other, float separation_tollerance, int ray_thickness, ArrayList intersects)
        {
            float separation;
            float score        = 0;
            pos3D intersectPos = new pos3D(0, 0, 0);

            if (ray_thickness < 1)
            {
                ray_thickness = 1;
            }

            for (int cam1 = 0; cam1 < rays.Length; cam1++)
            {
                for (int ry1 = 0; ry1 < rays[cam1].Count; ry1++)
                {
                    evidenceRay ray1       = (evidenceRay)rays[cam1][ry1];
                    int         pan_index1 = ray1.pan_index - 1;
                    int         pan_index2 = ray1.pan_index;
                    int         pan_index3 = ray1.pan_index + 1;
                    if (pan_index1 < 0)
                    {
                        pan_index1 = evidenceRay.pan_steps - 1;
                    }
                    if (pan_index3 >= evidenceRay.pan_steps)
                    {
                        pan_index3 = 0;
                    }

                    int cam2 = cam1;
                    //for (int cam2 = 0; cam2 < other.rays.Length; cam2++)
                    {
                        for (int ry2 = 0; ry2 < other.rays[cam2].Count; ry2++)
                        {
                            evidenceRay ray2       = (evidenceRay)other.rays[cam2][ry2];
                            int         pan_index4 = ray2.pan_index;
                            if ((pan_index1 == pan_index4) ||
                                (pan_index2 == pan_index4) ||
                                (pan_index3 == pan_index4))
                            {
                                //do these rays intersect
                                separation = 999;
                                if (stereoModel.raysOverlap(ray1, ray2, intersectPos, separation_tollerance, ray_thickness, ref separation))
                                {
                                    float p1   = ray1.probability(intersectPos.x, intersectPos.y);
                                    float p2   = ray2.probability(intersectPos.x, intersectPos.y);
                                    float prob = (p1 * p2) + ((1.0f - p1) * (1.0f - p2));

                                    if (intersects != null)
                                    {
                                        // add the intersection to the list
                                        intersects.Add(intersectPos);
                                        intersectPos = new pos3D(0, 0, 0);
                                    }

                                    //increment the matching score
                                    score += 1.0f / (1.0f + ((1.0f - prob) * separation * separation));
                                    //score += 1.0f / (1.0f + (separation * separation) + (ray2.length * ray1.length / 2));
                                    //score += 1.0f / (1.0f + (separation * separation));
                                }
                            }
                        }
                    }
                }
            }
            return(score);
        }
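Each intersecting ray pair contributes 1 / (1 + (1 - prob) * separation^2) to the score, where prob measures how strongly the two rays agree at the intersection point, so nearby, mutually consistent rays dominate. A small standalone illustration of that contribution:

using System;

class RayMatchSketch
{
    // contribution of one intersecting ray pair, following the formula used above
    static float Contribution(float p1, float p2, float separation)
    {
        float prob = (p1 * p2) + ((1.0f - p1) * (1.0f - p2));   // agreement of the two rays
        return 1.0f / (1.0f + ((1.0f - prob) * separation * separation));
    }

    static void Main()
    {
        Console.WriteLine(Contribution(0.9f, 0.9f, 1.0f));       // close, agreeing rays  -> ~0.85
        Console.WriteLine(Contribution(0.9f, 0.1f, 10.0f));      // distant, disagreeing  -> ~0.01
    }
}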
Example #42
		/// <summary>
		/// localise and return offset values
		/// </summary>
		/// <param name="stereo_features">disparities for each stereo camera (x,y,disparity)</param>
		/// <param name="stereo_features_colour">colour for each disparity</param>
		/// <param name="stereo_features_uncertainties">uncertainty for each disparity</param>
		/// <param name="debug_mapping_filename">optional filename used for saving debugging info</param>
		/// <param name="known_offset_x_mm">ideal x offset, if known</param>
		/// <param name="known_offset_y_mm">ideal y offset, if known</param>
		/// <param name="offset_x_mm">returned x offset</param>
		/// <param name="offset_y_mm">returned y offset</param>
		/// <param name="offset_z_mm">returned z offset</param>
		/// <param name="offset_pan_radians">returned pan</param>
		/// <param name="offset_tilt_radians">returned tilt</param>
		/// <param name="offset_roll_radians">returned roll</param>
		/// <returns>true if the localisation was valid</returns>
		public bool Localise(
		    float[][] stereo_features,
		    byte[][,] stereo_features_colour,
		    float[][] stereo_features_uncertainties,		                     
		    string debug_mapping_filename,
		    float known_offset_x_mm,
		    float known_offset_y_mm,
		    ref float offset_x_mm,
		    ref float offset_y_mm,
		    ref float offset_z_mm,
		    ref float offset_pan_radians,
		    ref float offset_tilt_radians,
		    ref float offset_roll_radians)
		{
		    int overall_img_width = 640;
		    int overall_img_height = 480;
			bool valid_localisation = true;
			pos3D pose_offset = new pos3D(0,0,0);
			bool buffer_transition = false;
			
            float matching_score = buffer.Localise(
                robot_geometry,
		        stereo_features,
		        stereo_features_colour,
		        stereo_features_uncertainties,
		        rnd,
                ref pose_offset,
                ref buffer_transition,
                debug_mapping_filename,
                known_offset_x_mm, 
			    known_offset_y_mm,
		        overall_map_filename,
		        ref overall_map_img,
		        overall_img_width,
		        overall_img_height,
		        overall_map_dimension_mm,
		        overall_map_centre_x_mm,
		        overall_map_centre_y_mm);
			
			if (matching_score == occupancygridBase.NO_OCCUPANCY_EVIDENCE)
				valid_localisation = false;
			
		    offset_x_mm = pose_offset.x;
		    offset_y_mm = pose_offset.y;
		    offset_z_mm = pose_offset.z;
		    offset_pan_radians = pose_offset.pan;
		    offset_tilt_radians = pose_offset.tilt;
		    offset_roll_radians = pose_offset.roll;
			
			return(valid_localisation);
		}
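A hedged sketch of how this wrapper might be called. The slam variable, its dpslam type name, and the stereo feature arrays are placeholders and not part of the original code; they only show the calling convention:

// assumed: an instance of the class containing Localise, plus stereo feature
// arrays produced by an earlier dense stereo step (all names are placeholders)
static void LocaliseOnce(
    dpslam slam,
    float[][] stereo_features,
    byte[][,] stereo_features_colour,
    float[][] stereo_features_uncertainties)
{
    float offset_x_mm = 0, offset_y_mm = 0, offset_z_mm = 0;
    float offset_pan = 0, offset_tilt = 0, offset_roll = 0;

    bool valid = slam.Localise(
        stereo_features, stereo_features_colour, stereo_features_uncertainties,
        null,                          // no debug mapping images
        0, 0,                          // ideal x/y offsets unknown
        ref offset_x_mm, ref offset_y_mm, ref offset_z_mm,
        ref offset_pan, ref offset_tilt, ref offset_roll);

    if (valid)
        System.Console.WriteLine("pose correction: " + offset_x_mm + "mm, " + offset_y_mm + "mm");
}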
Example #43
        public void PanTilt()
        {
            int pan_angle1 = -40;
            float pan1 = pan_angle1 * (float)Math.PI / 180.0f;
            int tilt_angle1 = 20;
            float tilt1 = tilt_angle1 * (float)Math.PI / 180.0f;
            
            pos3D pos1 = new pos3D(0, 50, 0);
            pos3D pos2 = pos1.rotate_old(pan1,tilt1,0);
            pos3D pos3 = pos1.rotate(pan1,tilt1,0);
            
            float dx = Math.Abs(pos2.x - pos3.x);
            float dy = Math.Abs(pos2.y - pos3.y);
            float dz = Math.Abs(pos2.z - pos3.z);
            Console.WriteLine("pos old: " + pos2.x.ToString() + ",  " + pos2.y.ToString() + ",  " + pos2.z.ToString());
            Console.WriteLine("pos new: " + pos3.x.ToString() + ",  " + pos3.y.ToString() + ",  " + pos3.z.ToString());
            Assert.Less(dx, 1);
            Assert.Less(dy, 1);
            Assert.Less(dz, 1);
	    }
Example #44
        /// <summary>
        /// Calculate the position of the robot's head and cameras for this pose.
        /// The positions returned are relative to the robot's centre of rotation
        /// </summary>
        /// <param name="body_width_mm">width of the robot body in millimetres</param>
        /// <param name="body_length_mm">length of the robot body in millimetres</param>
        /// <param name="body_centre_of_rotation_x">x centre of rotation of the robot, relative to the top left corner</param>
        /// <param name="body_centre_of_rotation_y">y centre of rotation of the robot, relative to the top left corner</param>
        /// <param name="body_centre_of_rotation_z">z centre of rotation of the robot, relative to the top left corner</param>
        /// <param name="head_centroid_x">head centroid x position in millimetres relative to the top left corner of the body</param>
        /// <param name="head_centroid_y">head centroid y position in millimetres relative to the top left corner of the body</param>
        /// <param name="head_centroid_z">head centroid z position in millimetres relative to the top left corner of the body</param>
        /// <param name="head_pan">head pan angle in radians</param>
        /// <param name="head_tilt">head tilt angle in radians</param>
        /// <param name="head_roll">head roll angle in radians</param>
        /// <param name="baseline_mm">stereo camera baseline in millimetres</param>
        /// <param name="stereo_camera_position_x">stereo camera x position in millimetres relative to the head centroid</param>
        /// <param name="stereo_camera_position_y">stereo camera y position in millimetres relative to the head centroid</param>
        /// <param name="stereo_camera_position_z">stereo camera z position in millimetres relative to the head centroid</param>
        /// <param name="stereo_camera_pan">stereo camera pan in radians relative to the head</param>
        /// <param name="stereo_camera_tilt">stereo camera tilt in radians relative to the head</param>
        /// <param name="stereo_camera_roll">stereo camera roll in radians relative to the head</param>
        /// <param name="head_location">returned location/orientation of the robot head</param>
        /// <param name="camera_centre_location">returned stereo camera centre position/orientation</param>
        /// <param name="left_camera_location">returned left camera position/orientation</param>
        /// <param name="right_camera_location">returned right camera position/orientation</param>
        public static void calculateCameraPositions(
            float body_width_mm,
            float body_length_mm,
            float body_centre_of_rotation_x,
            float body_centre_of_rotation_y,
            float body_centre_of_rotation_z,
            float head_centroid_x,
            float head_centroid_y,
            float head_centroid_z,
            float head_pan,
            float head_tilt,
            float head_roll,
            float baseline_mm,
            float stereo_camera_position_x,
            float stereo_camera_position_y,
            float stereo_camera_position_z,
            float stereo_camera_pan,
            float stereo_camera_tilt,
            float stereo_camera_roll,
            ref pos3D head_location,
            ref pos3D camera_centre_location,
            ref pos3D left_camera_location,
            ref pos3D right_camera_location)
        {
            // calculate the position of the centre of the head relative to
            // the centre of rotation of the robot's body
            pos3D head_centroid =
                new pos3D(
                    -(body_width_mm * 0.5f) + (body_centre_of_rotation_x - (body_width_mm * 0.5f)) + head_centroid_x,
                    -(body_length_mm * 0.5f) + (body_centre_of_rotation_y - (body_length_mm * 0.5f)) + head_centroid_y,
                    head_centroid_z);

            // location of the centre of the head
            // adjusted for the robot pose and the head pan and tilt angle.
            // Note that the positions and orientations of individual cameras
            // on the head have already been accounted for within stereoModel.createObservation
            pos3D head_locn =
                head_centroid.rotate(
                    head_pan, head_tilt, 0);

            head_location.copyFrom(head_locn);

            // calculate the position of the centre of the stereo camera
            // (baseline centre point)
            pos3D camera_centre_locn =
                new pos3D(
                    stereo_camera_position_x,
                    stereo_camera_position_y,
                    stereo_camera_position_z);

            // rotate the stereo camera
            camera_centre_locn =
                camera_centre_locn.rotate(
                    stereo_camera_pan + head_pan,
                    stereo_camera_tilt + head_tilt,
                    stereo_camera_roll + head_roll);

            // move the stereo camera relative to the head position
            camera_centre_location =
                camera_centre_locn.translate(
                    head_location.x,
                    head_location.y,
                    head_location.z);

            // where are the left and right cameras?
            // we need to know this for the origins of the vacancy models
            float half_baseline_length = baseline_mm * 0.5f;
            pos3D left_camera_locn     = new pos3D(-half_baseline_length, 0, 0);

            left_camera_locn = left_camera_locn.rotate(stereo_camera_pan + head_pan, stereo_camera_tilt + head_tilt, stereo_camera_roll + head_roll);
            pos3D right_camera_locn = new pos3D(-left_camera_locn.x, -left_camera_locn.y, -left_camera_locn.z);

            left_camera_location      = left_camera_locn.translate(camera_centre_location.x, camera_centre_location.y, camera_centre_location.z);
            right_camera_location     = right_camera_locn.translate(camera_centre_location.x, camera_centre_location.y, camera_centre_location.z);
            right_camera_location.pan = left_camera_location.pan;
        }
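A hedged usage sketch for the static helper above, written as it would be called from inside the declaring class; every geometry number is invented purely for illustration:

// output positions to be filled in by calculateCameraPositions
pos3D head = new pos3D(0, 0, 0);
pos3D camera_centre = new pos3D(0, 0, 0);
pos3D left_cam = new pos3D(0, 0, 0);
pos3D right_cam = new pos3D(0, 0, 0);

// hypothetical geometry: 400x500mm body, head centroid 600mm up, 100mm stereo baseline
calculateCameraPositions(
    400, 500,
    200, 250, 0,          // body centre of rotation
    200, 100, 600,        // head centroid
    0.2f, 0, 0,           // head pan/tilt/roll in radians
    100,                  // baseline_mm
    0, 50, 0,             // stereo camera position relative to the head centroid
    0, 0, 0,              // stereo camera pan/tilt/roll
    ref head, ref camera_centre, ref left_cam, ref right_cam);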
Example #45
        public pos3D rotate_old(float pan, float tilt, float roll)
        {
            float hyp;
            pos3D rotated = new pos3D(x, y, z);

            // roll
            //if (roll != 0)
            {
                hyp = (float)Math.Sqrt((rotated.x * rotated.x) + (rotated.z * rotated.z));
                if (hyp > 0)
                {
                    float roll_angle = (float)Math.Acos(rotated.z / hyp);
                    if (rotated.x < 0)
                    {
                        roll_angle = (float)(Math.PI * 2) - roll_angle;
                    }
                    float new_roll_angle = roll + roll_angle;
                    rotated.x = hyp * (float)Math.Sin(new_roll_angle);
                    rotated.z = hyp * (float)Math.Cos(new_roll_angle);
                }
            }
            if (tilt != 0)
            {
                // tilt
                hyp = (float)Math.Sqrt((rotated.y * rotated.y) + (rotated.z * rotated.z));
                if (hyp > 0)
                {
                    float tilt_angle = (float)Math.Acos(rotated.y / hyp);
                    if (rotated.z < 0)
                    {
                        tilt_angle = (float)(Math.PI * 2) - tilt_angle;
                    }
                    float new_tilt_angle = tilt + tilt_angle;
                    rotated.y = hyp * (float)Math.Sin(new_tilt_angle);
                    rotated.z = hyp * (float)Math.Cos(new_tilt_angle);
                }
            }



            //if (pan != 0)
            {
                // pan
                hyp = (float)Math.Sqrt((rotated.x * rotated.x) + (rotated.y * rotated.y));
                if (hyp > 0)
                {
                    float pan_angle = (float)Math.Acos(rotated.y / hyp);
                    if (rotated.x < 0)
                    {
                        pan_angle = (float)(Math.PI * 2) - pan_angle;
                    }
                    rotated.new_pan_angle = pan - pan_angle;
                    rotated.dist_xy       = hyp;
                    rotated.x             = hyp * (float)Math.Sin(rotated.new_pan_angle);
                    rotated.y             = hyp * (float)Math.Cos(rotated.new_pan_angle);
                }
            }
            rotated.pan  = this.pan + pan;
            rotated.tilt = this.tilt + tilt;
            rotated.roll = this.roll + roll;
            return(rotated);
        }
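The pan step above is a polar-coordinate rotation in the x-y plane; the same result can be reproduced with plain trigonometry, as in this self-contained check (the -40 degree pan mirrors the PanTilt test earlier):

using System;

class PanRotationSketch
{
    static void Main()
    {
        float x = 0, y = 50;                        // point in front of the observer
        float pan = -40 * (float)Math.PI / 180.0f;  // pan angle in radians

        // equivalent of the pan block above: convert to (angle, radius), add pan, convert back
        float hyp = (float)Math.Sqrt(x * x + y * y);
        float pan_angle = (float)Math.Acos(y / hyp);
        if (x < 0) pan_angle = (float)(Math.PI * 2) - pan_angle;
        float new_pan_angle = pan - pan_angle;
        float rx = hyp * (float)Math.Sin(new_pan_angle);
        float ry = hyp * (float)Math.Cos(new_pan_angle);

        Console.WriteLine(rx + ", " + ry);          // roughly -32.1, 38.3
    }
}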
Example #46
        /// <summary>
        /// removes low probability poses
        /// Note that a maturity threshold is used, so that poses which may initially
        /// be unfairly judged as improbable have time to prove their worth
        /// </summary>
        private void Prune()
        {
            float max_score = float.MinValue;

            best_path = null;

            // sort poses by score
            for (int i = 0; i < Poses.Count - 1; i++)
            {
                particlePath p1 = Poses[i];

                // keep track of the bounding region within which the path tree occurs
                if (p1.current_pose.x < min_tree_x)
                {
                    min_tree_x = p1.current_pose.x;
                }
                if (p1.current_pose.x > max_tree_x)
                {
                    max_tree_x = p1.current_pose.x;
                }
                if (p1.current_pose.y < min_tree_y)
                {
                    min_tree_y = p1.current_pose.y;
                }
                if (p1.current_pose.y > max_tree_y)
                {
                    max_tree_y = p1.current_pose.y;
                }

                max_score = p1.total_score;
                particlePath winner       = null;
                int          winner_index = 0;
                for (int j = i + 1; j < Poses.Count; j++)
                {
                    particlePath p2 = Poses[j];
                    float        sc = p2.total_score;
                    if ((sc > max_score) ||
                        ((max_score == 0) && (sc != 0)))
                    {
                        max_score    = sc;
                        winner       = p2;
                        winner_index = j;
                    }
                }
                if (winner != null)
                {
                    Poses[i]            = winner;
                    Poses[winner_index] = p1;
                }
            }

            // the best path is at the top
            best_path = Poses[0];

            // It's culling season
            int cull_index = (100 - cull_threshold) * Poses.Count / 100;

            if (cull_index > Poses.Count - 2)
            {
                cull_index = Poses.Count - 2;
            }
            for (int i = Poses.Count - 1; i > cull_index; i--)
            {
                particlePath path = Poses[i];
                if (path.path.Count >= pose_maturation)
                {
                    // remove mapping hypotheses for this path
                    path.Remove(LocalGrid);

                    // now remove the path itself
                    Poses.RemoveAt(i);
                }
            }

            // garbage collect any dead paths (R.I.P.)
            List <particlePath> possible_roots = new List <particlePath>(); // stores paths where all branches have converged to a single possibility

            for (int i = ActivePoses.Count - 1; i >= 0; i--)
            {
                particlePath path = ActivePoses[i];

                if ((!path.Enabled) ||
                    ((path.total_children == 0) && (path.branch_pose == null) && (path.current_pose.time_step < time_step - pose_maturation - 5)))
                {
                    ActivePoses.RemoveAt(i);
                }
                else
                {
                    // record any fully collapsed paths
                    if (!path.Collapsed)
                    {
                        if (path.branch_pose == null)
                        {
                            possible_roots.Add(path);
                        }
                        else
                        {
                            if (path.branch_pose.path.Collapsed)
                            {
                                possible_roots.Add(path);
                            }
                        }
                    }
                }
            }

            if (possible_roots.Count == 1)
            {
                // collapse the path
                particlePath path = possible_roots[0];

                if (path.branch_pose != null)
                {
                    particlePath previous_path = path.branch_pose.path;
                    previous_path.Distill(LocalGrid);
                    path.branch_pose.parent = null;
                    path.branch_pose        = null;
                }

                path.Collapsed = true;

                // take note of the time step.  This is for use with the display functions only
                root_time_step = path.current_pose.time_step;
            }

            if (best_path != null)
            {
                // update the current robot position with the best available pose
                if (current_robot_pose == null)
                {
                    current_robot_pose =
                        new pos3D(best_path.current_pose.x,
                                  best_path.current_pose.y,
                                  0);
                }
                current_robot_pose.pan   = best_path.current_pose.pan;
                current_robot_path_score = best_path.total_score;

                // generate new poses from the ones which have survived culling
                int new_poses_required = survey_trial_poses; // -Poses.Count;
                int max = Poses.Count;
                int n = 0, added = 0;
                while ((max > 0) &&
                       (n < new_poses_required * 4) &&
                       (added < new_poses_required))
                {
                    // identify a potential parent at random,
                    // from one of the surviving paths
                    int          random_parent_index = rnd.Next(max - 1);
                    particlePath parent_path         = Poses[random_parent_index];

                    // only mature parents can have children
                    if (parent_path.path.Count >= pose_maturation)
                    {
                        // generate a child branching from the parent path
                        particlePath child_path = new particlePath(parent_path, path_ID, rob.LocalGridDimension);
                        createNewPose(child_path);
                        added++;
                    }
                    n++;
                }

                // like salmon, after parents spawn they die
                if (added > 0)
                {
                    for (int i = max - 1; i >= 0; i--)
                    {
                        Poses.RemoveAt(i);
                    }
                }

                // if the robot has rotated through a large angle since
                // the previous time step then reset the scan matching estimate
                //if (Math.Abs(best_path.current_pose.pan - rob.pan) > rob.ScanMatchingMaxPanAngleChange * Math.PI / 180)
                rob.ScanMatchingPanAngleEstimate = scanMatching.NOT_MATCHED;
            }


            PosesEvaluated = false;
        }
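The culling step removes the lowest-scoring paths beyond cull_index, subject to the maturity check. A minimal sketch of that arithmetic, assuming cull_threshold is the percentage of paths eligible for removal:

using System;

class CullIndexSketch
{
    static void Main()
    {
        int cull_threshold = 75;  // percentage of paths eligible for removal (assumed meaning)
        int pose_count = 100;

        int cull_index = (100 - cull_threshold) * pose_count / 100;  // = 25
        if (cull_index > pose_count - 2) cull_index = pose_count - 2;

        Console.WriteLine("paths kept: 0.." + cull_index +
                          ", candidates for removal: " + (cull_index + 1) + ".." + (pose_count - 1));
    }
}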
Example #47
		/// <summary>
		/// returns a set of evidence rays based upon the given observation
		/// </summary>
		/// <param name="observer">observer position and orientation</param>
		/// <param name="baseline">baseline distance</param>
		/// <param name="image_width">camera image width</param>
		/// <param name="image_height">camera image height</param>
		/// <param name="FOV_degrees">horizontal camera field of view in degrees</param>
		/// <param name="stereo_features">stereo features, as groups of three figures</param>
		/// <param name="stereo_features_colour">colour of each stereo feature</param>
		/// <param name="stereo_features_uncertainties">uncertainty value associated with each stereo feature</param>
		/// <param name="translate">translate the ray to be relative to the observer position, otherwise it's relative to (0,0,0)</param>
		/// <returns>list of evidence rays, centred at (0, 0, 0)</returns>
        public List<evidenceRay> createObservation(
		    pos3D observer,
		    float baseline,
		    int image_width,
		    int image_height,
		    float FOV_degrees,
		    float[] stereo_features,
		    byte[,] stereo_features_colour,
		    float[] stereo_features_uncertainties,
		    bool translate)
        {
            List<evidenceRay> result = new List<evidenceRay>();

            // get essential data for this stereo camera
            FOV_horizontal = FOV_degrees * (float)Math.PI / 180.0f;
            FOV_vertical = FOV_horizontal * image_height / image_width;

            // calc uncertainty in angle (+/- half a pixel)
            float angular_uncertainty = FOV_horizontal / (image_width * 2);
			//sigma = 100 * (float)Math.Tan(angular_uncertainty); // 100 is the factor used in RaysIntersection
			sigma = 1.0f / (image_width * 1) * FOV_horizontal;

            // some head geometry
            pos3D headOrientation = observer;
            pos3D cameraOrientation = observer;
			if (!translate) cameraOrientation = new pos3D(0, 0, 0);
            cameraOrientation.pan = headOrientation.pan;
            cameraOrientation.tilt = headOrientation.tilt;
            cameraOrientation.roll = headOrientation.roll;

            if (stereo_features != null)  // if there are stereo features associated with this camera
            {
                int f2 = 0;
                for (int f = 0; f < stereo_features.Length; f += 3)
                {
                    // get the parameters of the feature
                    float image_x = stereo_features[f];
                    float image_y = stereo_features[f + 1];
                    float disparity = stereo_features[f + 2];

                    // create a ray
                    evidenceRay ray = 
						createRay(
						    image_x, image_y, disparity, 
						    1 + stereo_features_uncertainties[f2],
                            stereo_features_colour[f2, 0],
                            stereo_features_colour[f2, 1],
                            stereo_features_colour[f2, 2]);

                    if (ray != null)
                    {
                        // convert from camera-centric coordinates to head coordinates
                        ray.translateRotate(cameraOrientation);

                        // add to the result
                        result.Add(ray);
                    }
                    f2++;
                }
            }

            return (result);
        }
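The first few lines of createObservation derive the vertical field of view and a roughly half-pixel angular uncertainty from the camera parameters. A worked standalone example using a hypothetical 640x480 camera with a 65 degree horizontal field of view:

using System;

class FovSketch
{
    static void Main()
    {
        int image_width = 640, image_height = 480;
        float FOV_degrees = 65;   // hypothetical horizontal field of view

        float FOV_horizontal = FOV_degrees * (float)Math.PI / 180.0f;
        float FOV_vertical = FOV_horizontal * image_height / image_width;
        float angular_uncertainty = FOV_horizontal / (image_width * 2);   // +/- half a pixel

        Console.WriteLine("horizontal FOV (rad):  " + FOV_horizontal);      // ~1.134
        Console.WriteLine("vertical FOV (rad):    " + FOV_vertical);        // ~0.851
        Console.WriteLine("angular uncertainty:   " + angular_uncertainty); // ~0.00089 rad
    }
}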
Example #48
        /// <summary>
        /// parse an xml node to extract geometry parameters
        /// </summary>
        /// <param name="xnod"></param>
        /// <param name="level"></param>
        public void LoadFromXml(
            XmlNode xnod, int level,
            ref int cameraIndex)
        {
            XmlNode xnodWorking;

            if (xnod.Name == "StereoCamera")
            {
                cameraIndex++;
            }
            if (xnod.Name == "BodyWidthMillimetres")
            {
                body_width_mm = Convert.ToSingle(xnod.InnerText);
            }
            if (xnod.Name == "BodyLengthMillimetres")
            {
                body_length_mm = Convert.ToSingle(xnod.InnerText);
            }
            if (xnod.Name == "BodyHeightMillimetres")
            {
                body_height_mm = Convert.ToSingle(xnod.InnerText);
            }

            if (xnod.Name == "CentreOfRotationX")
            {
                body_centre_of_rotation_x = Convert.ToSingle(xnod.InnerText);
            }
            if (xnod.Name == "CentreOfRotationY")
            {
                body_centre_of_rotation_y = Convert.ToSingle(xnod.InnerText);
            }
            if (xnod.Name == "CentreOfRotationZ")
            {
                body_centre_of_rotation_z = Convert.ToSingle(xnod.InnerText);
            }

            if (xnod.Name == "HeadCentreX")
            {
                head_centroid_x = Convert.ToSingle(xnod.InnerText);
            }
            if (xnod.Name == "HeadCentreY")
            {
                head_centroid_y = Convert.ToSingle(xnod.InnerText);
            }
            if (xnod.Name == "HeadCentreZ")
            {
                head_centroid_z = Convert.ToSingle(xnod.InnerText);
            }

            if (xnod.Name == "HeadDiameterMillimetres")
            {
                head_diameter_mm = Convert.ToSingle(xnod.InnerText);
            }

            if (xnod.Name == "MaxOrientationVarianceDegrees")
            {
                max_orientation_variance = Convert.ToSingle(xnod.InnerText) / 180.0f * (float)Math.PI;
            }
            if (xnod.Name == "MaxTiltVarianceDegrees")
            {
                max_tilt_variance = Convert.ToSingle(xnod.InnerText) / 180.0f * (float)Math.PI;
            }
            if (xnod.Name == "MaxRollVarianceDegrees")
            {
                max_roll_variance = Convert.ToSingle(xnod.InnerText) / 180.0f * (float)Math.PI;
            }

            if (xnod.Name == "NoOfStereoCameras")
            {
                cameraIndex = -1;
                int no_of_stereo_cameras = Convert.ToInt32(xnod.InnerText);

                baseline_mm = new float[no_of_stereo_cameras];
                pose        = new pos3D[no_of_stereo_cameras];
                poses       = new List <pos3D>();
                stereo_camera_position_x = new float[no_of_stereo_cameras];
                stereo_camera_position_y = new float[no_of_stereo_cameras];
                stereo_camera_position_z = new float[no_of_stereo_cameras];
                stereo_camera_pan        = new float[no_of_stereo_cameras];
                stereo_camera_tilt       = new float[no_of_stereo_cameras];
                stereo_camera_roll       = new float[no_of_stereo_cameras];
                image_width           = new int[no_of_stereo_cameras];
                image_height          = new int[no_of_stereo_cameras];
                FOV_degrees           = new float[no_of_stereo_cameras];
                left_camera_location  = new pos3D[no_of_stereo_cameras];
                right_camera_location = new pos3D[no_of_stereo_cameras];
                for (int i = 0; i < no_of_stereo_cameras; i++)
                {
                    left_camera_location[i]  = new pos3D(0, 0, 0);
                    right_camera_location[i] = new pos3D(0, 0, 0);
                }
            }

            if (xnod.Name == "BaselineMillimetres")
            {
                baseline_mm[cameraIndex] = Convert.ToSingle(xnod.InnerText);
            }
            if (xnod.Name == "PositionMillimetres")
            {
                string[] str = xnod.InnerText.Split(' ');
                stereo_camera_position_x[cameraIndex] = Convert.ToSingle(str[0]);
                stereo_camera_position_y[cameraIndex] = Convert.ToSingle(str[1]);
                stereo_camera_position_z[cameraIndex] = Convert.ToSingle(str[2]);
            }
            if (xnod.Name == "OrientationDegrees")
            {
                string[] str = xnod.InnerText.Split(' ');
                stereo_camera_pan[cameraIndex]  = Convert.ToSingle(str[0]) / 180.0f * (float)Math.PI;
                stereo_camera_tilt[cameraIndex] = Convert.ToSingle(str[1]) / 180.0f * (float)Math.PI;
                stereo_camera_roll[cameraIndex] = Convert.ToSingle(str[2]) / 180.0f * (float)Math.PI;
            }

            if (xnod.Name == "ImageDimensions")
            {
                string[] str = xnod.InnerText.Split(' ');
                image_width[cameraIndex]  = Convert.ToInt32(str[0]);
                image_height[cameraIndex] = Convert.ToInt32(str[1]);
            }

            if (xnod.Name == "FOVDegrees")
            {
                FOV_degrees[cameraIndex] = Convert.ToSingle(xnod.InnerText);
            }

            // call recursively on all children of the current node
            if (xnod.HasChildNodes)
            {
                xnodWorking = xnod.FirstChild;
                while (xnodWorking != null)
                {
                    LoadFromXml(xnodWorking, level + 1, ref cameraIndex);
                    xnodWorking = xnodWorking.NextSibling;
                }
            }
        }
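A hedged sketch of how this parser might be driven. The robotGeometry class name and the robot_geometry.xml filename are assumptions used only to show the calling convention; the XML itself would contain the element names checked above:

using System.Xml;

class GeometryLoadSketch
{
    static void Main()
    {
        // load a geometry description and walk it with LoadFromXml
        XmlDocument doc = new XmlDocument();
        doc.Load("robot_geometry.xml");               // hypothetical filename

        robotGeometry geometry = new robotGeometry(); // assumed enclosing class
        int cameraIndex = -1;
        geometry.LoadFromXml(doc.DocumentElement, 0, ref cameraIndex);
    }
}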
Example #49
        /// <summary>
        /// calculate the position of the robot's head and cameras for this pose
        /// </summary>
        /// <param name="rob">robot object</param>
        /// <param name="head_location">location of the centre of the head</param>
        /// <param name="camera_centre_location">location of the centre of each stereo camera</param>
        /// <param name="left_camera_location">location of the left camera within each stereo camera</param>
        /// <param name="right_camera_location">location of the right camera within each stereo camera</param>
        protected void calculateCameraPositions(
            robot rob,
            ref pos3D head_location,
            ref pos3D[] camera_centre_location,
            ref pos3D[] left_camera_location,
            ref pos3D[] right_camera_location)
        {
            // calculate the position of the centre of the head relative to 
            // the centre of rotation of the robot's body
            pos3D head_centroid = new pos3D(-(rob.BodyWidth_mm / 2) + rob.head.x,
                                            -(rob.BodyLength_mm / 2) + rob.head.y,
                                            rob.head.z);

            // location of the centre of the head on the grid map
            // adjusted for the robot pose and the head pan and tilt angle.
            // Note that the positions and orientations of individual cameras
            // on the head have already been accounted for within stereoModel.createObservation
            pos3D head_locn = head_centroid.rotate(rob.head.pan + pan, rob.head.tilt, 0);
            head_locn = head_locn.translate(x, y, 0);
            head_location.copyFrom(head_locn);

            for (int cam = 0; cam < rob.head.no_of_stereo_cameras; cam++)
            {
                // calculate the position of the centre of the stereo camera
                // (baseline centre point)
                pos3D camera_centre_locn = new pos3D(rob.head.calibration[cam].positionOrientation.x, rob.head.calibration[cam].positionOrientation.y, rob.head.calibration[cam].positionOrientation.z);
                camera_centre_locn = camera_centre_locn.rotate(rob.head.calibration[cam].positionOrientation.pan + rob.head.pan + pan, rob.head.calibration[cam].positionOrientation.tilt, rob.head.calibration[cam].positionOrientation.roll);
                camera_centre_location[cam] = camera_centre_locn.translate(head_location.x, head_location.y, head_location.z);

                // where are the left and right cameras?
                // we need to know this for the origins of the vacancy models
                float half_baseline_length = rob.head.calibration[cam].baseline / 2;
                pos3D left_camera_locn = new pos3D(-half_baseline_length, 0, 0);
                left_camera_locn = left_camera_locn.rotate(rob.head.calibration[cam].positionOrientation.pan + rob.head.pan + pan, rob.head.calibration[cam].positionOrientation.tilt, rob.head.calibration[cam].positionOrientation.roll);
                pos3D right_camera_locn = new pos3D(-left_camera_locn.x, -left_camera_locn.y, -left_camera_locn.z);
                left_camera_location[cam] = left_camera_locn.translate(camera_centre_location[cam].x, camera_centre_location[cam].y, camera_centre_location[cam].z);
                right_camera_location[cam] = right_camera_locn.translate(camera_centre_location[cam].x, camera_centre_location[cam].y, camera_centre_location[cam].z);
                right_camera_location[cam].pan = left_camera_location[cam].pan;
            }
        }
Example #50
        /// <summary>
        /// add an observation taken from this pose
        /// </summary>
        /// <param name="stereo_rays">list of ray objects in this observation</param>
        /// <param name="rob">robot object</param>
        /// <param name="LocalGrid">occupancy grid into which to insert the observation</param>
        /// <param name="localiseOnly">if true does not add any mapping particles (pure localisation)</param>
        /// <returns>localisation matching score</returns>
        public float AddObservation(List <evidenceRay>[] stereo_rays,
                                    robot rob,
                                    occupancygridMultiHypothesis LocalGrid,
                                    bool localiseOnly)
        {
            // clear the localisation score
            float localisation_score = occupancygridCellMultiHypothesis.NO_OCCUPANCY_EVIDENCE;

            // get the positions of the head and cameras for this pose
            pos3D head_location = new pos3D(0, 0, 0);

            pos3D[] camera_centre_location = new pos3D[rob.head.no_of_stereo_cameras];
            pos3D[] left_camera_location   = new pos3D[rob.head.no_of_stereo_cameras];
            pos3D[] right_camera_location  = new pos3D[rob.head.no_of_stereo_cameras];
            calculateCameraPositions(rob, ref head_location,
                                     ref camera_centre_location,
                                     ref left_camera_location,
                                     ref right_camera_location);

            // iterate over each stereo camera
            int cam = stereo_rays.Length - 1;

            while (cam >= 0)
            {
                // iterate through each ray
                int r = stereo_rays[cam].Count - 1;
                while (r >= 0)
                {
                    // observed ray.  Note that this is in an egocentric
                    // coordinate frame relative to the head of the robot
                    evidenceRay ray = stereo_rays[cam][r];

                    // translate and rotate this ray appropriately for the pose
                    evidenceRay trial_ray = ray.trialPose(camera_centre_location[cam].pan,
                                                          camera_centre_location[cam].x,
                                                          camera_centre_location[cam].y);

                    // update the grid cells for this ray and update the
                    // localisation score accordingly
                    float score =
                        LocalGrid.Insert(trial_ray, this,
                                         rob.head.sensormodel[cam],
                                         left_camera_location[cam],
                                         right_camera_location[cam],
                                         localiseOnly);
                    if (score != occupancygridCellMultiHypothesis.NO_OCCUPANCY_EVIDENCE)
                    {
                        if (localisation_score != occupancygridCellMultiHypothesis.NO_OCCUPANCY_EVIDENCE)
                        {
                            localisation_score += score;
                        }
                        else
                        {
                            localisation_score = score;
                        }
                    }
                    r--;
                }
                cam--;
            }

            return(localisation_score);
        }
Example #51
        /// <summary>
        /// moves to the next grid in the sequence, if necessary
        /// </summary>
        /// <param name="current_grid_index">index of the current local grid</param>
        /// <param name="current_disparity_index">index of teh current disparities set within the disparities file</param>
        /// <param name="robot_pose">the current robot pose</param>
        /// <param name="buffer">buffer containing two metagrids</param>
        /// <param name="current_buffer_index">index of the currently active grid within the buffer (0 or 1)</param>
        /// <param name="grid_centres">list of grid centre positions (x,y,z)</param>
        /// <param name="update_map">returns whether the map should be updated</param>
        /// <param name="debug_mapping_filename">filename to save debugging images</param>
        /// <param name="overall_map_filename">filename to save an overall map of the path</param>
        /// <param name="overall_map_img">overall map image data</param>
        /// <param name="overall_map_dimension_mm">dimension of the overall map in millimetres</param>
        /// <param name="overall_map_centre_x_mm">centre x position of the overall map in millimetres</param>
        /// <param name="overall_map_centre_y_mm">centre x position of the overall map in millimetres</param>
        /// <returns>true if we have transitioned from one grid to the next</returns>
        public static bool MoveToNextLocalGrid(
            ref int current_grid_index,
            ref int current_disparity_index,
            pos3D robot_pose,
            metagrid[] buffer,
            ref int current_buffer_index,
            List<float> grid_centres,
            ref bool update_map,
            string debug_mapping_filename,
		    string overall_map_filename,
		    ref byte[] overall_map_img,
		    int overall_img_width,
		    int overall_img_height,		                                       
		    int overall_map_dimension_mm,
		    int overall_map_centre_x_mm,
		    int overall_map_centre_y_mm)
        {
            bool buffer_transition = false;
            update_map = false;
            
            // if this is the first time that localisation
            // has been called since loading the path
            // then update the map
            if ((current_grid_index == 0) &&
                (current_disparity_index == 0))
            {
                update_map = true;

                float grid_centre_x_mm = grid_centres[current_grid_index * 3];
                float grid_centre_y_mm = grid_centres[(current_grid_index * 3) + 1];
                float grid_centre_z_mm = grid_centres[(current_grid_index * 3) + 2];
                buffer[current_buffer_index].SetPosition(grid_centre_x_mm, grid_centre_y_mm, grid_centre_z_mm, 0);
                int next_grid_index = current_grid_index + 1;
                if (next_grid_index >= grid_centres.Count / 3) next_grid_index = current_grid_index;
                grid_centre_x_mm = grid_centres[next_grid_index * 3];
                grid_centre_y_mm = grid_centres[(next_grid_index * 3) + 1];
                grid_centre_z_mm = grid_centres[(next_grid_index * 3) + 2];
                buffer[1 - current_buffer_index].SetPosition(grid_centre_x_mm, grid_centre_y_mm, grid_centre_z_mm, 0);
            }
        
            // distance to the centre of the currently active grid
            float dx = robot_pose.x - buffer[current_buffer_index].x;
            float dy = robot_pose.y - buffer[current_buffer_index].y;
            float dz = robot_pose.z - buffer[current_buffer_index].z;
            float dist_to_grid_centre_sqr_0 = dx*dx + dy*dy + dz*dz;
            dx = robot_pose.x - buffer[1 - current_buffer_index].x;
            dy = robot_pose.y - buffer[1 - current_buffer_index].y;
            dz = robot_pose.z - buffer[1 - current_buffer_index].z;
            float dist_to_grid_centre_sqr_1 = dx*dx + dy*dy + dz*dz;
            
            // if we are closer to the next grid than the current one
            // then swap the currently active grid
            //if (dist_to_grid_centre_sqr_0 > dimension_mm/2)
            if (dist_to_grid_centre_sqr_1 < dist_to_grid_centre_sqr_0)
            {
                if ((debug_mapping_filename != null) &&
                    (debug_mapping_filename != ""))
                {
                    bool show_localisation = true;
                    int debug_img_width = 640;
                    int debug_img_height = 480;
                    byte[] debug_img = new byte[debug_img_width * debug_img_height * 3];
                    Bitmap debug_bmp = new Bitmap(debug_img_width, debug_img_height, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
                    buffer[current_buffer_index].Show(0, debug_img, debug_img_width, debug_img_height, false, show_localisation);
                    BitmapArrayConversions.updatebitmap_unsafe(debug_img, debug_bmp);
                    if (debug_mapping_filename.ToLower().EndsWith("png"))
                        debug_bmp.Save(debug_mapping_filename, System.Drawing.Imaging.ImageFormat.Png);
                    if (debug_mapping_filename.ToLower().EndsWith("gif"))
                        debug_bmp.Save(debug_mapping_filename, System.Drawing.Imaging.ImageFormat.Gif);
                    if (debug_mapping_filename.ToLower().EndsWith("jpg"))
                        debug_bmp.Save(debug_mapping_filename, System.Drawing.Imaging.ImageFormat.Jpeg);
                    if (debug_mapping_filename.ToLower().EndsWith("bmp"))
                        debug_bmp.Save(debug_mapping_filename, System.Drawing.Imaging.ImageFormat.Bmp);

					/*
                    string[] str = debug_mapping_filename.Split('.');
                    string debug_mapping_filename2 = str[0] + "b." + str[1];
                    buffer[1 - current_buffer_index].Show(0, debug_img, debug_img_width, debug_img_height, false);
                    BitmapArrayConversions.updatebitmap_unsafe(debug_img, debug_bmp);
                    if (debug_mapping_filename2.ToLower().EndsWith("png"))
                        debug_bmp.Save(debug_mapping_filename2, System.Drawing.Imaging.ImageFormat.Png);
                    if (debug_mapping_filename2.ToLower().EndsWith("gif"))
                        debug_bmp.Save(debug_mapping_filename2, System.Drawing.Imaging.ImageFormat.Gif);
                    if (debug_mapping_filename2.ToLower().EndsWith("jpg"))
                        debug_bmp.Save(debug_mapping_filename2, System.Drawing.Imaging.ImageFormat.Jpeg);
                    if (debug_mapping_filename2.ToLower().EndsWith("bmp"))
                        debug_bmp.Save(debug_mapping_filename2, System.Drawing.Imaging.ImageFormat.Bmp);
                    */
				}

                UpdateOverallMap(
                    buffer,
                    current_buffer_index,
                    overall_map_filename,
                    ref overall_map_img,
		            overall_img_width,
		            overall_img_height,				                 
                    overall_map_dimension_mm,
                    overall_map_centre_x_mm,
                    overall_map_centre_y_mm);
					
                // swap the two metagrids
                SwapBuffers(
                    grid_centres,
                    ref current_grid_index,
                    buffer,
                    ref current_buffer_index,
                    ref update_map);

                buffer_transition = true;
            }
            return(buffer_transition);
        }
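The grid-swap decision reduces to comparing squared distances from the robot to the two buffered grid centres, so no square root is needed. A minimal standalone sketch of that test:

using System;

class GridSwapSketch
{
    // returns true if the robot is now closer to the "next" grid centre than to the current one
    static bool ShouldSwap(float[] robot, float[] current_centre, float[] next_centre)
    {
        float d0 = 0, d1 = 0;
        for (int i = 0; i < 3; i++)
        {
            float dc = robot[i] - current_centre[i]; d0 += dc * dc;
            float dn = robot[i] - next_centre[i];    d1 += dn * dn;
        }
        return d1 < d0;
    }

    static void Main()
    {
        // robot 3m along x, grids centred at 0 and 4m (hypothetical values, in millimetres)
        Console.WriteLine(ShouldSwap(new float[] { 3000, 0, 0 },
                                     new float[] { 0, 0, 0 },
                                     new float[] { 4000, 0, 0 }));   // True
    }
}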
Example #52
		public void CreateStereoCameras(
		    int no_of_stereo_cameras,
		    float cam_baseline_mm,
			float dist_from_centre_of_tilt_mm, 
		    int cam_image_width, 
		    int cam_image_height,
		    float cam_FOV_degrees,
		    float head_diameter_mm,
		    float default_head_orientation_degrees)
		{
			this.head_diameter_mm = head_diameter_mm;
			pose = new pos3D[no_of_stereo_cameras];
			for (int i = 0; i < no_of_stereo_cameras; i++)
			    pose[i] = new pos3D(0,0,0);
			
			baseline_mm = new float[no_of_stereo_cameras];
			image_width = new int[no_of_stereo_cameras];
			image_height = new int[no_of_stereo_cameras];
			FOV_degrees = new float[no_of_stereo_cameras];
			stereo_camera_position_x = new float[no_of_stereo_cameras];
			stereo_camera_position_y = new float[no_of_stereo_cameras];
			stereo_camera_position_z = new float[no_of_stereo_cameras];
			stereo_camera_pan = new float[no_of_stereo_cameras];
			stereo_camera_tilt = new float[no_of_stereo_cameras];
			stereo_camera_roll = new float[no_of_stereo_cameras];
			left_camera_location = new pos3D[no_of_stereo_cameras];
			right_camera_location = new pos3D[no_of_stereo_cameras];
			
			for (int cam = 0; cam < no_of_stereo_cameras; cam++)
			{
				float cam_orientation = cam * (float)Math.PI*2 / no_of_stereo_cameras;
				cam_orientation += default_head_orientation_degrees * (float)Math.PI / 180.0f;
				stereo_camera_position_x[cam] = head_diameter_mm * 0.5f * (float)Math.Sin(cam_orientation);
				stereo_camera_position_y[cam] = head_diameter_mm * 0.5f * (float)Math.Cos(cam_orientation);				
				stereo_camera_position_z[cam] = dist_from_centre_of_tilt_mm;
				stereo_camera_pan[cam] = cam_orientation;
				
				baseline_mm[cam] = cam_baseline_mm;
				image_width[cam] = cam_image_width;
				image_height[cam] = cam_image_height;
				FOV_degrees[cam] = cam_FOV_degrees;
			}
		}
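The cameras are spaced evenly around a ring of diameter head_diameter_mm and each is panned to face outwards. A standalone sketch of that placement for four cameras on a hypothetical 200mm head:

using System;

class CameraRingSketch
{
    static void Main()
    {
        int no_of_stereo_cameras = 4;
        float head_diameter_mm = 200;                   // hypothetical head size
        float default_head_orientation_degrees = 0;

        for (int cam = 0; cam < no_of_stereo_cameras; cam++)
        {
            // same ring placement as CreateStereoCameras above
            float cam_orientation = cam * (float)Math.PI * 2 / no_of_stereo_cameras;
            cam_orientation += default_head_orientation_degrees * (float)Math.PI / 180.0f;
            float x = head_diameter_mm * 0.5f * (float)Math.Sin(cam_orientation);
            float y = head_diameter_mm * 0.5f * (float)Math.Cos(cam_orientation);
            Console.WriteLine("camera " + cam + ": x=" + x + " y=" + y +
                              " pan=" + cam_orientation + " rad");
        }
        // prints positions at approximately (0,100), (100,0), (0,-100), (-100,0)
    }
}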
Example #53
        public float Localise(
		    robotGeometry geom,
		    float[][] stereo_features,
		    byte[][,] stereo_features_colour,
		    float[][] stereo_features_uncertainties,
		    Random rnd,
            ref pos3D pose_offset,
            ref bool buffer_transition,
            string debug_mapping_filename,
            float known_best_pose_x_mm,
            float known_best_pose_y_mm,
		    string overall_map_filename,
		    ref byte[] overall_map_img,
		    int overall_img_width,
		    int overall_img_height,
		    int overall_map_dimension_mm,
		    int overall_map_centre_x_mm,
		    int overall_map_centre_y_mm)		                      
        {
			return(Localise(
		        geom.body_width_mm,
		        geom.body_length_mm,
		        geom.body_centre_of_rotation_x,
		        geom.body_centre_of_rotation_y,
		        geom.body_centre_of_rotation_z,
		        geom.head_centroid_x,
		        geom.head_centroid_y,
		        geom.head_centroid_z,
		        geom.head_pan,
		        geom.head_tilt,
		        geom.head_roll,
		        geom.baseline_mm,
		        geom.stereo_camera_position_x,
		        geom.stereo_camera_position_y,
		        geom.stereo_camera_position_z,
		        geom.stereo_camera_pan,
		        geom.stereo_camera_tilt,
		        geom.stereo_camera_roll,
                geom.image_width,
                geom.image_height,
                geom.FOV_degrees,
		        stereo_features,
		        stereo_features_colour,
		        stereo_features_uncertainties,
                geom.sensormodel,
                ref geom.left_camera_location,
                ref geom.right_camera_location,
                geom.no_of_sample_poses,
                geom.sampling_radius_major_mm,
                geom.sampling_radius_minor_mm,
                geom.pose,
                geom.max_orientation_variance,
                geom.max_tilt_variance,
                geom.max_roll_variance,
                geom.poses,
                geom.pose_probability,
		        rnd,
                ref pose_offset,
                ref buffer_transition,
                debug_mapping_filename,
                known_best_pose_x_mm,
                known_best_pose_y_mm,
		        overall_map_filename,
		        ref overall_map_img,
		        overall_img_width,
		        overall_img_height,
		        overall_map_dimension_mm,
		        overall_map_centre_x_mm,
		        overall_map_centre_y_mm			                
			));
		}
Example #54
        /// <summary>
        /// parse an xml node to extract geometry parameters
        /// </summary>
        /// <param name="xnod"></param>
        /// <param name="level"></param>
        public void LoadFromXml(
		    XmlNode xnod, int level,
            ref int cameraIndex)
        {
            XmlNode xnodWorking;

			if (xnod.Name == "StereoCamera")
			{
				cameraIndex++;
			}			
            if (xnod.Name == "BodyWidthMillimetres")
            {
                body_width_mm = Convert.ToSingle(xnod.InnerText);
            }
            if (xnod.Name == "BodyLengthMillimetres")
            {
                body_length_mm = Convert.ToSingle(xnod.InnerText);
            }
            if (xnod.Name == "BodyHeightMillimetres")
            {
                body_height_mm = Convert.ToSingle(xnod.InnerText);
            }

			if (xnod.Name == "CentreOfRotationX")
            {
                body_centre_of_rotation_x = Convert.ToSingle(xnod.InnerText);
            }
			if (xnod.Name == "CentreOfRotationY")
            {
                body_centre_of_rotation_y = Convert.ToSingle(xnod.InnerText);
            }
			if (xnod.Name == "CentreOfRotationZ")
            {
                body_centre_of_rotation_z = Convert.ToSingle(xnod.InnerText);
            }
			
			if (xnod.Name == "HeadCentreX")
			{
				head_centroid_x = Convert.ToSingle(xnod.InnerText);
			}
			if (xnod.Name == "HeadCentreY")
			{
				head_centroid_y = Convert.ToSingle(xnod.InnerText);
			}
			if (xnod.Name == "HeadCentreZ")
			{
				head_centroid_z = Convert.ToSingle(xnod.InnerText);
			}

			if (xnod.Name == "HeadDiameterMillimetres")
			{
				head_diameter_mm = Convert.ToSingle(xnod.InnerText);
			}

			if (xnod.Name == "MaxOrientationVarianceDegrees")
			{
				max_orientation_variance = Convert.ToSingle(xnod.InnerText) / 180.0f * (float)Math.PI;
			}
			if (xnod.Name == "MaxTiltVarianceDegrees")
			{
				max_tilt_variance = Convert.ToSingle(xnod.InnerText) / 180.0f * (float)Math.PI;
			}
			if (xnod.Name == "MaxRollVarianceDegrees")
			{
				max_roll_variance = Convert.ToSingle(xnod.InnerText) / 180.0f * (float)Math.PI;
			}

			if (xnod.Name == "NoOfStereoCameras")
			{
				cameraIndex = -1;
				int no_of_stereo_cameras = Convert.ToInt32(xnod.InnerText);
				
				baseline_mm = new float[no_of_stereo_cameras];
				pose = new pos3D[no_of_stereo_cameras];
				poses = new List<pos3D>();
				stereo_camera_position_x = new float[no_of_stereo_cameras];
				stereo_camera_position_y = new float[no_of_stereo_cameras];
				stereo_camera_position_z = new float[no_of_stereo_cameras];
				stereo_camera_pan = new float[no_of_stereo_cameras];
				stereo_camera_tilt = new float[no_of_stereo_cameras];
				stereo_camera_roll = new float[no_of_stereo_cameras];
				image_width = new int[no_of_stereo_cameras];
				image_height = new int[no_of_stereo_cameras];
				FOV_degrees = new float[no_of_stereo_cameras];
			    left_camera_location = new pos3D[no_of_stereo_cameras];
			    right_camera_location = new pos3D[no_of_stereo_cameras];
				for (int i = 0; i < no_of_stereo_cameras; i++)
				{
					left_camera_location[i] = new pos3D(0,0,0);
					right_camera_location[i] = new pos3D(0,0,0);
				}
			}
			
			if (xnod.Name == "BaselineMillimetres")
			{
				baseline_mm[cameraIndex] = Convert.ToSingle(xnod.InnerText);
			}
			if (xnod.Name == "PositionMillimetres")
			{
				string[] str = xnod.InnerText.Split(' ');
				stereo_camera_position_x[cameraIndex] = Convert.ToSingle(str[0]);
				stereo_camera_position_y[cameraIndex] = Convert.ToSingle(str[1]);
				stereo_camera_position_z[cameraIndex] = Convert.ToSingle(str[2]);
			}
			if (xnod.Name == "OrientationDegrees")
			{
				string[] str = xnod.InnerText.Split(' ');
				stereo_camera_pan[cameraIndex] = Convert.ToSingle(str[0]) / 180.0f * (float)Math.PI;
				stereo_camera_tilt[cameraIndex] = Convert.ToSingle(str[1]) / 180.0f * (float)Math.PI;
				stereo_camera_roll[cameraIndex] = Convert.ToSingle(str[2]) / 180.0f * (float)Math.PI;
			}
			
			if (xnod.Name == "ImageDimensions")
			{
				string[] str = xnod.InnerText.Split(' ');
				image_width[cameraIndex] = Convert.ToInt32(str[0]);
				image_height[cameraIndex] = Convert.ToInt32(str[1]);
			}
			
			if (xnod.Name == "FOVDegrees")
			{
				FOV_degrees[cameraIndex] = Convert.ToSingle(xnod.InnerText);
			}			
			
            // call recursively on all children of the current node
            if (xnod.HasChildNodes)
            {
                xnodWorking = xnod.FirstChild;
                while (xnodWorking != null)
                {
                    LoadFromXml(xnodWorking, level + 1, ref cameraIndex);
                    xnodWorking = xnodWorking.NextSibling;
                }
            }
        }
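A minimal usage sketch (not part of the original source) showing how this recursive loader might be driven from the root of a saved design file; the wrapper method name and the assumption that System.Xml is referenced are hypothetical.

        // Hypothetical wrapper, assuming System.Xml is referenced and this method
        // lives in the same class as LoadFromXml
        public void LoadDesign(string filename)
        {
            XmlDocument doc = new XmlDocument();
            doc.Load(filename);

            // cameraIndex starts below zero; the NoOfStereoCameras element allocates
            // the per-camera arrays and each StereoCamera element advances the index
            int cameraIndex = -1;
            LoadFromXml(doc.DocumentElement, 0, ref cameraIndex);
        }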
Example #55
0
        /// <summary>
        /// Update the current grid with new mapping rays loaded from the disparities file
        /// </summary>
        /// <param name="body_width_mm">width of the robot body in millimetres</param>
        /// <param name="body_length_mm">length of the robot body in millimetres</param>
        /// <param name="body_centre_of_rotation_x">x centre of rotation of the robot, relative to the top left corner</param>
        /// <param name="body_centre_of_rotation_y">y centre of rotation of the robot, relative to the top left corner</param>
        /// <param name="body_centre_of_rotation_z">z centre of rotation of the robot, relative to the top left corner</param>
        /// <param name="head_centroid_x">head centroid x position in millimetres relative to the top left corner of the body</param>
        /// <param name="head_centroid_y">head centroid y position in millimetres relative to the top left corner of the body</param>
        /// <param name="head_centroid_z">head centroid z position in millimetres relative to the top left corner of the body</param>
        /// <param name="baseline_mm">stereo camera baseline in millimetres</param>
        /// <param name="stereo_camera_position_x">stereo camera x position in millimetres relative to the head centroid</param>
        /// <param name="stereo_camera_position_y">stereo camera y position in millimetres relative to the head centroid</param>
        /// <param name="stereo_camera_position_z">stereo camera z position in millimetres relative to the head centroid</param>
        /// <param name="stereo_camera_pan">stereo camera pan in radians relative to the head</param>
        /// <param name="stereo_camera_tilt">stereo camera tilt in radians relative to the head</param>
        /// <param name="stereo_camera_roll">stereo camera roll in radians relative to the head</param>
        /// <param name="image_width">image width for each stereo camera</param>
        /// <param name="image_height">image height for each stereo camera</param>
        /// <param name="FOV_degrees">field of view for each stereo camera in degrees</param>
        /// <param name="sensormodel">sensor model for each stereo camera</param>
        protected void UpdateMap(
		    float body_width_mm,
		    float body_length_mm,
		    float body_centre_of_rotation_x,
		    float body_centre_of_rotation_y,
		    float body_centre_of_rotation_z,
		    float head_centroid_x,
		    float head_centroid_y,
		    float head_centroid_z,
		    float[] baseline_mm,
		    float[] stereo_camera_position_x,
		    float[] stereo_camera_position_y,
		    float[] stereo_camera_position_z,
		    float[] stereo_camera_pan,
		    float[] stereo_camera_tilt,
		    float[] stereo_camera_roll,
            int[] image_width,
            int[] image_height,
            float[] FOV_degrees,
            stereoModel[][] sensormodel)
        {
            const bool use_compression = false;

            if (disparities_file_open)
			{
				float[] stereo_features;
				byte[,] stereo_features_colour;
		        float[] stereo_features_uncertainties;
				pos3D robot_pose = new pos3D(0,0,0);

                //int next_grid_index = current_grid_index + 1;
                //if (next_grid_index >= grid_centres.Count / 3) next_grid_index = (grid_centres.Count / 3) - 1;

                int next_grid_index = current_grid_index;
                //if (current_grid_index < 1)
                //    next_grid_index = 1;
                
                int next_disparity_index = disparities_index[next_grid_index];
	            for (int i = current_disparity_index; i < next_disparity_index; i++)
				{
	                long time_long = disparities_reader.ReadInt64();
	                robot_pose.x = disparities_reader.ReadSingle();
					robot_pose.y = disparities_reader.ReadSingle();
					robot_pose.pan = disparities_reader.ReadSingle();
					float head_pan = disparities_reader.ReadSingle();
					float head_tilt = disparities_reader.ReadSingle();
					float head_roll = disparities_reader.ReadSingle();
	                int stereo_camera_index = disparities_reader.ReadInt32();
					int features_count = disparities_reader.ReadInt32();

                    if (use_compression)
                    {
                        int features_bytes = disparities_reader.ReadInt32();
                        byte[] fb = new byte[features_bytes];
                        disparities_reader.Read(fb, 0, features_bytes);
                        byte[] packed_stereo_features2 = AcedInflator.Instance.Decompress(fb, 0, 0, 0);
                        stereo_features = ArrayConversions.ToFloatArray(packed_stereo_features2);
                    }
                    else
                    {
                        byte[] fb = new byte[features_count * 3 * 4];
                        disparities_reader.Read(fb, 0, fb.Length);
                        stereo_features = ArrayConversions.ToFloatArray(fb);
                    }

                    byte[] packed_stereo_feature_colours = null;
                    if (use_compression)
                    {
                        int colour_bytes = disparities_reader.ReadInt32();
                        byte[] cb = new byte[colour_bytes];
                        disparities_reader.Read(cb, 0, colour_bytes);
                        packed_stereo_feature_colours = AcedInflator.Instance.Decompress(cb, 0, 0, 0);
                    }
                    else
                    {
                        packed_stereo_feature_colours = new byte[features_count * 3];
                        disparities_reader.Read(packed_stereo_feature_colours, 0, packed_stereo_feature_colours.Length);
                    }
					
					// unpack stereo features
					int ctr = 0;
					stereo_features_colour = new byte[features_count,3];
					stereo_features_uncertainties = new float[features_count];
					
	                for (int f = 0; f < features_count; f++)
	                {
						stereo_features_uncertainties[f] = 1;
						stereo_features_colour[f, 0] = packed_stereo_feature_colours[ctr++];
						stereo_features_colour[f, 1] = packed_stereo_feature_colours[ctr++];
						stereo_features_colour[f, 2] = packed_stereo_feature_colours[ctr++];
	                }
					
					// insert the rays into the map
                    Map(body_width_mm,
		                body_length_mm,
		                body_centre_of_rotation_x,
		                body_centre_of_rotation_y,
		                body_centre_of_rotation_z,
		                head_centroid_x,
		                head_centroid_y,
		                head_centroid_z,
		                head_pan,
		                head_tilt,
		                head_roll,
					    stereo_camera_index,
		                baseline_mm[stereo_camera_index],
		                stereo_camera_position_x[stereo_camera_index],
		                stereo_camera_position_y[stereo_camera_index],
		                stereo_camera_position_z[stereo_camera_index],
		                stereo_camera_pan[stereo_camera_index],
		                stereo_camera_tilt[stereo_camera_index],
		                stereo_camera_roll[stereo_camera_index],
                        image_width[stereo_camera_index],
                        image_height[stereo_camera_index],
                        FOV_degrees[stereo_camera_index],
		                stereo_features,
		                stereo_features_colour,
		                stereo_features_uncertainties,
                        sensormodel,
                        robot_pose);
					
					stereo_features = null;
					stereo_features_colour = null;
					stereo_features_uncertainties = null;                    
				}
                current_disparity_index = next_disparity_index;
			}
			else
			{
				Console.WriteLine("disparities file not open");
			}
        }
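The read loop above implies a fixed per-observation record layout in the uncompressed case. The sketch below is a hypothetical writer for one such record, included only to make that byte layout explicit; the method name is invented, and floats are written one at a time via BinaryWriter (little-endian), matching what ArrayConversions.ToFloatArray is assumed to expect.

        // Hypothetical writer matching the uncompressed record layout consumed by
        // UpdateMap; the field order mirrors the BinaryReader calls above
        protected void WriteDisparitiesRecord(
            BinaryWriter disparities_writer,
            long time_ticks,
            pos3D robot_pose,
            float head_pan, float head_tilt, float head_roll,
            int stereo_camera_index,
            float[] stereo_features,          // x, y, disparity triples
            byte[] stereo_feature_colours)    // r, g, b triples
        {
            int features_count = stereo_features.Length / 3;
            disparities_writer.Write(time_ticks);
            disparities_writer.Write(robot_pose.x);
            disparities_writer.Write(robot_pose.y);
            disparities_writer.Write(robot_pose.pan);
            disparities_writer.Write(head_pan);
            disparities_writer.Write(head_tilt);
            disparities_writer.Write(head_roll);
            disparities_writer.Write(stereo_camera_index);
            disparities_writer.Write(features_count);
            for (int i = 0; i < stereo_features.Length; i++)
                disparities_writer.Write(stereo_features[i]);     // features_count * 3 * 4 bytes
            disparities_writer.Write(stereo_feature_colours);     // features_count * 3 bytes
        }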
Example #56
0
		/// <summary>
		/// sets the position and orientation of the robot's head
		/// </summary>
		/// <param name="p">
		/// A <see cref="pos3D"/> giving the head position and orientation
		/// </param>
		public void SetHeadPositionOrientation(pos3D p)
		{
			head_centroid_x = p.x;
			head_centroid_y = p.y;
			head_centroid_z = p.z;
			head_pan = p.pan;
			head_tilt = p.tilt;
			head_roll = p.roll;
		}
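A brief usage sketch (hypothetical, not from the original source); the robot instance and the choice of radians for the pan angle follow the conventions used elsewhere in these examples.

		// Hypothetical usage: place the head 100mm above the body origin and
		// pan it 20 degrees to the left (angles in radians, as elsewhere)
		pos3D head_pose = new pos3D(0, 0, 100);
		head_pose.pan = 20 * (float)Math.PI / 180.0f;
		robot.SetHeadPositionOrientation(head_pose);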
Example #57
0
		public void InsertRays()
		{
			int no_of_stereo_features = 2000;
		    int image_width = 640;
		    int image_height = 480;
			int no_of_stereo_cameras = 1;
		    int localisationRadius_mm = 16000;
		    int maxMappingRange_mm = 16000;
		    int cellSize_mm = 32;
		    int dimension_cells = 16000 / cellSize_mm;
		    int dimension_cells_vertical = dimension_cells/2;
		    float vacancyWeighting = 0.5f;
			float FOV_horizontal = 78 * (float)Math.PI / 180.0f;
					    
			// create a grid
			Console.WriteLine("Creating grid");
		    occupancygridSimple grid = 
		        new occupancygridSimple(
		            dimension_cells,
		            dimension_cells_vertical,
		            cellSize_mm,
		            localisationRadius_mm,
		            maxMappingRange_mm,
		            vacancyWeighting);
		    
		    Assert.AreNotEqual(grid, null, "object occupancygridSimple was not created");
			
			Console.WriteLine("Creating sensor models");			
			stereoModel inverseSensorModel = new stereoModel();
			inverseSensorModel.FOV_horizontal = FOV_horizontal;
			inverseSensorModel.FOV_vertical = FOV_horizontal * image_height / image_width;			
			inverseSensorModel.createLookupTable(cellSize_mm, image_width, image_height);

            //Assert.AreNotEqual(0, inverseSensorModel.ray_model.probability[1][5], "Ray model probabilities not updated");
						
			// observer parameters
            int pan_angle_degrees = 0;
		    pos3D observer = new pos3D(0,0,0);
            observer.pan = pan_angle_degrees * (float)Math.PI / 180.0f;
		    float stereo_camera_baseline_mm = 100;
			pos3D left_camera_location = new pos3D(stereo_camera_baseline_mm*0.5f,0,0);
			pos3D right_camera_location = new pos3D(-stereo_camera_baseline_mm*0.5f,0,0);
            left_camera_location = left_camera_location.rotate(observer.pan, observer.tilt, observer.roll);
            right_camera_location = right_camera_location.rotate(observer.pan, observer.tilt, observer.roll);
            left_camera_location = left_camera_location.translate(observer.x, observer.y, observer.z);
            right_camera_location = right_camera_location.translate(observer.x, observer.y, observer.z);
		    float FOV_degrees = 78;
		    float[] stereo_features = new float[no_of_stereo_features * 3];
		    byte[,] stereo_features_colour = new byte[no_of_stereo_features, 3];
		    float[] stereo_features_uncertainties = new float[no_of_stereo_features];
			
			// create some stereo disparities within the field of view
			Console.WriteLine("Adding disparities");
			//MersenneTwister rnd = new MersenneTwister(0);
            Random rnd = new Random(0);
			for (int correspondence = 0; correspondence < no_of_stereo_features; correspondence++)
			{
				float x = rnd.Next(image_width-1);
				float y = rnd.Next(image_height/50) + (image_height/2);
				float disparity = 7;
				if ((x < image_width/5) || (x > image_width * 4/5))
				{
					disparity = 7; //15;
				}
				byte colour_red = (byte)rnd.Next(255);
				byte colour_green = (byte)rnd.Next(255);
				byte colour_blue = (byte)rnd.Next(255);
				
				stereo_features[correspondence*3] = x;
				stereo_features[(correspondence*3)+1] = y;
				stereo_features[(correspondence*3)+2] = disparity;
				stereo_features_colour[correspondence, 0] = colour_red;
				stereo_features_colour[correspondence, 1] = colour_green;
				stereo_features_colour[correspondence, 2] = colour_blue;
				stereo_features_uncertainties[correspondence] = 0;
			}
			
            // create an observation as a set of rays from the stereo correspondence results
            List<evidenceRay>[] stereo_rays = new List<evidenceRay>[no_of_stereo_cameras];
            for (int cam = 0; cam < no_of_stereo_cameras; cam++)
			{
				Console.WriteLine("Creating rays");
                stereo_rays[cam] = 
					inverseSensorModel.createObservation(
					    observer,
		                stereo_camera_baseline_mm,
		                image_width,
		                image_height,
		                FOV_degrees,
		                stereo_features,
		                stereo_features_colour,
		                stereo_features_uncertainties,
					    true);

				// insert rays into the grid
				Console.WriteLine("Throwing rays");
				for (int ray = 0; ray < stereo_rays[cam].Count; ray++)
				{
					grid.Insert(stereo_rays[cam][ray], inverseSensorModel.ray_model, left_camera_location, right_camera_location, false);
				}
			}
					
			// save the result as an image
			Console.WriteLine("Saving grid");
			int debug_img_width = 640;
			int debug_img_height = 480;
		    byte[] debug_img = new byte[debug_img_width * debug_img_height * 3];
			Bitmap bmp = new Bitmap(debug_img_width, debug_img_height, System.Drawing.Imaging.PixelFormat.Format24bppRgb);

			grid.Show(debug_img, debug_img_width, debug_img_height, false, false);
			BitmapArrayConversions.updatebitmap_unsafe(debug_img, bmp);
			bmp.Save("tests_occupancygrid_simple_InsertRays_overhead.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);

			grid.ShowFront(debug_img, debug_img_width, debug_img_height, true);
			BitmapArrayConversions.updatebitmap_unsafe(debug_img, bmp);
			bmp.Save("tests_occupancygrid_simple_InsertRays_front.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);

            // side view of the probabilities
            float max_prob = -1;
            float min_prob = 1;
            float[] probs = new float[dimension_cells/2];
            float[] mean_colour = new float[3];
            for (int y = dimension_cells / 2; y < dimension_cells; y++)
            {
                float p = grid.GetProbability(dimension_cells / 2, y, mean_colour);                
                probs[y - (dimension_cells / 2)] = p;
                if (p != occupancygridSimple.NO_OCCUPANCY_EVIDENCE)
                {
                    if (p < min_prob) min_prob = p;
                    if (p > max_prob) max_prob = p;
                }
            }
            for (int i = 0; i < debug_img.Length; i++) debug_img[i] = 255;
            int prev_x = -1;
            int prev_y = debug_img_height / 2;
            for (int i = 0; i < probs.Length; i++)
            {
                if (probs[i] != occupancygridSimple.NO_OCCUPANCY_EVIDENCE)
                {
                    int x = i * (debug_img_width - 1) / probs.Length;
                    int y = debug_img_height - 1 - (int)((probs[i] - min_prob) / (max_prob - min_prob) * (debug_img_height - 1));
                    int n = ((y * debug_img_width) + x) * 3;
                    if (prev_x > -1)
                    {
                        int r = 255;
                        int g = 0;
                        int b = 0;
                        if (probs[i] > 0.5f)
                        {
                            r = 0;
                            g = 255;
                            b = 0;
                        }
                        drawing.drawLine(debug_img, debug_img_width, debug_img_height, prev_x, prev_y, x, y, r, g, b, 0, false);
                    }
                    prev_x = x;
                    prev_y = y;
                }
            }
            int y_zero = debug_img_height - 1 - (int)((0.5f-min_prob) / (max_prob - min_prob) * (debug_img_height - 1));
            drawing.drawLine(debug_img, debug_img_width, debug_img_height, 0, y_zero, debug_img_width - 1, y_zero, 0, 0, 0, 0, false);

            BitmapArrayConversions.updatebitmap_unsafe(debug_img, bmp);
            bmp.Save("tests_occupancygrid_simple_InsertRays_probs.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
        }
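As a rough sanity check on the test above, a pinhole-camera approximation (an assumption; createObservation's internal geometry may differ) gives the range at which the synthetic 7-pixel disparity should place the occupied band in the saved overhead image.

            // Pinhole-model estimate (assumption) of the range implied by the
            // synthetic disparities used in InsertRays above
            float FOV_radians = 78 * (float)Math.PI / 180.0f;
            float focal_length_pixels = (640 / 2.0f) / (float)Math.Tan(FOV_radians / 2); // ~395 px
            float expected_range_mm = 100 * focal_length_pixels / 7;  // baseline * f / disparity, roughly 5600 mm
            Console.WriteLine("expected feature range: " + expected_range_mm.ToString("F0") + " mm");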
Example #58
0
        public void Roll()
        {
            int roll_angle1 = 20;
            float roll1 = roll_angle1 * (float)Math.PI / 180.0f;
            
            pos3D pos1 = new pos3D(50, 0, 0);
            pos3D pos2 = pos1.rotate_old(0,0,roll1);
            pos3D pos3 = pos1.rotate(0,0,roll1);
            
            float dx = Math.Abs(pos2.x - pos3.x);
            float dy = Math.Abs(pos2.y - pos3.y);
            float dz = Math.Abs(pos2.z - pos3.z);
            Console.WriteLine("pos old: " + pos2.x.ToString() + ",  " + pos2.y.ToString() + ",  " + pos2.z.ToString());
            Console.WriteLine("pos new: " + pos3.x.ToString() + ",  " + pos3.y.ToString() + ",  " + pos3.z.ToString());
            Assert.Less(dx, 1);
            Assert.Less(dy, 1);
            Assert.Less(dz, 1);
	    }
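For reference, under the assumption that roll is a rotation about the forward (y) axis — a convention not verified against pos3D.rotate itself — the expected coordinates of the rolled point can be written down in closed form; the test only asserts that the old and new implementations agree.

            // Expected result under the assumed convention (sign of z depends on handedness)
            float roll = 20 * (float)Math.PI / 180.0f;
            float expected_x = 50 * (float)Math.Cos(roll);   // ~46.98
            float expected_z = 50 * (float)Math.Sin(roll);   // ~17.10
            Console.WriteLine("expected: " + expected_x.ToString("F2") + ", 0, " + expected_z.ToString("F2"));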
Example #59
0
        /// <summary>
        /// insert a stereo ray into the grid
        /// </summary>
        /// <param name="ray">stereo ray</param>
        /// <param name="origin">pose from which this observation was made</param>
        /// <param name="sensormodel">sensor model for each grid level</param>
        /// <param name="left_camera_location">left stereo camera position and orientation</param>
        /// <param name="right_camera_location">right stereo camera position and orientation</param>
        /// <param name="localiseOnly">whether we are mapping or localising</param>
        /// <returns>localisation matching score</returns>
        public float Insert(
            evidenceRay ray,
            particlePose origin,
		    stereoModel[] sensormodel,                            
            pos3D left_camera_location,
            pos3D right_camera_location,
            bool localiseOnly)
        {
            float matchingScore = 0;

            switch (grid_type)
            {
                case TYPE_MULTI_HYPOTHESIS:
                    {
                        for (int grid_level = 0; grid_level < grid.Length; grid_level++)
                        {
					        rayModelLookup sensormodel_lookup = sensormodel[grid_level].ray_model;
                            occupancygridMultiHypothesis grd = (occupancygridMultiHypothesis)grid[grid_level];
                            matchingScore += grid_weight[grid_level] * grd.Insert(ray, origin, sensormodel_lookup, left_camera_location, right_camera_location, localiseOnly);
                        }
                        break;
                    }
            }
            return (matchingScore);
        }
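A hypothetical usage sketch: scoring one particle pose against the multi-hypothesis grid by summing the matching score over every ray of an observation. The variables observation_rays, pose, sensormodels and the two camera locations are assumed to have been produced elsewhere (for example by stereoModel.createObservation).

            // Hypothetical localisation loop (localiseOnly = true scores the rays
            // without updating the map)
            float total_score = 0;
            for (int r = 0; r < observation_rays.Count; r++)
            {
                total_score += Insert(
                    observation_rays[r],
                    pose,                    // particlePose being evaluated
                    sensormodels,            // stereoModel[], one entry per grid level
                    left_camera_location,
                    right_camera_location,
                    true);
            }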
Example #60
0
        /// <summary>
        /// Returns positions of grid cells corresponding to a moire grid
        /// produced by the interference of a pair of hexagonal grids (theta waves)
        /// </summary>
        /// <param name="sampling_radius_major_mm">radius of the major axis of the bounding ellipse</param>
        /// <param name="sampling_radius_minor_mm">radius of the minor axis of the bounding ellipse</param>
        /// <param name="first_grid_spacing">spacing of the first hexagonal grid (theta wavelength 1)</param>
        /// <param name="second_grid_spacing">spacing of the second hexagonal grid (theta wavelength 2)</param>
        /// <param name="first_grid_rotation_degrees">rotation of the first grid (theta field 1)</param>
        /// <param name="second_grid_rotation_degrees">rotation of the second grid (theta field 2)</param>
        /// <param name="dimension_x_cells">number of grid cells in the x axis</param>
        /// <param name="dimension_y_cells">number of grid cells in the y axis</param>
        /// <param name="scaling_factor">scaling factor (k)</param>
        /// <param name="img">image data</param>
        /// <param name="img_width">image width</param>
        /// <param name="img_height">image height</param>
        /// <param name="radius">radius in pixels</param>
        public static void ShowMoireGridVertices(
            float sampling_radius_major_mm,
            float sampling_radius_minor_mm,
            float first_grid_spacing,
            float second_grid_spacing,
            float first_grid_rotation_degrees,
            float second_grid_rotation_degrees,
            int dimension_x_cells,
            int dimension_y_cells,
            float scaling_factor,
            byte[] img,
            int img_width,
            int img_height,
            int radius)
        {
            List <pos3D> cells = new List <pos3D>();

            CreateMoireGrid(
                sampling_radius_major_mm,
                sampling_radius_minor_mm,
                first_grid_spacing,
                second_grid_spacing,
                first_grid_rotation_degrees,
                second_grid_rotation_degrees,
                dimension_x_cells,
                dimension_y_cells,
                scaling_factor,
                ref cells);

            float min_x = float.MaxValue;
            float max_x = float.MinValue;
            float min_y = float.MaxValue;
            float max_y = float.MinValue;

            for (int i = 0; i < cells.Count; i++)
            {
                if (cells[i].x < min_x)
                {
                    min_x = cells[i].x;
                }
                if (cells[i].y < min_y)
                {
                    min_y = cells[i].y;
                }
                if (cells[i].x > max_x)
                {
                    max_x = cells[i].x;
                }
                if (cells[i].y > max_y)
                {
                    max_y = cells[i].y;
                }
            }

            if (max_x - min_x > max_y - min_y)
            {
                float cy = min_y + ((max_y - min_y) / 2);
                min_y = cy - ((max_x - min_x) / 2);
                max_y = min_y + (max_x - min_x);
            }
            else
            {
                float cx = min_x + ((max_x - min_x) / 2);
                min_x = cx - ((max_y - min_y) / 2);
                max_x = min_x + (max_y - min_y);
            }

            for (int i = (img_width * img_height * 3) - 1; i >= 0; i--)
            {
                img[i] = 0;
            }

            for (int i = 0; i < cells.Count; i++)
            {
                pos3D cell = cells[i];
                int   x    = (int)((cell.x - min_x) * img_width / (max_x - min_x));
                int   y    = (int)((cell.y - min_y) * img_height / (max_y - min_y));
                drawing.drawSpot(img, img_width, img_height, x, y, radius, 255, 255, 255);
            }
        }
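A hypothetical usage sketch rendering the moire vertices to an image file; all parameter values and the output file name are illustrative only, chosen simply to exercise the method.

            // Hypothetical usage: draw the vertices and save the result
            int img_width = 640, img_height = 640;
            byte[] img = new byte[img_width * img_height * 3];
            ShowMoireGridVertices(
                500, 500,            // sampling radius major / minor (mm)
                100, 105,            // first / second hexagonal grid spacing
                0, 5,                // first / second grid rotation (degrees)
                20, 20,              // grid dimension in cells (x, y)
                1.0f,                // scaling factor k
                img, img_width, img_height,
                2);                  // spot radius in pixels
            Bitmap bmp = new Bitmap(img_width, img_height, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
            BitmapArrayConversions.updatebitmap_unsafe(img, bmp);
            bmp.Save("moire_grid_vertices.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);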