public void SaveAndLoad()
{
    string filename = "tests_robotGeometry_SaveAndLoad.xml";
    int no_of_stereo_cameras = 2;

    // create a geometry, configure it and save it to XML
    robotGeometry geom1 = new robotGeometry();
    geom1.CreateStereoCameras(no_of_stereo_cameras, 120, 0, 320, 240, 78, 100, 0);
    geom1.SetBodyDimensions(800, 700, 600);
    geom1.SetCentreOfRotation(400, 350, 10);
    geom1.SetHeadPosition(400, 350, 1000);
    geom1.Save(filename);

    // load the saved geometry into a second object
    robotGeometry geom2 = new robotGeometry();
    geom2.Load(filename);

    // body dimensions, centre of rotation and head position should survive the round trip
    Assert.AreEqual(geom1.body_width_mm, geom2.body_width_mm);
    Assert.AreEqual(geom1.body_length_mm, geom2.body_length_mm);
    Assert.AreEqual(geom1.body_height_mm, geom2.body_height_mm);
    Assert.AreEqual(geom1.body_centre_of_rotation_x, geom2.body_centre_of_rotation_x);
    Assert.AreEqual(geom1.body_centre_of_rotation_y, geom2.body_centre_of_rotation_y);
    Assert.AreEqual(geom1.body_centre_of_rotation_z, geom2.body_centre_of_rotation_z);
    Assert.AreEqual(geom1.head_centroid_x, geom2.head_centroid_x);
    Assert.AreEqual(geom1.head_centroid_y, geom2.head_centroid_y);
    Assert.AreEqual(geom1.head_centroid_z, geom2.head_centroid_z);

    // per-camera parameters should also match
    for (int cam = 0; cam < no_of_stereo_cameras; cam++)
    {
        Assert.AreEqual(geom1.baseline_mm[cam], geom2.baseline_mm[cam]);
        Assert.AreEqual(geom1.image_width[cam], geom2.image_width[cam]);
        Assert.AreEqual(geom1.image_height[cam], geom2.image_height[cam]);
        Assert.AreEqual(geom1.FOV_degrees[cam], geom2.FOV_degrees[cam]);
        Assert.AreEqual(geom1.stereo_camera_position_x[cam], geom2.stereo_camera_position_x[cam]);
        Assert.AreEqual(geom1.stereo_camera_position_y[cam], geom2.stereo_camera_position_y[cam]);
        Assert.AreEqual(geom1.stereo_camera_position_z[cam], geom2.stereo_camera_position_z[cam]);
        Assert.AreEqual(geom1.stereo_camera_pan[cam], geom2.stereo_camera_pan[cam]);
        Assert.AreEqual(geom1.stereo_camera_tilt[cam], geom2.stereo_camera_tilt[cam]);
        Assert.AreEqual(geom1.stereo_camera_roll[cam], geom2.stereo_camera_roll[cam]);
    }
}
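// Possible refactoring sketch, not part of the existing fixture: the field-by-field
// checks in SaveAndLoad could be collected into a reusable helper so that future
// persistence tests stay short. AssertGeometriesEqual is a hypothetical name; it
// only touches fields that SaveAndLoad already reads, and the remaining per-camera
// fields would follow the same pattern.
private static void AssertGeometriesEqual(robotGeometry expected, robotGeometry actual, int no_of_stereo_cameras)
{
    // body dimensions and centre of rotation
    Assert.AreEqual(expected.body_width_mm, actual.body_width_mm);
    Assert.AreEqual(expected.body_length_mm, actual.body_length_mm);
    Assert.AreEqual(expected.body_height_mm, actual.body_height_mm);
    Assert.AreEqual(expected.body_centre_of_rotation_x, actual.body_centre_of_rotation_x);
    Assert.AreEqual(expected.body_centre_of_rotation_y, actual.body_centre_of_rotation_y);
    Assert.AreEqual(expected.body_centre_of_rotation_z, actual.body_centre_of_rotation_z);

    // per-camera parameters
    for (int cam = 0; cam < no_of_stereo_cameras; cam++)
    {
        Assert.AreEqual(expected.baseline_mm[cam], actual.baseline_mm[cam]);
        Assert.AreEqual(expected.image_width[cam], actual.image_width[cam]);
        Assert.AreEqual(expected.image_height[cam], actual.image_height[cam]);
        Assert.AreEqual(expected.FOV_degrees[cam], actual.FOV_degrees[cam]);
        // ...position, pan, tilt and roll checks continue as in SaveAndLoad
    }
}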
public void LocaliseAlongPath()
{
    // systematic bias deliberately added to the robot's assumed pose
    float bias_x_mm = -200;
    float bias_y_mm = 0;

    // number of stereo cameras on the robot's head
    int no_of_stereo_cameras = 2;

    // diameter of the robot's head
    float head_diameter_mm = 100;

    // number of stereo features per step during mapping
    int no_of_mapping_stereo_features = 300;

    // number of stereo features observed per localisation step
    int no_of_localisation_stereo_features = 100;

    string filename = "localise_along_path.dat";
    float path_length_mm = 20000;
    float start_orientation = 0;
    float end_orientation = 0; // 90 * (float)Math.PI / 180.0f;
    float distance_between_poses_mm = 100;
    float disparity = 15;

    string overall_map_filename = "overall_map.jpg";
    byte[] overall_map_img = null;
    int overall_img_width = 640;
    int overall_img_height = 480;
    int overall_map_dimension_mm = 0;
    int overall_map_centre_x_mm = 0;
    int overall_map_centre_y_mm = 0;

    string[] str = filename.Split('.');
    List<OdometryData> path = null;

    // create a test path with stereo observations recorded along it
    SavePath(
        filename,
        path_length_mm,
        start_orientation,
        end_orientation,
        distance_between_poses_mm,
        disparity,
        no_of_mapping_stereo_features,
        no_of_stereo_cameras,
        ref path);

    Assert.AreEqual(true, File.Exists(filename));
    Assert.AreEqual(true, File.Exists(str[0] + "_disparities_index.dat"));
    Assert.AreEqual(true, File.Exists(str[0] + "_disparities.dat"));

    // create a grid buffer and load the recorded path into it
    int no_of_grids = 1;
    int grid_type = metagrid.TYPE_SIMPLE;
    int dimension_mm = 8000;
    int dimension_vertical_mm = 2000;
    int cellSize_mm = 50;
    int localisationRadius_mm = 8000;
    int maxMappingRange_mm = 10000;
    float vacancyWeighting = 0.5f;

    metagridBuffer buffer =
        new metagridBuffer(
            no_of_grids,
            grid_type,
            dimension_mm,
            dimension_vertical_mm,
            cellSize_mm,
            localisationRadius_mm,
            maxMappingRange_mm,
            vacancyWeighting);

    buffer.LoadPath(
        filename,
        str[0] + "_disparities_index.dat",
        str[0] + "_disparities.dat",
        ref overall_map_dimension_mm,
        ref overall_map_centre_x_mm,
        ref overall_map_centre_y_mm);

    // save an image showing the path
    int img_width = 640;
    int img_height = 640;
    byte[] img = new byte[img_width * img_height * 3];
    Bitmap bmp = new Bitmap(img_width, img_height, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
    buffer.ShowPath(img, img_width, img_height, true, true);
    BitmapArrayConversions.updatebitmap_unsafe(img, bmp);
    bmp.Save("localise_along_path.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);

    // robot geometry and sensor models
    robotGeometry geom = new robotGeometry();
    geom.CreateStereoCameras(
        no_of_stereo_cameras, 120, 0,
        320, 240, 65,
        head_diameter_mm, 0);
    geom.CreateSensorModels(buffer);

    Random rnd = new Random(0);
    pos3D pose_offset = null;
    bool buffer_transition = false;

    float[][] stereo_features = new float[no_of_stereo_cameras][];
    byte[][,] stereo_features_colour = new byte[no_of_stereo_cameras][,];
    float[][] stereo_features_uncertainties = new float[no_of_stereo_cameras][];

    // all stereo features are given equal uncertainty
    for (int i = 0; i < no_of_stereo_cameras; i++)
    {
        stereo_features_uncertainties[i] = new float[no_of_localisation_stereo_features];
        for (int j = 0; j < no_of_localisation_stereo_features; j++)
            stereo_features_uncertainties[i][j] = 1;
    }

    float average_offset_x_mm = 0;
    float average_offset_y_mm = 0;
    List<OdometryData> estimated_path = new List<OdometryData>();

    int no_of_localisation_failures = 0;

    // localise at intervals along the path
    for (int i = 0; i < path.Count - 1; i += 5)
    {
        string debug_mapping_filename = "localise_along_path_map_" + i.ToString() + ".jpg";

        OdometryData p0 = path[i];
        OdometryData p1 = path[i + 1];

        // create an intermediate pose halfway between the two path poses,
        // displaced by the systematic bias
        for (int cam = 0; cam < no_of_stereo_cameras; cam++)
        {
            geom.pose[cam].x = p0.x + ((p1.x - p0.x) / 2) + bias_x_mm;
            geom.pose[cam].y = p0.y + ((p1.y - p0.y) / 2) + bias_y_mm;
            geom.pose[cam].z = 0;
            geom.pose[cam].pan = p0.orientation + ((p1.orientation - p0.orientation) / 2);

            // create stereo features near the left and right edges of the image
            int ctr = 0;
            stereo_features[cam] = new float[no_of_localisation_stereo_features * 3];
            stereo_features_colour[cam] = new byte[no_of_localisation_stereo_features, 3];
            for (int f = 0; f < no_of_localisation_stereo_features; f += 5)
            {
                if (f < no_of_localisation_stereo_features / 2)
                {
                    stereo_features[cam][ctr++] = 20;
                    stereo_features[cam][ctr++] = rnd.Next(239);
                }
                else
                {
                    stereo_features[cam][ctr++] = geom.image_width[cam] - 20;
                    stereo_features[cam][ctr++] = rnd.Next(239);
                }
                stereo_features[cam][ctr++] = disparity;
            }
        }

        // attempt to localise against the map
        float matching_score =
            buffer.Localise(
                geom,
                stereo_features,
                stereo_features_colour,
                stereo_features_uncertainties,
                rnd,
                ref pose_offset,
                ref buffer_transition,
                debug_mapping_filename,
                bias_x_mm, bias_y_mm,
                overall_map_filename,
                ref overall_map_img,
                overall_img_width, overall_img_height,
                overall_map_dimension_mm,
                overall_map_centre_x_mm,
                overall_map_centre_y_mm);

        if (matching_score != occupancygridBase.NO_OCCUPANCY_EVIDENCE)
        {
            Console.WriteLine("pose_offset (mm): " + pose_offset.x.ToString() + ", " + pose_offset.y.ToString() + ", " + pose_offset.pan.ToString());

            OdometryData estimated_pose = new OdometryData();
            estimated_pose.x = geom.pose[0].x + pose_offset.x;
            estimated_pose.y = geom.pose[0].y + pose_offset.y;
            estimated_pose.orientation = geom.pose[0].pan + pose_offset.pan;
            estimated_path.Add(estimated_pose);

            average_offset_x_mm += pose_offset.x;
            average_offset_y_mm += pose_offset.y;
        }
        else
        {
            // fail!
            no_of_localisation_failures++;
            Console.WriteLine("Localisation failure");
        }
    }

    // save an image showing the localisations along the path
    buffer.ShowPath(img, img_width, img_height, true, true);
    BitmapArrayConversions.updatebitmap_unsafe(img, bmp);
    bmp.Save("localisations_along_path.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);

    // the average pose offset should recover the injected bias
    average_offset_x_mm /= estimated_path.Count;
    average_offset_y_mm /= estimated_path.Count;
    Console.WriteLine("Average offsets: " + average_offset_x_mm.ToString() + ", " + average_offset_y_mm.ToString());

    float diff_x_mm = Math.Abs(average_offset_x_mm - bias_x_mm);
    float diff_y_mm = Math.Abs(average_offset_y_mm - bias_y_mm);
    Assert.Less(diff_x_mm, cellSize_mm * 3 / 2, "x bias not detected");
    Assert.Less(diff_y_mm, cellSize_mm * 3 / 2, "y bias not detected");

    if (no_of_localisation_failures > 0)
        Console.WriteLine("Localisation failures: " + no_of_localisation_failures.ToString());
    else
        Console.WriteLine("No localisation failures!");

    Assert.Less(no_of_localisation_failures, 4, "Too many localisation failures");
}
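// The two helpers below are hypothetical sketches rather than part of the existing
// fixture; they only restate logic already present in LocaliseAlongPath, using the
// same fields of pos3D and OdometryData that the test reads, and are included to
// make the test's intent easier to see at a glance.

// Midpoint pose between two consecutive path poses, displaced by the injected
// systematic bias - this is the assumed pose from which each localisation step starts.
private static void SetBiasedMidpointPose(
    pos3D pose,
    OdometryData p0, OdometryData p1,
    float bias_x_mm, float bias_y_mm)
{
    pose.x = p0.x + ((p1.x - p0.x) / 2) + bias_x_mm;
    pose.y = p0.y + ((p1.y - p0.y) / 2) + bias_y_mm;
    pose.z = 0;
    pose.pan = p0.orientation + ((p1.orientation - p0.orientation) / 2);
}

// Acceptance band used by the final assertions: with a 50mm cell size the recovered
// average offset must lie within 1.5 cells (75mm) of the injected bias.
private static bool BiasRecovered(
    float average_offset_mm,
    float injected_bias_mm,
    int cellSize_mm)
{
    return Math.Abs(average_offset_mm - injected_bias_mm) < cellSize_mm * 3 / 2;
}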