/// <summary>
/// calculate the position of the robot's head and cameras for this pose
/// </summary>
/// <param name="rob">robot object</param>
/// <param name="head_location">location of the centre of the head</param>
/// <param name="camera_centre_location">location of the centre of each stereo camera</param>
/// <param name="left_camera_location">location of the left camera within each stereo camera</param>
/// <param name="right_camera_location">location of the right camera within each stereo camera</param>
protected void calculateCameraPositions(
    robot rob,
    ref pos3D head_location,
    ref pos3D[] camera_centre_location,
    ref pos3D[] left_camera_location,
    ref pos3D[] right_camera_location)
{
    // calculate the position of the centre of the head relative to
    // the centre of rotation of the robot's body
    pos3D head_centroid =
        new pos3D(
            -(rob.BodyWidth_mm / 2) + rob.head.x,
            -(rob.BodyLength_mm / 2) + rob.head.y,
            rob.head.z);

    // location of the centre of the head on the grid map
    // adjusted for the robot pose and the head pan and tilt angle.
    // Note that the positions and orientations of individual cameras
    // on the head have already been accounted for within stereoModel.createObservation
    pos3D head_locn = head_centroid.rotate(rob.head.pan + pan, rob.head.tilt, 0);
    head_locn = head_locn.translate(x, y, 0);
    head_location.copyFrom(head_locn);

    for (int cam = 0; cam < rob.head.no_of_stereo_cameras; cam++)
    {
        // calculate the position of the centre of the stereo camera
        // (baseline centre point)
        pos3D camera_centre_locn =
            new pos3D(
                rob.head.calibration[cam].positionOrientation.x,
                rob.head.calibration[cam].positionOrientation.y,
                rob.head.calibration[cam].positionOrientation.z);
        camera_centre_locn =
            camera_centre_locn.rotate(
                rob.head.calibration[cam].positionOrientation.pan + rob.head.pan + pan,
                rob.head.calibration[cam].positionOrientation.tilt,
                rob.head.calibration[cam].positionOrientation.roll);
        camera_centre_location[cam] =
            camera_centre_locn.translate(head_location.x, head_location.y, head_location.z);

        // where are the left and right cameras?
        // we need to know this for the origins of the vacancy models
        float half_baseline_length = rob.head.calibration[cam].baseline / 2;
        pos3D left_camera_locn = new pos3D(-half_baseline_length, 0, 0);
        left_camera_locn =
            left_camera_locn.rotate(
                rob.head.calibration[cam].positionOrientation.pan + rob.head.pan + pan,
                rob.head.calibration[cam].positionOrientation.tilt,
                rob.head.calibration[cam].positionOrientation.roll);
        pos3D right_camera_locn = new pos3D(-left_camera_locn.x, -left_camera_locn.y, -left_camera_locn.z);
        left_camera_location[cam] =
            left_camera_locn.translate(
                camera_centre_location[cam].x,
                camera_centre_location[cam].y,
                camera_centre_location[cam].z);
        right_camera_location[cam] =
            right_camera_locn.translate(
                camera_centre_location[cam].x,
                camera_centre_location[cam].y,
                camera_centre_location[cam].z);
        right_camera_location[cam].pan = left_camera_location[cam].pan;
    }
}
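// The helper below is an illustrative usage sketch, not part of the original class.
// It assumes it sits in the same class as calculateCameraPositions above and that the
// robot object passed in has already been fully configured elsewhere. It only shows
// how the ref arrays are expected to be sized (one entry per stereo camera) and read back.
protected void debugShowCameraPositions(robot rob)
{
    int no_of_cameras = rob.head.no_of_stereo_cameras;
    pos3D head_location = new pos3D(0, 0, 0);
    pos3D[] camera_centre_location = new pos3D[no_of_cameras];
    pos3D[] left_camera_location = new pos3D[no_of_cameras];
    pos3D[] right_camera_location = new pos3D[no_of_cameras];

    calculateCameraPositions(
        rob,
        ref head_location,
        ref camera_centre_location,
        ref left_camera_location,
        ref right_camera_location);

    for (int cam = 0; cam < no_of_cameras; cam++)
    {
        // left and right camera positions should be separated by the camera baseline,
        // either side of the stereo camera centre position
        Console.WriteLine("camera " + cam.ToString() + " left x,y,z: " +
                          left_camera_location[cam].x.ToString() + ", " +
                          left_camera_location[cam].y.ToString() + ", " +
                          left_camera_location[cam].z.ToString());
    }
}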
public void InsertRays()
{
    int no_of_stereo_features = 2000;
    int image_width = 640;
    int image_height = 480;
    int no_of_stereo_cameras = 1;
    int localisationRadius_mm = 16000;
    int maxMappingRange_mm = 16000;
    int cellSize_mm = 32;
    int dimension_cells = 16000 / cellSize_mm;
    int dimension_cells_vertical = dimension_cells / 2;
    float vacancyWeighting = 0.5f;
    float FOV_horizontal = 78 * (float)Math.PI / 180.0f;

    // create a grid
    Console.WriteLine("Creating grid");
    occupancygridSimple grid =
        new occupancygridSimple(
            dimension_cells,
            dimension_cells_vertical,
            cellSize_mm,
            localisationRadius_mm,
            maxMappingRange_mm,
            vacancyWeighting);

    Assert.AreNotEqual(grid, null, "object occupancygridSimple was not created");

    Console.WriteLine("Creating sensor models");
    stereoModel inverseSensorModel = new stereoModel();
    inverseSensorModel.FOV_horizontal = FOV_horizontal;
    inverseSensorModel.FOV_vertical = FOV_horizontal * image_height / image_width;
    inverseSensorModel.createLookupTable(cellSize_mm, image_width, image_height);
    //Assert.AreNotEqual(0, inverseSensorModel.ray_model.probability[1][5], "Ray model probabilities not updated");

    // observer parameters
    int pan_angle_degrees = 0;
    pos3D observer = new pos3D(0, 0, 0);
    observer.pan = pan_angle_degrees * (float)Math.PI / 180.0f;
    float stereo_camera_baseline_mm = 100;
    pos3D left_camera_location = new pos3D(stereo_camera_baseline_mm * 0.5f, 0, 0);
    pos3D right_camera_location = new pos3D(-stereo_camera_baseline_mm * 0.5f, 0, 0);
    left_camera_location = left_camera_location.rotate(observer.pan, observer.tilt, observer.roll);
    right_camera_location = right_camera_location.rotate(observer.pan, observer.tilt, observer.roll);
    left_camera_location = left_camera_location.translate(observer.x, observer.y, observer.z);
    right_camera_location = right_camera_location.translate(observer.x, observer.y, observer.z);
    float FOV_degrees = 78;
    float[] stereo_features = new float[no_of_stereo_features * 3];
    byte[,] stereo_features_colour = new byte[no_of_stereo_features, 3];
    float[] stereo_features_uncertainties = new float[no_of_stereo_features];

    // create some stereo disparities within the field of view
    Console.WriteLine("Adding disparities");
    //MersenneTwister rnd = new MersenneTwister(0);
    Random rnd = new Random(0);
    for (int correspondence = 0; correspondence < no_of_stereo_features; correspondence++)
    {
        float x = rnd.Next(image_width - 1);
        float y = rnd.Next(image_height / 50) + (image_height / 2);
        float disparity = 7;
        if ((x < image_width / 5) || (x > image_width * 4 / 5))
        {
            disparity = 7; //15;
        }
        byte colour_red = (byte)rnd.Next(255);
        byte colour_green = (byte)rnd.Next(255);
        byte colour_blue = (byte)rnd.Next(255);

        stereo_features[correspondence * 3] = x;
        stereo_features[(correspondence * 3) + 1] = y;
        stereo_features[(correspondence * 3) + 2] = disparity;
        stereo_features_colour[correspondence, 0] = colour_red;
        stereo_features_colour[correspondence, 1] = colour_green;
        stereo_features_colour[correspondence, 2] = colour_blue;
        stereo_features_uncertainties[correspondence] = 0;
    }

    // create an observation as a set of rays from the stereo correspondence results
    List<evidenceRay>[] stereo_rays = new List<evidenceRay>[no_of_stereo_cameras];
    for (int cam = 0; cam < no_of_stereo_cameras; cam++)
    {
        Console.WriteLine("Creating rays");
        stereo_rays[cam] =
            inverseSensorModel.createObservation(
                observer,
                stereo_camera_baseline_mm,
                image_width,
                image_height,
                FOV_degrees,
                stereo_features,
                stereo_features_colour,
                stereo_features_uncertainties,
                true);

        // insert rays into the grid
        Console.WriteLine("Throwing rays");
        for (int ray = 0; ray < stereo_rays[cam].Count; ray++)
        {
            grid.Insert(
                stereo_rays[cam][ray],
                inverseSensorModel.ray_model,
                left_camera_location,
                right_camera_location,
                false);
        }
    }

    // save the result as an image
    Console.WriteLine("Saving grid");
    int debug_img_width = 640;
    int debug_img_height = 480;
    byte[] debug_img = new byte[debug_img_width * debug_img_height * 3];
    Bitmap bmp = new Bitmap(debug_img_width, debug_img_height, System.Drawing.Imaging.PixelFormat.Format24bppRgb);

    grid.Show(debug_img, debug_img_width, debug_img_height, false, false);
    BitmapArrayConversions.updatebitmap_unsafe(debug_img, bmp);
    bmp.Save("tests_occupancygrid_simple_InsertRays_overhead.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);

    grid.ShowFront(debug_img, debug_img_width, debug_img_height, true);
    BitmapArrayConversions.updatebitmap_unsafe(debug_img, bmp);
    bmp.Save("tests_occupancygrid_simple_InsertRays_front.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);

    // side view of the probabilities
    float max_prob = -1;
    float min_prob = 1;
    float[] probs = new float[dimension_cells / 2];
    float[] mean_colour = new float[3];
    for (int y = dimension_cells / 2; y < dimension_cells; y++)
    {
        float p = grid.GetProbability(dimension_cells / 2, y, mean_colour);
        probs[y - (dimension_cells / 2)] = p;
        if (p != occupancygridSimple.NO_OCCUPANCY_EVIDENCE)
        {
            if (p < min_prob) min_prob = p;
            if (p > max_prob) max_prob = p;
        }
    }

    for (int i = 0; i < debug_img.Length; i++) debug_img[i] = 255;
    int prev_x = -1;
    int prev_y = debug_img_height / 2;
    for (int i = 0; i < probs.Length; i++)
    {
        if (probs[i] != occupancygridSimple.NO_OCCUPANCY_EVIDENCE)
        {
            int x = i * (debug_img_width - 1) / probs.Length;
            int y = debug_img_height - 1 - (int)((probs[i] - min_prob) / (max_prob - min_prob) * (debug_img_height - 1));
            int n = ((y * debug_img_width) + x) * 3;
            if (prev_x > -1)
            {
                int r = 255;
                int g = 0;
                int b = 0;
                if (probs[i] > 0.5f)
                {
                    r = 0;
                    g = 255;
                    b = 0;
                }
                drawing.drawLine(debug_img, debug_img_width, debug_img_height, prev_x, prev_y, x, y, r, g, b, 0, false);
            }
            prev_x = x;
            prev_y = y;
        }
    }
    int y_zero = debug_img_height - 1 - (int)((0.5f - min_prob) / (max_prob - min_prob) * (debug_img_height - 1));
    drawing.drawLine(debug_img, debug_img_width, debug_img_height, 0, y_zero, debug_img_width - 1, y_zero, 0, 0, 0, 0, false);

    BitmapArrayConversions.updatebitmap_unsafe(debug_img, bmp);
    bmp.Save("tests_occupancygrid_simple_InsertRays_probs.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
}
/// <summary>
/// Calculate the position of the robot's head and cameras for this pose.
/// The positions returned are relative to the robot's centre of rotation.
/// </summary>
/// <param name="body_width_mm">width of the robot body in millimetres</param>
/// <param name="body_length_mm">length of the robot body in millimetres</param>
/// <param name="body_centre_of_rotation_x">x centre of rotation of the robot, relative to the top left corner</param>
/// <param name="body_centre_of_rotation_y">y centre of rotation of the robot, relative to the top left corner</param>
/// <param name="body_centre_of_rotation_z">z centre of rotation of the robot, relative to the top left corner</param>
/// <param name="head_centroid_x">head centroid x position in millimetres relative to the top left corner of the body</param>
/// <param name="head_centroid_y">head centroid y position in millimetres relative to the top left corner of the body</param>
/// <param name="head_centroid_z">head centroid z position in millimetres relative to the top left corner of the body</param>
/// <param name="head_pan">head pan angle in radians</param>
/// <param name="head_tilt">head tilt angle in radians</param>
/// <param name="head_roll">head roll angle in radians</param>
/// <param name="baseline_mm">stereo camera baseline in millimetres</param>
/// <param name="stereo_camera_position_x">stereo camera x position in millimetres relative to the head centroid</param>
/// <param name="stereo_camera_position_y">stereo camera y position in millimetres relative to the head centroid</param>
/// <param name="stereo_camera_position_z">stereo camera z position in millimetres relative to the head centroid</param>
/// <param name="stereo_camera_pan">stereo camera pan in radians relative to the head</param>
/// <param name="stereo_camera_tilt">stereo camera tilt in radians relative to the head</param>
/// <param name="stereo_camera_roll">stereo camera roll in radians relative to the head</param>
/// <param name="head_location">returned location/orientation of the robot head</param>
/// <param name="camera_centre_location">returned stereo camera centre position/orientation</param>
/// <param name="left_camera_location">returned left camera position/orientation</param>
/// <param name="right_camera_location">returned right camera position/orientation</param>
public static void calculateCameraPositions(
    float body_width_mm,
    float body_length_mm,
    float body_centre_of_rotation_x,
    float body_centre_of_rotation_y,
    float body_centre_of_rotation_z,
    float head_centroid_x,
    float head_centroid_y,
    float head_centroid_z,
    float head_pan,
    float head_tilt,
    float head_roll,
    float baseline_mm,
    float stereo_camera_position_x,
    float stereo_camera_position_y,
    float stereo_camera_position_z,
    float stereo_camera_pan,
    float stereo_camera_tilt,
    float stereo_camera_roll,
    ref pos3D head_location,
    ref pos3D camera_centre_location,
    ref pos3D left_camera_location,
    ref pos3D right_camera_location)
{
    // calculate the position of the centre of the head relative to
    // the centre of rotation of the robot's body
    pos3D head_centroid =
        new pos3D(
            -(body_width_mm * 0.5f) + (body_centre_of_rotation_x - (body_width_mm * 0.5f)) + head_centroid_x,
            -(body_length_mm * 0.5f) + (body_centre_of_rotation_y - (body_length_mm * 0.5f)) + head_centroid_y,
            head_centroid_z);

    // location of the centre of the head,
    // adjusted for the robot pose and the head pan and tilt angle.
    // Note that the positions and orientations of individual cameras
    // on the head have already been accounted for within stereoModel.createObservation
    pos3D head_locn =
        head_centroid.rotate(
            head_pan, head_tilt, 0);
    head_location.copyFrom(head_locn);

    // calculate the position of the centre of the stereo camera
    // (baseline centre point)
    pos3D camera_centre_locn =
        new pos3D(
            stereo_camera_position_x,
            stereo_camera_position_y,
            stereo_camera_position_z);

    // rotate the stereo camera
    camera_centre_locn =
        camera_centre_locn.rotate(
            stereo_camera_pan + head_pan,
            stereo_camera_tilt + head_tilt,
            stereo_camera_roll + head_roll);

    // move the stereo camera relative to the head position
    camera_centre_location =
        camera_centre_locn.translate(
            head_location.x,
            head_location.y,
            head_location.z);

    // where are the left and right cameras?
    // we need to know this for the origins of the vacancy models
    float half_baseline_length = baseline_mm * 0.5f;
    pos3D left_camera_locn = new pos3D(-half_baseline_length, 0, 0);
    left_camera_locn =
        left_camera_locn.rotate(
            stereo_camera_pan + head_pan,
            stereo_camera_tilt + head_tilt,
            stereo_camera_roll + head_roll);
    pos3D right_camera_locn = new pos3D(-left_camera_locn.x, -left_camera_locn.y, -left_camera_locn.z);
    left_camera_location = left_camera_locn.translate(camera_centre_location.x, camera_centre_location.y, camera_centre_location.z);
    right_camera_location = right_camera_locn.translate(camera_centre_location.x, camera_centre_location.y, camera_centre_location.z);
    right_camera_location.pan = left_camera_location.pan;
}
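// Illustrative usage sketch only, not part of the original code. It assumes it sits in
// the same class as the static calculateCameraPositions above; all geometry values are
// arbitrary example numbers rather than measurements of any real robot.
private static void exampleCalculateCameraPositions()
{
    pos3D head_location = new pos3D(0, 0, 0);
    pos3D camera_centre_location = null;
    pos3D left_camera_location = null;
    pos3D right_camera_location = null;

    calculateCameraPositions(
        500, 600,         // body width / length in millimetres
        250, 300, 0,      // centre of rotation relative to the top left corner
        250, 300, 1000,   // head centroid relative to the top left corner of the body
        0, 0, 0,          // head pan / tilt / roll in radians
        100,              // stereo camera baseline in millimetres
        0, 50, 0,         // stereo camera position relative to the head centroid
        0, 0, 0,          // stereo camera pan / tilt / roll relative to the head
        ref head_location,
        ref camera_centre_location,
        ref left_camera_location,
        ref right_camera_location);

    // with zero pan the left and right cameras end up half a baseline
    // either side of the returned stereo camera centre position
    Console.WriteLine("left camera x:  " + left_camera_location.x.ToString());
    Console.WriteLine("right camera x: " + right_camera_location.x.ToString());
}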
/// <summary>
/// Mapping
/// </summary>
/// <param name="body_width_mm">width of the robot body in millimetres</param>
/// <param name="body_length_mm">length of the robot body in millimetres</param>
/// <param name="body_centre_of_rotation_x">x centre of rotation of the robot, relative to the top left corner</param>
/// <param name="body_centre_of_rotation_y">y centre of rotation of the robot, relative to the top left corner</param>
/// <param name="body_centre_of_rotation_z">z centre of rotation of the robot, relative to the top left corner</param>
/// <param name="head_centroid_x">head centroid x position in millimetres relative to the top left corner of the body</param>
/// <param name="head_centroid_y">head centroid y position in millimetres relative to the top left corner of the body</param>
/// <param name="head_centroid_z">head centroid z position in millimetres relative to the top left corner of the body</param>
/// <param name="head_pan">head pan angle in radians</param>
/// <param name="head_tilt">head tilt angle in radians</param>
/// <param name="head_roll">head roll angle in radians</param>
/// <param name="stereo_camera_index">index of the stereo camera on the head</param>
/// <param name="baseline_mm">stereo camera baseline in millimetres</param>
/// <param name="stereo_camera_position_x">stereo camera x position in millimetres relative to the head centroid</param>
/// <param name="stereo_camera_position_y">stereo camera y position in millimetres relative to the head centroid</param>
/// <param name="stereo_camera_position_z">stereo camera z position in millimetres relative to the head centroid</param>
/// <param name="stereo_camera_pan">stereo camera pan in radians relative to the head</param>
/// <param name="stereo_camera_tilt">stereo camera tilt in radians relative to the head</param>
/// <param name="stereo_camera_roll">stereo camera roll in radians relative to the head</param>
/// <param name="image_width">image width for each stereo camera</param>
/// <param name="image_height">image height for each stereo camera</param>
/// <param name="FOV_degrees">field of view for each stereo camera in degrees</param>
/// <param name="stereo_features">stereo features (disparities) for each stereo camera</param>
/// <param name="stereo_features_colour">stereo feature colours for each stereo camera</param>
/// <param name="stereo_features_uncertainties">stereo feature uncertainties (priors) for each stereo camera</param>
/// <param name="sensormodel">sensor model for each grid level</param>
/// <param name="left_camera_location">returned position and orientation of the left camera on each stereo camera</param>
/// <param name="right_camera_location">returned position and orientation of the right camera on each stereo camera</param>
/// <param name="robot_pose">current estimated position and orientation of the robot's centre of rotation</param>
public void Map(
    float body_width_mm,
    float body_length_mm,
    float body_centre_of_rotation_x,
    float body_centre_of_rotation_y,
    float body_centre_of_rotation_z,
    float head_centroid_x,
    float head_centroid_y,
    float head_centroid_z,
    float head_pan,
    float head_tilt,
    float head_roll,
    int stereo_camera_index,
    float baseline_mm,
    float stereo_camera_position_x,
    float stereo_camera_position_y,
    float stereo_camera_position_z,
    float stereo_camera_pan,
    float stereo_camera_tilt,
    float stereo_camera_roll,
    int image_width,
    int image_height,
    float FOV_degrees,
    float[] stereo_features,
    byte[,] stereo_features_colour,
    float[] stereo_features_uncertainties,
    stereoModel[][] sensormodel,
    ref pos3D left_camera_location,
    ref pos3D right_camera_location,
    pos3D robot_pose)
{
    // positions of the left and right camera relative to the robot's centre of rotation
    pos3D head_location = new pos3D(0, 0, 0);
    pos3D camera_centre_location = null;
    pos3D relative_left_cam = null;
    pos3D relative_right_cam = null;
    occupancygridBase.calculateCameraPositions(
        body_width_mm,
        body_length_mm,
        body_centre_of_rotation_x,
        body_centre_of_rotation_y,
        body_centre_of_rotation_z,
        head_centroid_x,
        head_centroid_y,
        head_centroid_z,
        head_pan,
        head_tilt,
        head_roll,
        baseline_mm,
        stereo_camera_position_x,
        stereo_camera_position_y,
        stereo_camera_position_z,
        stereo_camera_pan,
        stereo_camera_tilt,
        stereo_camera_roll,
        ref head_location,
        ref camera_centre_location,
        ref relative_left_cam,
        ref relative_right_cam);

    left_camera_location = relative_left_cam.translate(robot_pose.x, robot_pose.y, robot_pose.z);
    right_camera_location = relative_right_cam.translate(robot_pose.x, robot_pose.y, robot_pose.z);

    pos3D stereo_camera_centre = new pos3D(0, 0, 0);

    // update the grid

    // centre position between the left and right cameras
    stereo_camera_centre.x = left_camera_location.x + ((right_camera_location.x - left_camera_location.x) * 0.5f);
    stereo_camera_centre.y = left_camera_location.y + ((right_camera_location.y - left_camera_location.y) * 0.5f);
    stereo_camera_centre.z = left_camera_location.z + ((right_camera_location.z - left_camera_location.z) * 0.5f);
    stereo_camera_centre.pan = robot_pose.pan + head_pan + stereo_camera_pan;
    stereo_camera_centre.tilt = robot_pose.tilt + head_tilt + stereo_camera_tilt;
    stereo_camera_centre.roll = robot_pose.roll + head_roll + stereo_camera_roll;

    // create a set of stereo rays as observed from this pose
    List<evidenceRay> rays =
        sensormodel[stereo_camera_index][0].createObservation(
            stereo_camera_centre,
            baseline_mm,
            image_width,
            image_height,
            FOV_degrees,
            stereo_features,
            stereo_features_colour,
            stereo_features_uncertainties,
            true);

    // insert rays into the occupancy grid
    for (int r = 0; r < rays.Count; r++)
    {
        Insert(rays[r], sensormodel[stereo_camera_index],
               left_camera_location, right_camera_location,
               false);
    }
}
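// Illustrative sketch only, not part of the original code. It assumes it sits in the
// same class as Map above, and mirrors the sensor model setup used in the InsertRays
// test. All geometry values, the cell size and the feature count are arbitrary example
// numbers, and the features are synthetic rather than real stereo correspondences.
public void exampleMapSingleStereoCamera(pos3D robot_pose)
{
    int image_width = 640;
    int image_height = 480;
    float FOV_degrees = 78;
    int no_of_stereo_features = 100;

    // a single stereo camera with a single grid level sensor model
    stereoModel inverseSensorModel = new stereoModel();
    inverseSensorModel.FOV_horizontal = FOV_degrees * (float)Math.PI / 180.0f;
    inverseSensorModel.FOV_vertical = inverseSensorModel.FOV_horizontal * image_height / image_width;
    inverseSensorModel.createLookupTable(32, image_width, image_height);
    stereoModel[][] sensormodel = new stereoModel[1][];
    sensormodel[0] = new stereoModel[] { inverseSensorModel };

    // synthetic features stored as x, y, disparity triplets
    float[] stereo_features = new float[no_of_stereo_features * 3];
    byte[,] stereo_features_colour = new byte[no_of_stereo_features, 3];
    float[] stereo_features_uncertainties = new float[no_of_stereo_features];
    Random rnd = new Random(0);
    for (int f = 0; f < no_of_stereo_features; f++)
    {
        stereo_features[f * 3] = rnd.Next(image_width - 1);
        stereo_features[(f * 3) + 1] = rnd.Next(image_height - 1);
        stereo_features[(f * 3) + 2] = 7;
    }

    pos3D left_camera_location = null;
    pos3D right_camera_location = null;
    Map(500, 600,         // body width / length in millimetres
        250, 300, 0,      // centre of rotation relative to the top left corner
        250, 300, 1000,   // head centroid relative to the top left corner of the body
        0, 0, 0,          // head pan / tilt / roll in radians
        0,                // stereo camera index
        100,              // baseline in millimetres
        0, 50, 0,         // stereo camera position relative to the head centroid
        0, 0, 0,          // stereo camera pan / tilt / roll relative to the head
        image_width, image_height, FOV_degrees,
        stereo_features, stereo_features_colour, stereo_features_uncertainties,
        sensormodel,
        ref left_camera_location, ref right_camera_location,
        robot_pose);
}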