/// <summary>
/// returns a version of this evidence ray rotated through the given
/// pan angle in the XY plane. This is used for generating trial poses.
/// </summary>
/// <param name="extra_pan">pan angle to be added</param>
/// <param name="extra_tilt">tilt angle to be added</param>
/// <param name="extra_roll">roll angle to be added</param>
/// <param name="translation_x">new observation x coordinate</param>
/// <param name="translation_y">new observation y coordinate</param>
/// <param name="translation_z">new observation z coordinate</param>
/// <returns>rotated and translated evidence ray object</returns>
public evidenceRay trialPose(
    float extra_pan,
    float extra_tilt,
    float extra_roll,
    float translation_x,
    float translation_y,
    float translation_z)
{
    evidenceRay rotated_ray = new evidenceRay();
    // total orientation = this ray's orientation plus the trial offsets
    float new_pan_angle = extra_pan + pan_angle;
    float new_tilt_angle = extra_tilt + tilt_angle;
    float new_roll_angle = extra_roll + roll_angle;

    // as observed from the centre of the camera baseline
    rotated_ray.observedFrom = new pos3D(translation_x, translation_y, translation_z);
    rotated_ray.fattestPoint = fattestPoint;

    // near end of the ray: rotate about the origin, then translate
    // to the new observation point
    pos3D r1 = new pos3D(0, start_dist, 0);
    rotated_ray.vertices[0] = r1.rotate(new_pan_angle, new_tilt_angle, new_roll_angle);
    rotated_ray.vertices[0].x += translation_x;
    rotated_ray.vertices[0].y += translation_y;
    rotated_ray.vertices[0].z += translation_z;

    // far end of the ray: same rotation and translation
    pos3D r2 = new pos3D(0, start_dist + length, 0);
    rotated_ray.vertices[1] = r2.rotate(new_pan_angle, new_tilt_angle, new_roll_angle);
    rotated_ray.vertices[1].x += translation_x;
    rotated_ray.vertices[1].y += translation_y;
    rotated_ray.vertices[1].z += translation_z;

    rotated_ray.pan_angle = new_pan_angle;
    rotated_ray.tilt_angle = new_tilt_angle;
    rotated_ray.roll_angle = new_roll_angle;
    // quantise each angle into an index bin (6.2831854 = 2*PI)
    // NOTE(review): pan_steps is used as the bin count for the tilt and roll
    // indices as well as pan - confirm this is intentional rather than a
    // missing tilt_steps/roll_steps
    rotated_ray.pan_index = (int)(new_pan_angle * pan_steps / 6.2831854f);
    rotated_ray.tilt_index = (int)(new_tilt_angle * pan_steps / 6.2831854f);
    rotated_ray.roll_index = (int)(new_roll_angle * pan_steps / 6.2831854f);

    // copy the remaining ray properties unchanged
    rotated_ray.length = length;
    rotated_ray.width = width;
    rotated_ray.start_dist = start_dist;
    rotated_ray.disparity = disparity;
    for (int col = 2; col >= 0; col--)
    {
        rotated_ray.colour[col] = colour[col];
    }
    return(rotated_ray);
}
/// <summary>
/// calculate the position of the robots head and cameras for this pose
/// </summary>
/// <param name="rob">robot object</param>
/// <param name="head_location">returned location of the centre of the head</param>
/// <param name="camera_centre_location">returned location of the centre of each stereo camera</param>
/// <param name="left_camera_location">returned location of the left camera within each stereo camera</param>
/// <param name="right_camera_location">returned location of the right camera within each stereo camera</param>
public void calculateCameraPositions(
    robot rob,
    ref pos3D head_location,
    ref pos3D[] camera_centre_location,
    ref pos3D[] left_camera_location,
    ref pos3D[] right_camera_location)
{
    // calculate the position of the centre of the head relative to
    // the centre of rotation of the robots body
    pos3D head_centroid = new pos3D(
        -(rob.BodyWidth_mm / 2) + rob.head.x,
        -(rob.BodyLength_mm / 2) + rob.head.y,
        rob.head.z);

    // location of the centre of the head on the grid map
    // adjusted for the robot pose and the head pan and tilt angle.
    // Note that the positions and orientations of individual cameras
    // on the head have already been accounted for within stereoModel.createObservation
    pos3D head_locn = head_centroid.rotate(rob.head.pan + pan, rob.head.tilt + tilt, rob.head.roll + roll);
    head_locn = head_locn.translate(x, y, 0);
    head_location.copyFrom(head_locn);

    for (int cam = 0; cam < rob.head.no_of_stereo_cameras; cam++)
    {
        // calculate the position of the centre of the stereo camera
        // (baseline centre point)
        // BUGFIX: the z coordinate previously copied the camera's y value
        // (copy-paste error); the static overload of this method uses z here
        pos3D camera_centre_locn = new pos3D(
            rob.head.cameraPositionHeadRelative[cam].x,
            rob.head.cameraPositionHeadRelative[cam].y,
            rob.head.cameraPositionHeadRelative[cam].z);
        // NOTE(review): only the camera's own tilt/roll are applied here,
        // without adding rob.head.tilt/roll as the static overload does - confirm
        camera_centre_locn = camera_centre_locn.rotate(
            rob.head.cameraPositionHeadRelative[cam].pan + rob.head.pan + pan,
            rob.head.cameraPositionHeadRelative[cam].tilt,
            rob.head.cameraPositionHeadRelative[cam].roll);
        camera_centre_location[cam] = camera_centre_locn.translate(head_location.x, head_location.y, head_location.z);

        // where are the left and right cameras?
        // we need to know this for the origins of the vacancy models
        float half_baseline_length = rob.head.cameraBaseline[cam] / 2;
        pos3D left_camera_locn = new pos3D(-half_baseline_length, 0, 0);
        left_camera_locn = left_camera_locn.rotate(
            rob.head.cameraPositionHeadRelative[cam].pan + rob.head.pan + pan,
            rob.head.cameraPositionHeadRelative[cam].tilt,
            rob.head.cameraPositionHeadRelative[cam].roll);
        // the right camera mirrors the left about the baseline centre
        pos3D right_camera_locn = new pos3D(-left_camera_locn.x, -left_camera_locn.y, -left_camera_locn.z);
        left_camera_location[cam] = left_camera_locn.translate(camera_centre_location[cam].x, camera_centre_location[cam].y, camera_centre_location[cam].z);
        right_camera_location[cam] = right_camera_locn.translate(camera_centre_location[cam].x, camera_centre_location[cam].y, camera_centre_location[cam].z);
        right_camera_location[cam].pan = left_camera_location[cam].pan;
    }
}
/// <summary>
/// Calculate the position of the robots head and cameras for this pose
/// the positions returned are relative to the robots centre of rotation
/// </summary>
/// <param name="body_width_mm">width of the robot body in millimetres</param>
/// <param name="body_length_mm">length of the robot body in millimetres</param>
/// <param name="body_centre_of_rotation_x">x centre of rotation of the robot, relative to the top left corner</param>
/// <param name="body_centre_of_rotation_y">y centre of rotation of the robot, relative to the top left corner</param>
/// <param name="body_centre_of_rotation_z">z centre of rotation of the robot, relative to the top left corner</param>
/// <param name="head_centroid_x">head centroid x position in millimetres relative to the top left corner of the body</param>
/// <param name="head_centroid_y">head centroid y position in millimetres relative to the top left corner of the body</param>
/// <param name="head_centroid_z">head centroid z position in millimetres relative to the top left corner of the body</param>
/// <param name="head_pan">head pan angle in radians</param>
/// <param name="head_tilt">head tilt angle in radians</param>
/// <param name="head_roll">head roll angle in radians</param>
/// <param name="baseline_mm">stereo camera baseline in millimetres</param>
/// <param name="stereo_camera_position_x">stereo camera x position in millimetres relative to the head centroid</param>
/// <param name="stereo_camera_position_y">stereo camera y position in millimetres relative to the head centroid</param>
/// <param name="stereo_camera_position_z">stereo camera z position in millimetres relative to the head centroid</param>
/// <param name="stereo_camera_pan">stereo camera pan in radians relative to the head</param>
/// <param name="stereo_camera_tilt">stereo camera tilt in radians relative to the head</param>
/// <param name="stereo_camera_roll">stereo camera roll in radians relative to the head</param>
/// <param name="head_location">returned location/orientation of the robot head</param>
/// <param name="camera_centre_location">returned stereo camera centre position/orientation</param>
/// <param name="left_camera_location">returned left camera position/orientation</param>
/// <param name="right_camera_location">returned right camera position/orientation</param>
public static void calculateCameraPositions(
    float body_width_mm,
    float body_length_mm,
    float body_centre_of_rotation_x,
    float body_centre_of_rotation_y,
    float body_centre_of_rotation_z,
    float head_centroid_x,
    float head_centroid_y,
    float head_centroid_z,
    float head_pan,
    float head_tilt,
    float head_roll,
    float baseline_mm,
    float stereo_camera_position_x,
    float stereo_camera_position_y,
    float stereo_camera_position_z,
    float stereo_camera_pan,
    float stereo_camera_tilt,
    float stereo_camera_roll,
    ref pos3D head_location,
    ref pos3D camera_centre_location,
    ref pos3D left_camera_location,
    ref pos3D right_camera_location)
{
    // calculate the position of the centre of the head relative to
    // the centre of rotation of the robots body
    // NOTE(review): each coordinate simplifies to
    // body_centre_of_rotation - body_dimension + head_centroid, i.e. the
    // half-dimension is subtracted twice - confirm this is the intended origin
    pos3D head_centroid = new pos3D(
        -(body_width_mm * 0.5f) + (body_centre_of_rotation_x - (body_width_mm * 0.5f)) + head_centroid_x,
        -(body_length_mm * 0.5f) + (body_centre_of_rotation_y - (body_length_mm * 0.5f)) + head_centroid_y,
        head_centroid_z);

    // location of the centre of the head
    // adjusted for the robot pose and the head pan and tilt angle.
    // Note that the positions and orientations of individual cameras
    // on the head have already been accounted for within stereoModel.createObservation
    // NOTE(review): head_roll is not applied here (roll argument is 0) - confirm
    pos3D head_locn = head_centroid.rotate(
        head_pan, head_tilt, 0);
    head_location.copyFrom(head_locn);

    // calculate the position of the centre of the stereo camera
    // (baseline centre point)
    pos3D camera_centre_locn = new pos3D(
        stereo_camera_position_x,
        stereo_camera_position_y,
        stereo_camera_position_z);

    // rotate the stereo camera
    camera_centre_locn = camera_centre_locn.rotate(
        stereo_camera_pan + head_pan,
        stereo_camera_tilt + head_tilt,
        stereo_camera_roll + head_roll);

    // move the stereo camera relative to the head position
    camera_centre_location = camera_centre_locn.translate(
        head_location.x,
        head_location.y,
        head_location.z);

    // where are the left and right cameras?
    // we need to know this for the origins of the vacancy models
    float half_baseline_length = baseline_mm * 0.5f;
    pos3D left_camera_locn = new pos3D(-half_baseline_length, 0, 0);
    left_camera_locn = left_camera_locn.rotate(stereo_camera_pan + head_pan, stereo_camera_tilt + head_tilt, stereo_camera_roll + head_roll);
    // the right camera mirrors the left about the baseline centre
    pos3D right_camera_locn = new pos3D(-left_camera_locn.x, -left_camera_locn.y, -left_camera_locn.z);
    left_camera_location = left_camera_locn.translate(camera_centre_location.x, camera_centre_location.y, camera_centre_location.z);
    right_camera_location = right_camera_locn.translate(camera_centre_location.x, camera_centre_location.y, camera_centre_location.z);
    right_camera_location.pan = left_camera_location.pan;
}
/// <summary>
/// calculate the position of the robots head and cameras for this pose
/// </summary>
/// <param name="rob">robot object</param>
/// <param name="head_location">returned location of the centre of the head</param>
/// <param name="camera_centre_location">returned location of the centre of each stereo camera</param>
/// <param name="left_camera_location">returned location of the left camera within each stereo camera</param>
/// <param name="right_camera_location">returned location of the right camera within each stereo camera</param>
protected void calculateCameraPositions(
    robot rob,
    ref pos3D head_location,
    ref pos3D[] camera_centre_location,
    ref pos3D[] left_camera_location,
    ref pos3D[] right_camera_location)
{
    // calculate the position of the centre of the head relative to
    // the centre of rotation of the robots body
    pos3D head_centroid = new pos3D(
        -(rob.BodyWidth_mm / 2) + rob.head.x,
        -(rob.BodyLength_mm / 2) + rob.head.y,
        rob.head.z);

    // location of the centre of the head on the grid map
    // adjusted for the robot pose and the head pan and tilt angle.
    // Note that the positions and orientations of individual cameras
    // on the head have already been accounted for within stereoModel.createObservation
    pos3D head_locn = head_centroid.rotate(rob.head.pan + pan, rob.head.tilt, 0);
    head_locn = head_locn.translate(x, y, 0);
    head_location.copyFrom(head_locn);

    for (int cam = 0; cam < rob.head.no_of_stereo_cameras; cam++)
    {
        // calculate the position of the centre of the stereo camera
        // (baseline centre point)
        // BUGFIX: the z coordinate previously copied the camera's y value
        // (copy-paste error); the static overload of this method uses z here
        pos3D camera_centre_locn = new pos3D(
            rob.head.cameraPositionHeadRelative[cam].x,
            rob.head.cameraPositionHeadRelative[cam].y,
            rob.head.cameraPositionHeadRelative[cam].z);
        camera_centre_locn = camera_centre_locn.rotate(
            rob.head.cameraPositionHeadRelative[cam].pan + rob.head.pan + pan,
            rob.head.cameraPositionHeadRelative[cam].tilt,
            rob.head.cameraPositionHeadRelative[cam].roll);
        camera_centre_location[cam] = camera_centre_locn.translate(head_location.x, head_location.y, head_location.z);

        // where are the left and right cameras?
        // we need to know this for the origins of the vacancy models
        float half_baseline_length = rob.head.cameraBaseline[cam] / 2;
        pos3D left_camera_locn = new pos3D(-half_baseline_length, 0, 0);
        left_camera_locn = left_camera_locn.rotate(
            rob.head.cameraPositionHeadRelative[cam].pan + rob.head.pan + pan,
            rob.head.cameraPositionHeadRelative[cam].tilt,
            rob.head.cameraPositionHeadRelative[cam].roll);
        // the right camera mirrors the left about the baseline centre
        pos3D right_camera_locn = new pos3D(-left_camera_locn.x, -left_camera_locn.y, -left_camera_locn.z);
        left_camera_location[cam] = left_camera_locn.translate(camera_centre_location[cam].x, camera_centre_location[cam].y, camera_centre_location[cam].z);
        right_camera_location[cam] = right_camera_locn.translate(camera_centre_location[cam].x, camera_centre_location[cam].y, camera_centre_location[cam].z);
        right_camera_location[cam].pan = left_camera_location[cam].pan;
    }
}
/// <summary>
/// probes the grid and returns a set of ranges
/// </summary>
/// <param name="pose">current best pose estimate</param>
/// <param name="camera_x_mm">camera sensor x coordinate</param>
/// <param name="camera_y_mm">camera sensor y coordinate</param>
/// <param name="camera_z_mm">camera sensor z coordinate</param>
/// <param name="camera_pan">camera pan in radians</param>
/// <param name="camera_tilt">camera tilt in radians</param>
/// <param name="camera_roll">camera roll in radians</param>
/// <param name="camera_FOV_degrees">camera field of view in degrees</param>
/// <param name="camera_resolution_width">camera resolution pixels horizontal</param>
/// <param name="camera_resolution_height">camera resolution pixels vertical</param>
/// <param name="step_size">sampling step size in pixels</param>
/// <param name="max_range_mm">maximum range</param>
/// <param name="use_ground_plane">whether to detect the ground plane, z=0</param>
/// <param name="range_buffer">buffer storing returned ranges (-1 = range unknown)</param>
public void ProbeView(
    particlePose pose,
    float camera_x_mm,
    float camera_y_mm,
    float camera_z_mm,
    float camera_pan,
    float camera_tilt,
    float camera_roll,
    float camera_FOV_degrees,
    int camera_resolution_width,
    int camera_resolution_height,
    int step_size,
    float max_range_mm,
    bool use_ground_plane,
    float[] range_buffer)
{
    // horizontal field of view in radians, and vertical field of view
    // scaled by the image aspect ratio
    float fov_horizontal = camera_FOV_degrees * (float)Math.PI / 180.0f;
    float fov_vertical = fov_horizontal * camera_resolution_height / camera_resolution_width;

    // cast one ray per sampled pixel, writing ranges sequentially
    int ray_index = 0;
    for (int py = 0; py < camera_resolution_height; py += step_size)
    {
        // tilt of this image row relative to the optical axis
        float ray_tilt = (py * fov_vertical / camera_resolution_height) - (fov_vertical * 0.5f);
        float target_z = max_range_mm * (float)Math.Tan(ray_tilt);

        for (int px = 0; px < camera_resolution_width; px += step_size, ray_index++)
        {
            // pan of this image column relative to the optical axis
            float ray_pan = (px * fov_horizontal / camera_resolution_width) - (fov_horizontal * 0.5f);
            float target_x = max_range_mm * (float)Math.Tan(ray_pan);

            // ray endpoint at maximum range, rotated into the camera orientation
            pos3D ray_end = new pos3D(target_x, max_range_mm, target_z);
            pos3D rotated_end = ray_end.rotate(camera_pan, camera_tilt, camera_roll);

            range_buffer[ray_index] = ProbeRange(
                pose,
                camera_x_mm,
                camera_y_mm,
                camera_z_mm,
                camera_x_mm + rotated_end.x,
                camera_y_mm + rotated_end.y,
                camera_z_mm + rotated_end.z,
                use_ground_plane);
        }
    }
}
/// <summary>
/// returns a version of this evidence ray rotated through the given
/// pan angle in the XY plane. This is used for generating trial poses.
/// </summary>
/// <param name="extra_pan">pan angle to be added</param>
/// <param name="extra_tilt">tilt angle to be added</param>
/// <param name="extra_roll">roll angle to be added</param>
/// <param name="translation_x">new observation x coordinate</param>
/// <param name="translation_y">new observation y coordinate</param>
/// <param name="translation_z">new observation z coordinate</param>
/// <returns>rotated and translated evidence ray object</returns>
public evidenceRay trialPose(
    float extra_pan,
    float extra_tilt,
    float extra_roll,
    float translation_x,
    float translation_y,
    float translation_z)
{
    evidenceRay trial = new evidenceRay();

    // combined orientation of the trial ray
    float pan_total = pan_angle + extra_pan;
    float tilt_total = tilt_angle + extra_tilt;
    float roll_total = roll_angle + extra_roll;

    // as observed from the centre of the camera baseline
    trial.observedFrom = new pos3D(translation_x, translation_y, translation_z);
    trial.fattestPoint = fattestPoint;

    // rotate and translate both ends of the ray:
    // vertex 0 at the start distance, vertex 1 at the far end
    float[] vertex_distances = { start_dist, start_dist + length };
    for (int v = 0; v < vertex_distances.Length; v++)
    {
        pos3D end_point = new pos3D(0, vertex_distances[v], 0);
        pos3D rotated = end_point.rotate(pan_total, tilt_total, roll_total);
        rotated.x += translation_x;
        rotated.y += translation_y;
        rotated.z += translation_z;
        trial.vertices[v] = rotated;
    }

    trial.pan_angle = pan_total;
    trial.tilt_angle = tilt_total;
    trial.roll_angle = roll_total;

    // quantise angles into index bins
    // NOTE(review): pan_steps is used for the tilt and roll bins as well,
    // matching the original implementation
    const float two_pi = 6.2831854f;
    trial.pan_index = (int)(pan_total * pan_steps / two_pi);
    trial.tilt_index = (int)(tilt_total * pan_steps / two_pi);
    trial.roll_index = (int)(roll_total * pan_steps / two_pi);

    // copy remaining ray properties unchanged
    trial.length = length;
    trial.width = width;
    trial.start_dist = start_dist;
    trial.disparity = disparity;
    for (int col = 0; col < 3; col++)
    {
        trial.colour[col] = colour[col];
    }
    return (trial);
}
/// <summary>
/// test harness: builds a set of synthetic stereo features, generates
/// evidence rays from them at observer rotations of 0, 90, 180 and 270
/// degrees, and saves a bitmap of the projected rays for each rotation
/// </summary>
public void CreateObservation()
{
    // synthetic stereo camera parameters
    float baseline = 120;
    int image_width = 320;
    int image_height = 240;
    float FOV_degrees = 68;
    int no_of_stereo_features = 10;
    // features are packed as 4 floats each; colours as 3 bytes each
    float[] stereo_features = new float[no_of_stereo_features*4];
    byte[] stereo_features_colour = new byte[no_of_stereo_features*3];
    bool translate = false;

    // features evenly spaced along the middle row of the image,
    // each with a uniform grey colour
    for (int i = 0; i < no_of_stereo_features; i++)
    {
        stereo_features[i*4] = 1;
        stereo_features[i*4+1] = i * image_width / no_of_stereo_features;
        stereo_features[i*4+2] = image_height/2;
        stereo_features[i*4+3] = 1;
        stereo_features_colour[i*3] = 200;
        stereo_features_colour[i*3+1] = 200;
        stereo_features_colour[i*3+2] = 200;
    }

    for (int rotation_degrees = 0; rotation_degrees < 360; rotation_degrees += 90)
    {
        // observer at the origin, panned by the current rotation
        stereoModel model = new stereoModel();
        pos3D observer = new pos3D(0,0,0);
        observer = observer.rotate(rotation_degrees/180.0f*(float)Math.PI,0,0);
        List<evidenceRay> rays = model.createObservation(
            observer,
            baseline,
            image_width,
            image_height,
            FOV_degrees,
            stereo_features,
            stereo_features_colour,
            translate);

        // find the bounding box of all ray vertices (tx,ty top left;
        // bx,by bottom right), printing each vertex for inspection
        float tx = float.MaxValue;
        float ty = float.MaxValue;
        float bx = float.MinValue;
        float by = float.MinValue;
        for (int i = 0; i < no_of_stereo_features; i++)
        {
            //float pan_degrees = rays[i].pan_angle * 180 / (float)Math.PI;
            //Console.WriteLine(pan_degrees.ToString());
            for (int j = 0; j < rays[i].vertices.Length; j++)
            {
                Console.WriteLine("Vertex " + j.ToString());
                Console.WriteLine("xyz: " + rays[i].vertices[j].x.ToString() + " " + rays[i].vertices[j].y.ToString() + " " + rays[i].vertices[j].z.ToString());
                if (rays[i].vertices[j].x < tx) tx = rays[i].vertices[j].x;
                if (rays[i].vertices[j].x > bx) bx = rays[i].vertices[j].x;
                if (rays[i].vertices[j].y < ty) ty = rays[i].vertices[j].y;
                if (rays[i].vertices[j].y > by) by = rays[i].vertices[j].y;
            }
        }

        // white background image
        int img_width = 640;
        Bitmap bmp = new Bitmap(img_width, img_width, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
        byte[] img = new byte[img_width * img_width * 3];
        for (int i = 0; i < img.Length; i++) img[i] = 255;

        // draw each ray as a black line, scaled to fit the bounding box
        for (int i = 0; i < no_of_stereo_features; i++)
        {
            int x0 = (int)((rays[i].vertices[0].x - tx) * img_width / (bx - tx));
            int y0 = (int)((rays[i].vertices[0].y - ty) * img_width / (by - ty));
            int x1 = (int)((rays[i].vertices[1].x - tx) * img_width / (bx - tx));
            int y1 = (int)((rays[i].vertices[1].y - ty) * img_width / (by - ty));
            drawing.drawLine(img, img_width, img_width, x0,y0,x1,y1,0,0,0,0,false);
        }

        // save the rendered rays for this rotation
        BitmapArrayConversions.updatebitmap_unsafe(img, bmp);
        bmp.Save("dpslam_tests_createobservation_" + rotation_degrees.ToString() + ".bmp", System.Drawing.Imaging.ImageFormat.Bmp);
        Console.WriteLine("dpslam_tests_createobservation_" + rotation_degrees.ToString() + ".bmp");
    }
}