/// <summary>
/// Creates the inverse sensor model lookup tables for every stereo camera
/// at every occupancy grid resolution supplied by the given buffer.
/// Cameras with identical image dimensions share the same stereoModel
/// objects rather than recomputing identical lookup tables.
/// </summary>
/// <param name="buf">buffer supplying the grid cell size (mm) for each grid level</param>
public void CreateSensorModels(metagridBuffer buf)
{
    List<int> cell_sizes = buf.GetCellSizes();

    // one row of models per stereo camera, one column per grid level
    sensormodel = new stereoModel[image_width.Length][];
    for (int stereo_cam = 0; stereo_cam < image_width.Length; stereo_cam++)
        sensormodel[stereo_cam] = new stereoModel[cell_sizes.Count];

    for (int stereo_cam = 0; stereo_cam < image_width.Length; stereo_cam++)
    {
        for (int grid_level = 0; grid_level < cell_sizes.Count; grid_level++)
        {
            if (stereo_cam > 0)
            {
                // reuse the previous camera's model when the image dimensions match.
                // BUGFIX: the lookup table depends upon both image width and height,
                // so the height must also be compared before sharing (previously
                // only the width was checked).
                if ((image_width[stereo_cam - 1] == image_width[stereo_cam]) &&
                    (image_height[stereo_cam - 1] == image_height[stereo_cam]))
                {
                    sensormodel[stereo_cam][grid_level] = sensormodel[stereo_cam - 1][grid_level];
                }
            }
            if (sensormodel[stereo_cam][grid_level] == null)
            {
                // no shared model available - compute a fresh lookup table
                sensormodel[stereo_cam][grid_level] = new stereoModel();
                sensormodel[stereo_cam][grid_level].createLookupTable(
                    cell_sizes[grid_level],
                    image_width[stereo_cam],
                    image_height[stereo_cam]);
            }
        }
    }
}
/// <summary>
/// Constructor: allocates the feature disparity buffer and creates the
/// supporting tracking, radar, head and sensor model objects.
/// </summary>
public processstereo()
{
    // three values stored per feature, up to MAX_FEATURES features
    disparities = new float[MAX_FEATURES * 3];

    // supporting objects
    stereo_model = new stereoModel();
    robot_head = new stereoHead(1);       // head with a single stereo camera
    radar = new sentienceRadar();
    tracking = new sentienceTracking();
}
/// <summary>
/// Checks that a ray created from a single stereo feature
/// has both of its vertices populated.
/// </summary>
public void CreateRay()
{
    stereoModel sensor_model = new stereoModel();

    // create one evidence ray from an image position and disparity
    evidenceRay single_ray = sensor_model.createRay(20, 10, 5, 0, 255, 255, 255);

    Assert.IsNotNull(single_ray.vertices);
    for (int v = 0; v <= 1; v++)
        Assert.IsNotNull(single_ray.vertices[v]);
}
/// <summary>
/// initialise variables prior to performing test routines
/// </summary>
private void init()
{
    // working buffers
    grid_layer = new float[grid_dimension, grid_dimension, 3];
    stereo_features = new float[900];
    stereo_uncertainties = new float[900];
    pos3D_x = new float[4];
    pos3D_y = new float[4];

    // stereo vision objects
    stereo_model = new stereoModel();
    robot_head = new stereoHead(4);

    // ray display image, wired to the Gtk output widget
    img_rays = new byte[standard_width * standard_height * 3];
    imgOutput.Pixbuf = GtkBitmap.createPixbuf(standard_width, standard_height);
    GtkBitmap.setBitmap(img_rays, imgOutput);
}
/// <summary>
/// Menu handler: renders the sensor model's gaussian distribution
/// into the rays picture box.
/// </summary>
private void gaussianFunctionToolStripMenuItem_Click(object sender, EventArgs e)
{
    // (re)initialise working state
    grid_layer = new float[grid_dimension, grid_dimension, 3];
    stereo_features = new float[900];
    stereo_uncertainties = new float[900];
    pos3D_x = new float[4];
    pos3D_y = new float[4];
    stereo_model = new stereoModel();
    robot_head = new stereoHead(4);

    // output buffer and the bitmap shown in the picture box
    img_rays = new byte[standard_width * standard_height * 3];
    rays = new Bitmap(standard_width, standard_height,
                      System.Drawing.Imaging.PixelFormat.Format24bppRgb);
    picRays.Image = rays;

    // draw the distribution, then copy the byte buffer into the displayed bitmap
    stereo_model.showDistribution(img_rays, standard_width, standard_height);
    BitmapArrayConversions.updatebitmap_unsafe(img_rays, (Bitmap)picRays.Image);
}
/// <summary>
/// Menu handler: renders the probability distributions of multiple
/// stereo rays into the rays picture box.
/// </summary>
private void multipleStereoRaysToolStripMenuItem_Click(object sender, EventArgs e)
{
    // (re)initialise working state
    grid_layer = new float[grid_dimension, grid_dimension, 3];
    stereo_features = new float[900];
    stereo_uncertainties = new float[900];
    pos3D_x = new float[4];
    pos3D_y = new float[4];
    stereo_model = new stereoModel();
    robot_head = new stereoHead(4);

    // output buffer and the bitmap shown in the picture box
    img_rays = new byte[standard_width * standard_height * 3];
    rays = new Bitmap(standard_width, standard_height,
                      System.Drawing.Imaging.PixelFormat.Format24bppRgb);
    picRays.Image = rays;

    // draw the ray probabilities, then copy the buffer into the displayed bitmap
    bool mirrored = false;
    stereo_model.showProbabilities(grid_layer, grid_dimension,
                                   img_rays, standard_width, standard_height,
                                   false, true, mirrored);
    BitmapArrayConversions.updatebitmap_unsafe(img_rays, (Bitmap)picRays.Image);
}
/// <summary>
/// Main form constructor: configures the file/folder dialogs
/// and creates the sensor model used for mapping.
/// </summary>
public frmMain()
{
    InitializeComponent();

    // folder browser used to locate stereo images;
    // users may not create new folders from it
    folderBrowserDialog1.Description = "Select the directory where stereo images are located";
    folderBrowserDialog1.ShowNewFolderButton = false;
    txtImagesDirectory.Text = images_directory;

    // calibration data is loaded from xml files
    openFileDialog1.DefaultExt = "xml";
    openFileDialog1.Filter = "xml files (*.xml)|*.xml";
    txtCalibrationFilename.Text = calibration_filename;

    // sensor model used for mapping
    inverseSensorModel = new stereoModel();
}
/// <summary>
/// parse an xml node to extract sensor model parameters, recursing into
/// child nodes.  The ref parameters act as a parse cursor carried across
/// the recursive calls: camera_index / grid_level track which slot of the
/// sensormodel array the next "Model" node belongs to.
/// </summary>
/// <param name="xnod">current xml node</param>
/// <param name="level">recursion depth within the xml tree</param>
/// <param name="no_of_stereo_cameras">number of stereo cameras, read from "NoOfStereoCameras"</param>
/// <param name="no_of_grid_levels">number of grid resolutions, read from "NoOfGridLevels"</param>
/// <param name="camera_index">camera whose models are currently being loaded</param>
/// <param name="grid_level">grid level most recently loaded for that camera</param>
public void LoadFromXmlSensorModels(
    XmlNode xnod, int level,
    ref int no_of_stereo_cameras,
    ref int no_of_grid_levels,
    ref int camera_index,
    ref int grid_level)
{
    XmlNode xnodWorking;

    if (xnod.Name == "NoOfStereoCameras")
    {
        no_of_stereo_cameras = Convert.ToInt32(xnod.InnerText);
    }

    if (xnod.Name == "NoOfGridLevels")
    {
        no_of_grid_levels = Convert.ToInt32(xnod.InnerText);

        // reset the cursor; grid_level starts at -1 so that the first
        // "Model" node increments it to slot [0][0]
        camera_index = 0;
        grid_level = -1;

        // pre-allocate every model with a placeholder ray lookup,
        // to be filled in as "Model" nodes are encountered
        sensormodel = new stereoModel[no_of_stereo_cameras][];
        for (int stereo_cam = 0; stereo_cam < no_of_stereo_cameras; stereo_cam++)
        {
            sensormodel[stereo_cam] = new stereoModel[no_of_grid_levels];
            for (int size = 0; size < no_of_grid_levels; size++)
            {
                sensormodel[stereo_cam][size] = new stereoModel();
                sensormodel[stereo_cam][size].ray_model = new rayModelLookup(1, 1);
            }
        }
    }

    if (xnod.Name == "Model")
    {
        // advance the cursor; wrap to the next camera once all
        // grid levels for the current camera have been loaded
        grid_level++;
        if (grid_level >= no_of_grid_levels)
        {
            grid_level = 0;
            camera_index++;
        }

        List<string> rayModelsData = new List<string>();
        sensormodel[camera_index][grid_level].ray_model.LoadFromXml(xnod, level + 1, rayModelsData);
        sensormodel[camera_index][grid_level].ray_model.LoadSensorModelData(rayModelsData);
        if (rayModelsData.Count == 0)
        {
            Console.WriteLine("Warning: ray models not loaded");
        }
    }

    // call recursively on all children of the current node
    if (xnod.HasChildNodes)
    {
        xnodWorking = xnod.FirstChild;
        while (xnodWorking != null)
        {
            LoadFromXmlSensorModels(
                xnodWorking, level + 1,
                ref no_of_stereo_cameras,
                ref no_of_grid_levels,
                ref camera_index,
                ref grid_level);
            xnodWorking = xnodWorking.NextSibling;
        }
    }
}
/// <summary>
/// Creates an observation (a set of evidence rays) from a row of synthetic
/// stereo features, at four observer pan angles (0, 90, 180, 270 degrees),
/// and saves a plan-view bitmap of the rays for each angle.
/// Ray vertex coordinates are written to the console for inspection.
/// </summary>
public void CreateObservation()
{
    // synthetic stereo camera parameters
    float baseline = 120;
    int image_width = 320;
    int image_height = 240;
    float FOV_degrees = 68;

    // features are packed four floats each; colours three bytes each
    int no_of_stereo_features = 10;
    float[] stereo_features = new float[no_of_stereo_features * 4];
    byte[] stereo_features_colour = new byte[no_of_stereo_features * 3];
    bool translate = false;

    // spread features evenly across the image width along the middle row,
    // all with the same colour (200,200,200)
    for (int i = 0; i < no_of_stereo_features; i++)
    {
        stereo_features[i * 4] = 1;
        stereo_features[i * 4 + 1] = i * image_width / no_of_stereo_features;
        stereo_features[i * 4 + 2] = image_height / 2;
        stereo_features[i * 4 + 3] = 1;
        stereo_features_colour[i * 3] = 200;
        stereo_features_colour[i * 3 + 1] = 200;
        stereo_features_colour[i * 3 + 2] = 200;
    }

    // repeat for four observer orientations a quarter turn apart
    for (int rotation_degrees = 0; rotation_degrees < 360; rotation_degrees += 90)
    {
        stereoModel model = new stereoModel();
        pos3D observer = new pos3D(0, 0, 0);
        observer = observer.rotate(rotation_degrees / 180.0f * (float)Math.PI, 0, 0);

        List<evidenceRay> rays = model.createObservation(
            observer,
            baseline,
            image_width,
            image_height,
            FOV_degrees,
            stereo_features,
            stereo_features_colour,
            translate);

        // accumulate the bounding box of all ray vertices
        // (tx,ty = minimum corner, bx,by = maximum corner)
        float tx = float.MaxValue;
        float ty = float.MaxValue;
        float bx = float.MinValue;
        float by = float.MinValue;
        for (int i = 0; i < no_of_stereo_features; i++)
        {
            //float pan_degrees = rays[i].pan_angle * 180 / (float)Math.PI;
            //Console.WriteLine(pan_degrees.ToString());
            for (int j = 0; j < rays[i].vertices.Length; j++)
            {
                Console.WriteLine("Vertex " + j.ToString());
                Console.WriteLine("xyz: " + rays[i].vertices[j].x.ToString() + " " +
                                  rays[i].vertices[j].y.ToString() + " " +
                                  rays[i].vertices[j].z.ToString());
                if (rays[i].vertices[j].x < tx) { tx = rays[i].vertices[j].x; }
                if (rays[i].vertices[j].x > bx) { bx = rays[i].vertices[j].x; }
                if (rays[i].vertices[j].y < ty) { ty = rays[i].vertices[j].y; }
                if (rays[i].vertices[j].y > by) { by = rays[i].vertices[j].y; }
            }
        }

        // render the rays into a square white image, scaled to the bounding box
        int img_width = 640;
        Bitmap bmp = new Bitmap(img_width, img_width, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
        byte[] img = new byte[img_width * img_width * 3];
        for (int i = 0; i < img.Length; i++) { img[i] = 255; }
        for (int i = 0; i < no_of_stereo_features; i++)
        {
            // draw the segment between the first two vertices of each ray
            int x0 = (int)((rays[i].vertices[0].x - tx) * img_width / (bx - tx));
            int y0 = (int)((rays[i].vertices[0].y - ty) * img_width / (by - ty));
            int x1 = (int)((rays[i].vertices[1].x - tx) * img_width / (bx - tx));
            int y1 = (int)((rays[i].vertices[1].y - ty) * img_width / (by - ty));
            drawing.drawLine(img, img_width, img_width, x0, y0, x1, y1, 0, 0, 0, 0, false);
        }

        // save one bitmap per rotation angle
        BitmapArrayConversions.updatebitmap_unsafe(img, bmp);
        bmp.Save("dpslam_tests_createobservation_" + rotation_degrees.ToString() + ".bmp",
                 System.Drawing.Imaging.ImageFormat.Bmp);
        Console.WriteLine("dpslam_tests_createobservation_" + rotation_degrees.ToString() + ".bmp");
    }
}
/// <summary>
/// End-to-end test: creates a simple occupancy grid, generates a band of
/// random stereo disparities, converts them to evidence rays via the inverse
/// sensor model, inserts the rays into the grid, then saves overhead, front
/// and probability-profile images of the resulting grid.
/// </summary>
public void InsertRays()
{
    // test parameters
    int no_of_stereo_features = 2000;
    int image_width = 640;
    int image_height = 480;
    int no_of_stereo_cameras = 1;
    int localisationRadius_mm = 16000;
    int maxMappingRange_mm = 16000;
    int cellSize_mm = 32;
    int dimension_cells = 16000 / cellSize_mm;
    int dimension_cells_vertical = dimension_cells / 2;
    float vacancyWeighting = 0.5f;
    float FOV_horizontal = 78 * (float)Math.PI / 180.0f;

    // create a grid
    Console.WriteLine("Creating grid");
    occupancygridSimple grid = new occupancygridSimple(
        dimension_cells,
        dimension_cells_vertical,
        cellSize_mm,
        localisationRadius_mm,
        maxMappingRange_mm,
        vacancyWeighting);
    Assert.AreNotEqual(grid, null, "object occupancygridSimple was not created");

    // create the inverse sensor model; vertical FOV is scaled by aspect ratio
    Console.WriteLine("Creating sensor models");
    stereoModel inverseSensorModel = new stereoModel();
    inverseSensorModel.FOV_horizontal = FOV_horizontal;
    inverseSensorModel.FOV_vertical = FOV_horizontal * image_height / image_width;
    inverseSensorModel.createLookupTable(cellSize_mm, image_width, image_height);
    //Assert.AreNotEqual(0, inverseSensorModel.ray_model.probability[1][5], "Ray model probabilities not updated");

    // observer parameters: pan stored in radians
    int pan_angle_degrees = 0;
    pos3D observer = new pos3D(0, 0, 0);
    observer.pan = pan_angle_degrees * (float)Math.PI / 180.0f;

    // place the left/right cameras either side of the observer,
    // then rotate and translate them into the observer's pose
    float stereo_camera_baseline_mm = 100;
    pos3D left_camera_location = new pos3D(stereo_camera_baseline_mm * 0.5f, 0, 0);
    pos3D right_camera_location = new pos3D(-stereo_camera_baseline_mm * 0.5f, 0, 0);
    left_camera_location = left_camera_location.rotate(observer.pan, observer.tilt, observer.roll);
    right_camera_location = right_camera_location.rotate(observer.pan, observer.tilt, observer.roll);
    left_camera_location = left_camera_location.translate(observer.x, observer.y, observer.z);
    right_camera_location = right_camera_location.translate(observer.x, observer.y, observer.z);

    float FOV_degrees = 78;
    // features packed as x, y, disparity triplets
    float[] stereo_features = new float[no_of_stereo_features * 3];
    byte[,] stereo_features_colour = new byte[no_of_stereo_features, 3];
    float[] stereo_features_uncertainties = new float[no_of_stereo_features];

    // create some stereo disparities within the field of view:
    // random x positions in a narrow horizontal band just below image centre,
    // all with disparity 7 and random colours (seeded RNG for repeatability)
    Console.WriteLine("Adding disparities");
    //MersenneTwister rnd = new MersenneTwister(0);
    Random rnd = new Random(0);
    for (int correspondence = 0; correspondence < no_of_stereo_features; correspondence++)
    {
        float x = rnd.Next(image_width - 1);
        float y = rnd.Next(image_height / 50) + (image_height / 2);
        float disparity = 7;
        if ((x < image_width / 5) || (x > image_width * 4 / 5))
        {
            disparity = 7; //15;
        }
        byte colour_red = (byte)rnd.Next(255);
        byte colour_green = (byte)rnd.Next(255);
        byte colour_blue = (byte)rnd.Next(255);

        stereo_features[correspondence * 3] = x;
        stereo_features[(correspondence * 3) + 1] = y;
        stereo_features[(correspondence * 3) + 2] = disparity;
        stereo_features_colour[correspondence, 0] = colour_red;
        stereo_features_colour[correspondence, 1] = colour_green;
        stereo_features_colour[correspondence, 2] = colour_blue;
        stereo_features_uncertainties[correspondence] = 0;
    }

    // create an observation as a set of rays from the stereo correspondence results
    List<evidenceRay>[] stereo_rays = new List<evidenceRay>[no_of_stereo_cameras];
    for (int cam = 0; cam < no_of_stereo_cameras; cam++)
    {
        Console.WriteLine("Creating rays");
        stereo_rays[cam] = inverseSensorModel.createObservation(
            observer,
            stereo_camera_baseline_mm,
            image_width,
            image_height,
            FOV_degrees,
            stereo_features,
            stereo_features_colour,
            stereo_features_uncertainties,
            true);

        // insert rays into the grid
        Console.WriteLine("Throwing rays");
        for (int ray = 0; ray < stereo_rays[cam].Count; ray++)
        {
            grid.Insert(stereo_rays[cam][ray], inverseSensorModel.ray_model,
                        left_camera_location, right_camera_location, false);
        }
    }

    // save the result as an image
    Console.WriteLine("Saving grid");
    int debug_img_width = 640;
    int debug_img_height = 480;
    byte[] debug_img = new byte[debug_img_width * debug_img_height * 3];
    Bitmap bmp = new Bitmap(debug_img_width, debug_img_height,
                            System.Drawing.Imaging.PixelFormat.Format24bppRgb);

    // overhead view
    grid.Show(debug_img, debug_img_width, debug_img_height, false, false);
    BitmapArrayConversions.updatebitmap_unsafe(debug_img, bmp);
    bmp.Save("tests_occupancygrid_simple_InsertRays_overhead.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);

    // front view
    grid.ShowFront(debug_img, debug_img_width, debug_img_height, true);
    BitmapArrayConversions.updatebitmap_unsafe(debug_img, bmp);
    bmp.Save("tests_occupancygrid_simple_InsertRays_front.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);

    // side view of the probabilities: sample occupancy along a line through
    // the grid and record the min/max over cells that hold evidence
    float max_prob = -1;
    float min_prob = 1;
    float[] probs = new float[dimension_cells / 2];
    float[] mean_colour = new float[3];
    for (int y = dimension_cells / 2; y < dimension_cells; y++)
    {
        float p = grid.GetProbability(dimension_cells / 2, y, mean_colour);
        probs[y - (dimension_cells / 2)] = p;
        if (p != occupancygridSimple.NO_OCCUPANCY_EVIDENCE)
        {
            if (p < min_prob) { min_prob = p; }
            if (p > max_prob) { max_prob = p; }
        }
    }

    // plot the probability profile: green above 0.5, red otherwise,
    // skipping cells with no evidence
    for (int i = 0; i < debug_img.Length; i++) { debug_img[i] = 255; }
    int prev_x = -1;
    int prev_y = debug_img_height / 2;
    for (int i = 0; i < probs.Length; i++)
    {
        if (probs[i] != occupancygridSimple.NO_OCCUPANCY_EVIDENCE)
        {
            int x = i * (debug_img_width - 1) / probs.Length;
            int y = debug_img_height - 1 - (int)((probs[i] - min_prob) / (max_prob - min_prob) * (debug_img_height - 1));
            int n = ((y * debug_img_width) + x) * 3;
            if (prev_x > -1)
            {
                int r = 255;
                int g = 0;
                int b = 0;
                if (probs[i] > 0.5f)
                {
                    r = 0;
                    g = 255;
                    b = 0;
                }
                drawing.drawLine(debug_img, debug_img_width, debug_img_height,
                                 prev_x, prev_y, x, y, r, g, b, 0, false);
            }
            prev_x = x;
            prev_y = y;
        }
    }

    // horizontal reference line at probability 0.5
    int y_zero = debug_img_height - 1 - (int)((0.5f - min_prob) / (max_prob - min_prob) * (debug_img_height - 1));
    drawing.drawLine(debug_img, debug_img_width, debug_img_height,
                     0, y_zero, debug_img_width - 1, y_zero, 0, 0, 0, 0, false);
    BitmapArrayConversions.updatebitmap_unsafe(debug_img, bmp);
    bmp.Save("tests_occupancygrid_simple_InsertRays_probs.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
}
/// <summary>
/// Tests rotation of an evidence ray: creates a ray from the inverse sensor
/// model, draws it (green), rotates it by 30 degrees via translateRotate,
/// then draws the rotated ray (red) and saves the image for inspection.
/// </summary>
public void EvidenceRayRotation()
{
    // white debug image
    int debug_img_width = 640;
    int debug_img_height = 480;
    byte[] debug_img = new byte[debug_img_width * debug_img_height * 3];
    for (int i = (debug_img_width * debug_img_height * 3) - 1; i >= 0; i--)
    {
        debug_img[i] = 255;
    }
    Bitmap bmp = new Bitmap(debug_img_width, debug_img_height,
                            System.Drawing.Imaging.PixelFormat.Format24bppRgb);

    int cellSize_mm = 32;
    int image_width = 320;
    int image_height = 240;

    Console.WriteLine("Creating sensor models");
    stereoModel inverseSensorModel = new stereoModel();
    inverseSensorModel.createLookupTable(cellSize_mm, image_width, image_height);

    // create a ray from the image centre with disparity 4
    float FOV_horizontal = 78 * (float)Math.PI / 180.0f;
    inverseSensorModel.FOV_horizontal = FOV_horizontal;
    inverseSensorModel.FOV_vertical = FOV_horizontal * image_height / image_width;
    evidenceRay ray = inverseSensorModel.createRay(
        image_width / 2, image_height / 2, 4,
        0, 255, 255, 255);

    Assert.AreNotEqual(null, ray, "No ray was created");
    Assert.AreNotEqual(null, ray.vertices, "No ray vertices were created");

    // remember the original vertices before the rotation
    pos3D[] start_vertices = (pos3D[])ray.vertices.Clone();
    Console.WriteLine("x,y,z: " + start_vertices[0].x.ToString() + ", " +
                      start_vertices[0].y.ToString() + ", " +
                      start_vertices[0].z.ToString());

    // draw the original ray outline in green, scaled down by 50
    for (int i = 0; i < ray.vertices.Length; i++)
    {
        int j = i + 1;
        if (j == ray.vertices.Length) { j = 0; }
        int x0 = (debug_img_width / 2) + (int)ray.vertices[i].x / 50;
        int y0 = (debug_img_height / 2) + (int)ray.vertices[i].y / 50;
        int x1 = (debug_img_width / 2) + (int)ray.vertices[j].x / 50;
        int y1 = (debug_img_height / 2) + (int)ray.vertices[j].y / 50;
        drawing.drawLine(debug_img, debug_img_width, debug_img_height,
                         x0, y0, x1, y1, 0, 255, 0, 0, false);
    }

    float angle_degrees = 30;
    float angle_radians = angle_degrees / 180.0f * (float)Math.PI;
    pos3D rotation = new pos3D(0, 0, 0);
    // BUGFIX: pan is expressed in radians elsewhere in this code
    // (e.g. observer.pan = pan_angle_degrees * PI / 180).  Previously the
    // raw degree value (30) was assigned here and the computed
    // angle_radians was never used, rotating the ray by ~1719 degrees.
    rotation.pan = angle_radians;
    ray.translateRotate(rotation);

    Console.WriteLine("x,y,z: " + ray.vertices[0].x.ToString() + ", " +
                      ray.vertices[0].y.ToString() + ", " +
                      ray.vertices[0].z.ToString());

    // draw the rotated ray outline in red
    for (int i = 0; i < ray.vertices.Length; i++)
    {
        int j = i + 1;
        if (j == ray.vertices.Length) { j = 0; }
        int x0 = (debug_img_width / 2) + (int)ray.vertices[i].x / 50;
        int y0 = (debug_img_height / 2) + (int)ray.vertices[i].y / 50;
        int x1 = (debug_img_width / 2) + (int)ray.vertices[j].x / 50;
        int y1 = (debug_img_height / 2) + (int)ray.vertices[j].y / 50;
        drawing.drawLine(debug_img, debug_img_width, debug_img_height,
                         x0, y0, x1, y1, 255, 0, 0, 0, false);
    }

    BitmapArrayConversions.updatebitmap_unsafe(debug_img, bmp);
    bmp.Save("tests_occupancygrid_simple_EvidenceRayRotation.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
}
/// <summary>
/// Tests stereoModel.raysIntersection by computing the intersection region
/// (start/end/left/right points) of left and right camera rays for a set of
/// image x positions, then rendering all the resulting quadrilaterals,
/// scaled to their common bounding box, into a saved image.
/// </summary>
public void RaysIntersection()
{
    // white debug image
    int debug_img_width = 640;
    int debug_img_height = 480;
    byte[] debug_img = new byte[debug_img_width * debug_img_height * 3];
    for (int i = (debug_img_width * debug_img_height * 3) - 1; i >= 0; i--)
    {
        debug_img[i] = 255;
    }
    Bitmap bmp = new Bitmap(debug_img_width, debug_img_height,
                            System.Drawing.Imaging.PixelFormat.Format24bppRgb);

    // bounding box accumulators; note min_y starts at 0 rather than
    // float.MaxValue (presumably so y never scales below the origin —
    // NOTE(review): confirm this asymmetry is intentional)
    float min_x = float.MaxValue, max_x = float.MinValue;
    float min_y = 0, max_y = float.MinValue;

    float ray_uncertainty = 0.5f;

    // per-example intersection region coordinates
    List<float> x_start = new List<float>();
    List<float> y_start = new List<float>();
    List<float> x_end = new List<float>();
    List<float> y_end = new List<float>();
    List<float> x_left = new List<float>();
    List<float> y_left = new List<float>();
    List<float> x_right = new List<float>();
    List<float> y_right = new List<float>();

    float disparity = 7;
    float x1 = 640 / 2;
    float x2 = x1 - disparity;      // corresponding feature x in the right image
    int grid_dimension = 2000;

    // camera geometry used to convert disparity to distance
    float focal_length = 5;
    float sensor_pixels_per_mm = 100;
    float baseline = 100;

    stereoModel inverseSensorModel = new stereoModel();
    inverseSensorModel.image_width = 640;
    inverseSensorModel.image_height = 480;

    // outer loop runs exactly once with disparity = 15
    // (the 15..15 step -5 range looks like a leftover from sweeping
    // several disparities — NOTE(review): confirm)
    for (disparity = 15; disparity >= 15; disparity -= 5)
    {
        // one example every 40 pixels across the image width
        for (int example = 0; example < 640 / 40; example++)
        {
            x1 = example * 40;
            x2 = x1 - disparity;
            float distance = stereoModel.DisparityToDistance(disparity, focal_length, sensor_pixels_per_mm, baseline);

            float curr_x_start = 0;
            float curr_y_start = 0;
            float curr_x_end = 0;
            float curr_y_end = 0;
            float curr_x_left = 0;
            float curr_y_left = 0;
            float curr_x_right = 0;
            float curr_y_right = 0;
            inverseSensorModel.raysIntersection(
                x1, x2, grid_dimension, ray_uncertainty,
                distance,
                ref curr_x_start, ref curr_y_start,
                ref curr_x_end, ref curr_y_end,
                ref curr_x_left, ref curr_y_left,
                ref curr_x_right, ref curr_y_right);
            /*
             * curr_y_start = -curr_y_start;
             * curr_y_end = -curr_y_end;
             * curr_y_left = -curr_y_left;
             * curr_y_right = -curr_y_right;
             */

            x_start.Add(curr_x_start);
            y_start.Add(curr_y_start);
            x_end.Add(curr_x_end);
            y_end.Add(curr_y_end);
            x_left.Add(curr_x_left);
            y_left.Add(curr_y_left);
            x_right.Add(curr_x_right);
            y_right.Add(curr_y_right);

            // grow the bounding box used later for scaling
            if (curr_x_end < min_x) { min_x = curr_x_end; }
            if (curr_x_end > max_x) { max_x = curr_x_end; }
            if (curr_x_left < min_x) { min_x = curr_x_left; }
            if (curr_x_right > max_x) { max_x = curr_x_right; }
            if (curr_y_start < min_y) { min_y = curr_y_start; }
            if (curr_y_end > max_y) { max_y = curr_y_end; }
            Console.WriteLine("curr_y_start: " + curr_y_start.ToString());
        }
    }

    // draw each intersection region as a quadrilateral
    // (start-left, left-end, end-right, right-start), scaled to the image
    // with y flipped so larger y values appear towards the top
    for (int i = 0; i < x_start.Count; i++)
    {
        float curr_x_start = (x_start[i] - min_x) * debug_img_width / (max_x - min_x);
        float curr_y_start = (y_start[i] - min_y) * debug_img_height / (max_y - min_y);
        float curr_x_end = (x_end[i] - min_x) * debug_img_width / (max_x - min_x);
        float curr_y_end = (y_end[i] - min_y) * debug_img_height / (max_y - min_y);
        float curr_x_left = (x_left[i] - min_x) * debug_img_width / (max_x - min_x);
        float curr_y_left = (y_left[i] - min_y) * debug_img_height / (max_y - min_y);
        float curr_x_right = (x_right[i] - min_x) * debug_img_width / (max_x - min_x);
        float curr_y_right = (y_right[i] - min_y) * debug_img_height / (max_y - min_y);

        curr_y_start = debug_img_height - 1 - curr_y_start;
        curr_y_end = debug_img_height - 1 - curr_y_end;
        curr_y_left = debug_img_height - 1 - curr_y_left;
        curr_y_right = debug_img_height - 1 - curr_y_right;

        //Console.WriteLine("max: " + max.ToString());

        drawing.drawLine(debug_img, debug_img_width, debug_img_height,
                         (int)curr_x_start, (int)curr_y_start,
                         (int)curr_x_left, (int)curr_y_left,
                         0, 0, 0, 0, false);
        drawing.drawLine(debug_img, debug_img_width, debug_img_height,
                         (int)curr_x_end, (int)curr_y_end,
                         (int)curr_x_left, (int)curr_y_left,
                         0, 0, 0, 0, false);
        drawing.drawLine(debug_img, debug_img_width, debug_img_height,
                         (int)curr_x_end, (int)curr_y_end,
                         (int)curr_x_right, (int)curr_y_right,
                         0, 0, 0, 0, false);
        drawing.drawLine(debug_img, debug_img_width, debug_img_height,
                         (int)curr_x_start, (int)curr_y_start,
                         (int)curr_x_right, (int)curr_y_right,
                         0, 0, 0, 0, false);
    }
    BitmapArrayConversions.updatebitmap_unsafe(debug_img, bmp);
    bmp.Save("tests_occupancygrid_simple_RaysIntersection.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
}