/// <summary>
/// Runs one stereo correspondence cycle: loads the calibration data and the
/// current full resolution image pair, computes the disparity map, reports
/// the matching time in the GUI and displays the resulting depth map.
/// </summary>
private void update()
{
    const int bytes_per_pixel = 3;

    // calibration parameters for the stereo rig
    stereointerface.loadCalibration(calibration_filename);

    // hand the current image pair to the correspondence object
    stereointerface.loadImage(fullres_left, image_width, image_height, true, bytes_per_pixel);
    stereointerface.loadImage(fullres_right, image_width, image_height, false, bytes_per_pixel);

    // quality/speed trade-off for the disparity map
    stereointerface.setDisparityMapCompression(horizontal_compression, vertical_compression);

    // time the stereo matching step and show the elapsed milliseconds
    clock.Start();
    stereointerface.stereoMatchRun(0, 8, correspondence_algorithm_type);
    long elapsed_mS = clock.Stop();
    txtStereoCorrespondenceTime.Buffer.Text = elapsed_mS.ToString();

    // retrieve the disparity map and display it as a bitmap
    byte[] disparity_img = new byte[image_width * image_height * 3];
    stereointerface.getDisparityMap(disparity_img, image_width, image_height, 0);
    picDepthMap.Pixbuf = GtkBitmap.createPixbuf(image_width, image_height);
    GtkBitmap.setBitmap(disparity_img, picDepthMap);
}
/// <summary>
/// Saves the current left and right camera images to the given filenames.
/// The image format is selected from the left filename's extension
/// (png, bmp, gif or jpg); any other extension saves nothing, matching
/// the original behaviour.
/// </summary>
/// <param name="left_image_filename">filename for the left image</param>
/// <param name="right_image_filename">filename for the right image</param>
private void SaveImages(
    string left_image_filename,
    string right_image_filename)
{
    // map the filename extension to an image format
    // (case-insensitive comparison instead of allocating via ToLower)
    System.Drawing.Imaging.ImageFormat format = null;
    if (left_image_filename.EndsWith("png", StringComparison.OrdinalIgnoreCase))
        format = System.Drawing.Imaging.ImageFormat.Png;
    else if (left_image_filename.EndsWith("bmp", StringComparison.OrdinalIgnoreCase))
        format = System.Drawing.Imaging.ImageFormat.Bmp;
    else if (left_image_filename.EndsWith("gif", StringComparison.OrdinalIgnoreCase))
        format = System.Drawing.Imaging.ImageFormat.Gif;
    else if (left_image_filename.EndsWith("jpg", StringComparison.OrdinalIgnoreCase))
        format = System.Drawing.Imaging.ImageFormat.Jpeg;

    // unrecognised extension: nothing to save (original behaviour)
    if (format == null) return;

    // grab the raw pixel data from the GUI image widgets
    byte[] left = new byte[image_width * image_height * 3];
    byte[] right = new byte[image_width * image_height * 3];
    GtkBitmap.getBitmap(leftimage, left);
    GtkBitmap.getBitmap(rightimage, right);

    // Bitmap is IDisposable - previously these leaked GDI resources
    using (Bitmap left_bmp = new Bitmap(image_width, image_height,
                                        System.Drawing.Imaging.PixelFormat.Format24bppRgb))
    using (Bitmap right_bmp = new Bitmap(image_width, image_height,
                                         System.Drawing.Imaging.PixelFormat.Format24bppRgb))
    {
        BitmapArrayConversions.updatebitmap_unsafe(left, left_bmp);
        BitmapArrayConversions.updatebitmap_unsafe(right, right_bmp);
        left_bmp.Save(left_image_filename, format);
        right_bmp.Save(right_image_filename, format);
    }
}
/// <summary>
/// Loads the stereo image pair for the current stereo_image_index from
/// the images directory and displays both images in the GUI.
/// </summary>
/// <returns>true if both left and right images were found and loaded</returns>
private bool LoadStereoImages()
{
    // build platform independent paths instead of hand-concatenating "/"
    string left_image_filename = System.IO.Path.Combine(
        images_directory, "test_left_" + stereo_image_index.ToString() + ".jpg");
    string right_image_filename = System.IO.Path.Combine(
        images_directory, "test_right_" + stereo_image_index.ToString() + ".jpg");

    // guard clauses with a console warning instead of dead commented-out code
    if (!System.IO.File.Exists(left_image_filename))
    {
        Console.WriteLine("Could not find image " + left_image_filename);
        return (false);
    }
    if (!System.IO.File.Exists(right_image_filename))
    {
        Console.WriteLine("Could not find image " + right_image_filename);
        return (false);
    }

    // load and display the left image
    fullres_left = GtkBitmap.Load(left_image_filename, ref image_width, ref image_height);
    picLeftImage.Pixbuf = GtkBitmap.createPixbuf(image_width, image_height);
    GtkBitmap.setBitmap(fullres_left, picLeftImage);

    // load and display the right image
    fullres_right = GtkBitmap.Load(right_image_filename, ref image_width, ref image_height);
    picRightImage.Pixbuf = GtkBitmap.createPixbuf(image_width, image_height);
    GtkBitmap.setBitmap(fullres_right, picRightImage);

    return (true);
}
/// <summary>
/// Chooses which bitmap to show for one camera, based upon the current
/// display_type, and writes it into the given Gtk image widget.
/// Falls back to the supplied default image when the selected bitmap
/// is unavailable.
/// </summary>
/// <param name="img">GUI image widget to update</param>
/// <param name="default_image">bitmap to show when nothing else applies</param>
/// <param name="is_left">true for the left camera, false for the right</param>
private void DisplayImage(Gtk.Image img, Bitmap default_image, bool is_left)
{
    usage.Update("Display Image, SurveyorVisionStereoGtk, DisplayImage");

    Bitmap disp_image = null;

    // pick the source bitmap according to the selected display mode
    switch (display_type)
    {
        case DISPLAY_RAW: { disp_image = default_image; break; }
        case DISPLAY_CALIBRATION_DOTS: { disp_image = edges; break; }
        case DISPLAY_CALIBRATION_GRID: { disp_image = linked_dots; break; }
        case DISPLAY_CALIBRATION_DIFF: { disp_image = grid_diff; break; }
        case DISPLAY_RECTIFIED:
        {
            // rectified[0] is the left camera, rectified[1] the right
            if (is_left)
                disp_image = rectified[0];
            else
                disp_image = rectified[1];
            break;
        }
    }

    // fall back to the raw camera image when a calibration pattern is being
    // shown or the selected bitmap has not been produced yet
    if ((calibration_pattern == null) || (disp_image == null)) disp_image = default_image;

    if (calibration_pattern == null)
    {
        if (is_left)
        {
            // once a calibration map exists, prefer the rectified image,
            // or the stereo features overlay when one is available
            if (calibration_map[0] != null)
            {
                if (stereo_features == null)
                    disp_image = rectified[0];
                else
                    disp_image = stereo_features;
            }
        }
        else
        {
            // NOTE(review): the right side never shows stereo_features -
            // presumably features are drawn on the left image only; confirm
            if (calibration_map[1] != null) disp_image = rectified[1];
        }
    }

    // update the widget only when both it and a bitmap are available
    if ((img != null) && (disp_image != null)) GtkBitmap.setBitmap(disp_image, img);
}
/// <summary>
/// Performs one of the numbered test routines, then shows the result
/// (scaled up to double size) in the output image widget.
/// </summary>
/// <param name="test_index">index number of the test to be performed</param>
private void PerformTest(int test_index)
{
    // reset all test state before running anything
    init();

    switch (test_index)
    {
        case 0:   // motion model
            test_motion_model(true);
            break;
        case 1:   // stereo ray models
        {
            bool mirror = false;
            stereo_model.showProbabilities(grid_layer, grid_dimension, img_rays,
                                           standard_width, standard_height,
                                           false, true, mirror);
            break;
        }
        case 2:   // gaussian distribution function
            stereo_model.showDistribution(img_rays, standard_width, standard_height);
            break;
        case 3:   // a single stereo ray model
            createSingleRayModel(false);
            break;
        case 4:   // sensor model lookups
            createSensorModelLookup();
            break;
        case 5:   // path planner
            test_path_planner(200, 50);
            break;
    }

    // scale the result up to double size for display
    byte[] img_large = image.downSample(img_rays, standard_width, standard_height,
                                        standard_width * 2, standard_height * 2);
    imgOutput.Pixbuf = GtkBitmap.createPixbuf(standard_width * 2, standard_height * 2);
    GtkBitmap.setBitmap(img_large, imgOutput);
}
/// <summary>
/// Creates the main window, displays blank placeholder images, then
/// constructs and starts the Surveyor stereo camera connection.
/// </summary>
/// <param name="stereo_camera_IP">IP address of the SVS</param>
/// <param name="leftport">port number of the left camera</param>
/// <param name="rightport">port number of the right camera</param>
public MainWindow(
    string stereo_camera_IP,
    int leftport,
    int rightport) : base(Gtk.WindowType.Toplevel)
{
    Build();

    robot = new robotSurveyor();

    this.stereo_camera_IP = stereo_camera_IP;

    // report the version and connection parameters on the console
    string version = String.Format("{0}", AssemblyVersion);
    Console.WriteLine("surveyorstereo GUI version " + version);
    Console.WriteLine("SVS IP: " + stereo_camera_IP.ToString());
    Console.WriteLine("Left camera port: " + leftport.ToString());
    Console.WriteLine("Right camera port: " + rightport.ToString());

    //SaveHpolarLookup();

    // on Windows the zip utility is simply called "zip"
    if (BaseVisionStereo.IsWindows()) { zip_utility = "zip"; }

    // show blank (all zero) placeholder images until frames arrive
    byte[] img = new byte[image_width * image_height * 3];
    Bitmap left_bmp = new Bitmap(image_width, image_height, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
    Bitmap right_bmp = new Bitmap(image_width, image_height, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
    sluggish.utilities.BitmapArrayConversions.updatebitmap_unsafe(img, left_bmp);
    sluggish.utilities.BitmapArrayConversions.updatebitmap_unsafe(img, right_bmp);
    GtkBitmap.setBitmap(left_bmp, leftimage);
    GtkBitmap.setBitmap(right_bmp, rightimage);

    // configure the stereo camera fully before starting it
    stereo_camera = new SurveyorVisionStereoGtk(stereo_camera_IP, leftport, rightport, broadcast_port, fps, this);
    stereo_camera.temporary_files_path = temporary_files_path;
    stereo_camera.recorded_images_path = recorded_images_path;
    stereo_camera.display_image[0] = leftimage;
    stereo_camera.display_image[1] = rightimage;
    // NOTE(review): "M" appears to enable the motors on startup - confirm
    stereo_camera.Run("M");

    txtLogging.Text = path_identifier;
    txtReplay.Text = replay_path_identifier;

    motors_active = true;
    starting = false;

    // enable motors
    //SendCommand("M");
}
/// <summary>
/// Initialises the grids, models and image buffers used by the test
/// routines, and shows a blank output image in the GUI.
/// </summary>
private void init()
{
    // occupancy grid layer and 3D position scratch arrays
    grid_layer = new float[grid_dimension, grid_dimension, 3];
    pos3D_x = new float[4];
    pos3D_y = new float[4];

    // stereo sensor model and a four camera head
    stereo_model = new stereoModel();
    robot_head = new stereoHead(4);

    // buffers for stereo feature positions and their uncertainties
    stereo_features = new float[900];
    stereo_uncertainties = new float[900];

    // blank (all zero) ray image shown in the output widget
    int no_of_bytes = standard_width * standard_height * 3;
    img_rays = new byte[no_of_bytes];
    imgOutput.Pixbuf = GtkBitmap.createPixbuf(standard_width, standard_height);
    GtkBitmap.setBitmap(img_rays, imgOutput);
}
/// <summary>
/// Creates the main window, displays blank placeholder images, then
/// constructs, configures and starts the webcam stereo camera pair.
/// </summary>
public MainWindow() : base(Gtk.WindowType.Toplevel)
{
    Build();

    // show blank (all zero) placeholder images until frames arrive
    byte[] img = new byte[image_width * image_height * 3];
    Bitmap left_bmp = new Bitmap(image_width, image_height, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
    Bitmap right_bmp = new Bitmap(image_width, image_height, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
    BitmapArrayConversions.updatebitmap_unsafe(img, left_bmp);
    BitmapArrayConversions.updatebitmap_unsafe(img, right_bmp);
    GtkBitmap.setBitmap(left_bmp, leftimage);
    GtkBitmap.setBitmap(right_bmp, rightimage);

    // configure the stereo camera
    stereo_camera = new WebcamVisionStereoGtk(left_camera_device, right_camera_device, broadcast_port, fps);
    stereo_camera.temporary_files_path = temporary_files_path;
    stereo_camera.recorded_images_path = recorded_images_path;
    stereo_camera.window = this;
    stereo_camera.display_image[0] = leftimage;
    stereo_camera.display_image[1] = rightimage;
    //stereo_camera.Load(calibration_filename);
    stereo_camera.image_width = image_width;
    stereo_camera.image_height = image_height;
    stereo_camera.min_exposure = minimum_brightness;
    stereo_camera.max_exposure = maximum_brightness;
    stereo_camera.flip_left_image = flip_left_image;
    stereo_camera.flip_right_image = flip_right_image;
    stereo_camera.baseline_mm = baseline_mm;
    // BUGFIX: these two settings were previously assigned AFTER Run(), so
    // the first frames could be grabbed with the wrong rectification /
    // radial correction state.  Configure fully before starting the camera
    // thread, matching the Surveyor constructor's ordering.
    stereo_camera.disable_rectification = disable_rectification;
    stereo_camera.disable_radial_correction = disable_radial_correction;
    stereo_camera.Run();

    // reflect the camera state in the GUI controls
    chkRectification.Active = !disable_rectification;
    chkRadialCorrection.Active = !disable_radial_correction;
    chkFlipLeft.Active = stereo_camera.flip_left_image;
    chkFlipRight.Active = stereo_camera.flip_right_image;
    txtBaseline.Text = stereo_camera.baseline_mm.ToString();
}
/// <summary>
/// Generates the calibration dot pattern, stores it on the stereo camera
/// and displays it in the given image widget.
/// </summary>
/// <param name="dest_img">GUI image widget in which to show the pattern</param>
private void ShowDotPattern(Gtk.Image dest_img)
{
    Bitmap pattern = SurveyorCalibration.CreateDotPattern(
        image_width, image_height,
        SurveyorCalibration.dots_across,
        SurveyorCalibration.dot_radius_percent);
    stereo_camera.calibration_pattern = pattern;
    GtkBitmap.setBitmap(stereo_camera.calibration_pattern, dest_img);
}
/// <summary>
/// Shows both left and right camera images within the GUI.
/// When a calibration pattern is active it is shown in place of the camera
/// image on the selected side; otherwise the normal display pipeline runs.
/// </summary>
/// <param name="left_image">left image bitmap</param>
/// <param name="right_image">right image bitmap</param>
protected override void DisplayImages(Bitmap left_image, Bitmap right_image)
{
    usage.Update("Display Images, SurveyorVisionStereoGtk, DisplayImages");

    if (display_image[0] != null)
    {
        if ((show_left_image) && (calibration_pattern != null))
            ShowCalibrationPattern(display_image[0], 1);
        else
            DisplayImage(display_image[0], left_image, true);
    }

    if (display_image[1] != null)
    {
        if ((!show_left_image) && (calibration_pattern != null))
            ShowCalibrationPattern(display_image[1], 0);
        else
            DisplayImage(display_image[1], right_image, false);
    }

    if (window != null)
    {
        if (!IsWindows())
        {
            // Here we need to update the GUI after receiving the right camera image
            // Since we're running in a separate thread from the GUI we have to
            // call it in a special way
            RunOnMainThread.Run(this, "UpdateGUI", new object[] { window, calibration_window });
            Gdk.Threads.Enter();
            window.QueueDraw();
            Gdk.Threads.Leave();
        }
    }
}

/// <summary>
/// Shows the calibration pattern in the given image widget and plays a
/// beep the first time the survey's rms error drops below 3.
/// Extracted from DisplayImages, where it was duplicated for each camera.
/// </summary>
/// <param name="img">GUI image widget in which to show the pattern</param>
/// <param name="survey_index">index into calibration_survey to monitor</param>
private void ShowCalibrationPattern(Gtk.Image img, int survey_index)
{
    try
    {
        GtkBitmap.setBitmap(calibration_pattern, img, ref buffer);
    }
    catch
    {
        // best effort GUI update - ignore transient drawing errors
    }
    if (calibration_survey != null)
    {
        // NOTE(review): the survey index is the opposite camera to the one
        // showing the pattern (left widget -> survey[1]) - this preserves
        // the original behaviour; confirm the cross-indexing is intended
        CalibrationSurvey survey = calibration_survey[survey_index];
        if (survey != null)
        {
            // beep once when the rms error first drops below the threshold
            if ((survey.minimum_rms_error < 3) &&
                ((prev_minimum_rms_error >= 3) || (prev_minimum_rms_error == 0)))
                PlaySound("beep.wav");
            prev_minimum_rms_error = survey.minimum_rms_error;
        }
    }
}