/// <summary>
/// Drives the stereo-camera state machine: request a frame pair from both
/// cameras, wait for the frames to arrive, then rectify/process (and
/// optionally record) them before looping back to the grab state.
/// Does nothing unless both cameras report Running.
/// </summary>
public void update_state()
{
    usage.Update("Update entry state " + svs_state.ToString() + ", SurveyorVisionStereo, update_state");

    if ((camera[0].Running) && (camera[1].Running))
    {
        TimeSpan diff;
        switch (svs_state)
        {
            case SVS_STATE_GRAB_IMAGES:
            {
                // throttle frame requests to the configured frame rate
                int time_step_mS = (int)(1000 / fps);
                diff = DateTime.Now.Subtract(svs_state_last);
                if (diff.TotalMilliseconds > time_step_mS)
                {
                    svs_state_last = DateTime.Now;

                    // enable embedded stereo, clearing the request after a timeout
                    TimeSpan diff2 = DateTime.Now.Subtract(embedded_last_enabled_disabled);
                    if (enable_embedded)
                    {
                        if (diff2.TotalSeconds > EMBEDDED_TIMEOUT_SEC)
                        {
                            enable_embedded = false;
                        }
                        camera[0].Embedded = true;
                        camera[1].Embedded = true;
                        camera[0].EnableEmbeddedStereo();
                        //Console.WriteLine("Embedded stereo enabled");
                    }

                    // disable embedded stereo, clearing the request after a timeout
                    if (disable_embedded)
                    {
                        if (diff2.TotalSeconds > EMBEDDED_TIMEOUT_SEC)
                        {
                            // BUGFIX: previously cleared enable_embedded here, which
                            // left disable_embedded latched on forever
                            disable_embedded = false;
                        }
                        camera[0].Embedded = false;
                        camera[1].Embedded = false;
                        camera[0].DisableEmbeddedStereo();
                        //Console.WriteLine("Embedded stereo disabled");
                    }

                    // pause or resume grabbing frames from the cameras:
                    // paused when updates are gated on clients and none are connected
                    bool is_paused = false;
                    if (correspondence != null)
                    {
                        if ((!UpdateWhenClientsConnected) ||
                            ((UpdateWhenClientsConnected) && (correspondence.GetNoOfClients() > 0)))
                        {
                            is_paused = false;
                        }
                        else
                        {
                            is_paused = true;
                        }
                    }

                    if ((Pause) || (is_paused))
                    {
                        Console.WriteLine("Paused");
                    }

                    // NOTE(review): the "|| (Pause)" term means frames are still
                    // requested while Pause is set; possibly intended as
                    // "&& (!Pause)" — left unchanged to preserve behaviour, confirm
                    if (((!is_paused) || (Pause)) &&
                        ((diff2.TotalSeconds > EMBEDDED_TIMEOUT_SEC) || (first_frame_request)))
                    {
                        // request images
                        camera[0].RequestFrame();
                        camera[1].RequestFrame();

                        if ((camera[0].send_command != null) &&
                            (camera[0].send_command != ""))
                        {
                            // a command is still pending - remain in the grab state
                            svs_state = SVS_STATE_GRAB_IMAGES;
                        }
                        else
                        {
                            svs_state = SVS_STATE_RECEIVE_IMAGES;
                            if (first_frame_request)
                            {
                                Console.WriteLine("Frames requested");
                                first_frame_request = false;
                            }
                        }
                    }
                }
                break;
            }
            case SVS_STATE_RECEIVE_IMAGES:
            {
                // both frames arrived
                if ((camera[0].frame_arrived) && (camera[1].frame_arrived))
                {
                    bmp_state_left = (Bitmap)camera[0].current_frame;
                    bmp_state_right = (Bitmap)camera[1].current_frame;
                    if ((bmp_state_left != null) && (bmp_state_right != null))
                    {
                        // proceed to process the images
                        svs_state = SVS_STATE_PROCESS_IMAGES;
                    }
                    else
                    {
                        // images were invalid - try again
                        svs_state = SVS_STATE_GRAB_IMAGES;
                    }
                }
                else
                {
                    int timeout_mS = 1000;
                    diff = DateTime.Now.Subtract(svs_state_last);
                    if (diff.TotalMilliseconds > timeout_mS)
                    {
                        // timed out - request images again
                        Console.WriteLine("Timed out waiting for images");
                        svs_state = SVS_STATE_GRAB_IMAGES;
                    }
                }
                break;
            }
            case SVS_STATE_PROCESS_IMAGES:
            {
                // (the original tested bmp_state_left != null twice in a row;
                // the redundant inner check has been collapsed - same behaviour)
                if (bmp_state_left != null)
                {
                    image_width = bmp_state_left.Width;
                    image_height = bmp_state_left.Height;
                    //busy_processing = true;

                    if (calibration_pattern != null)
                    {
                        // NOTE(review): the left bitmap is surveyed when the left
                        // image is NOT being shown (and vice versa); this mirrors
                        // GrabWindows/GrabLinux so it appears deliberate - confirm
                        if (!show_left_image)
                        {
                            SurveyorCalibration.DetectDots(bmp_state_left, ref edge_detector, calibration_survey[0], ref edges, ref linked_dots, ref grid, ref grid_diff, ref rectified[0]);
                        }
                        else
                        {
                            SurveyorCalibration.DetectDots(bmp_state_right, ref edge_detector, calibration_survey[1], ref edges, ref linked_dots, ref grid, ref grid_diff, ref rectified[1]);
                        }
                    }

                    RectifyImages(bmp_state_left, bmp_state_right);
                    Process(bmp_state_left, bmp_state_right);

                    // save images to file
                    if (Record)
                    {
                        string path = "";
                        if ((recorded_images_path != null) && (recorded_images_path != ""))
                        {
                            if (recorded_images_path.EndsWith("/"))
                            {
                                path = recorded_images_path;
                            }
                            else
                            {
                                path = recorded_images_path + "/";
                            }
                        }
                        RecordFrameNumber++;
                        DateTime t = DateTime.Now;
                        LogEvent(t, "RAW_L " + stereo_camera_index.ToString() + " raw0_" + RecordFrameNumber.ToString() + ".jpg", image_log);
                        LogEvent(t, "RAW_R " + stereo_camera_index.ToString() + " raw1_" + RecordFrameNumber.ToString() + ".jpg", image_log);
                        bmp_state_left.Save(path + "raw0_" + RecordFrameNumber.ToString() + ".jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                        bmp_state_right.Save(path + "raw1_" + RecordFrameNumber.ToString() + ".jpg", System.Drawing.Imaging.ImageFormat.Jpeg);

                        // BUGFIX: the second operand previously re-tested rectified[0],
                        // so a null rectified[1] would throw on Save below
                        if ((rectified[0] != null) && (rectified[1] != null))
                        {
                            LogEvent(t, "REC_L " + stereo_camera_index.ToString() + " rectified0_" + RecordFrameNumber.ToString() + ".jpg", image_log);
                            LogEvent(t, "REC_R " + stereo_camera_index.ToString() + " rectified1_" + RecordFrameNumber.ToString() + ".jpg", image_log);
                            rectified[0].Save(path + "rectified0_" + RecordFrameNumber.ToString() + ".jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                            rectified[1].Save(path + "rectified1_" + RecordFrameNumber.ToString() + ".jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                        }
                    }
                }
                svs_state = SVS_STATE_GRAB_IMAGES;
                break;
            }
        }

        // debug trace of state transitions (currently disabled)
        if (prev_svs_state != svs_state)
        {
            string msg = "";
            switch (svs_state)
            {
                case SVS_STATE_GRAB_IMAGES: { msg = "grab images"; break; }
                case SVS_STATE_RECEIVE_IMAGES: { msg = "images received"; break; }
                case SVS_STATE_PROCESS_IMAGES: { msg = "process images"; break; }
            }
            //if (Verbose)
            //Console.WriteLine(msg);
        }
        if (svs_state != prev_svs_state)
        {
            //Console.WriteLine("svs_state = " + svs_state.ToString());
        }
        prev_svs_state = svs_state;
    }

    usage.Update("Update exit state " + svs_state.ToString() + ", SurveyorVisionStereo, update_state");
    //usage.ExportAsDot("debug.dot", true, true);
}
/// <summary>
/// Grabs a stereo image pair via DirectShow (Windows), rectifies and
/// processes it, optionally records the raw/rectified frames to disk,
/// then hands control to the next camera in the chain if one exists.
/// Lazily opens the DirectShow cameras on first call.
/// </summary>
protected void GrabWindows()
{
    // Extract numbers from the camera device names.
    // This is used to uniquely identify devices so that
    // potentially more than one stereo camera could be running
    // at the same time.
    // (A "capture" temp filename was also built here previously but was
    // never used on the Windows path, so that dead code has been removed.)
    string identifier = "";
    for (int cam = 0; cam < 2; cam++)
    {
        char[] ch = camera_device[cam].ToCharArray();
        for (int i = 0; i < ch.Length; i++)
        {
            if ((ch[i] >= '0') && (ch[i] <= '9'))
            {
                identifier += ch[i];
            }
        }
    }

    // open the cameras on first use
    if (dsCameras == null)
    {
        dsCameras = new WebcamVisionDirectShow();
        dsCameras.camera_devices = camera_device[0] + "," + camera_device[1];
        if (image_width > 0)
        {
            dsCameras.image_width = image_width;
            dsCameras.image_height = image_height;
        }
        dsCameras.Open();
    }

    // set the camera index
    dsCameras.stereo_camera_index = stereo_camera_index;

    // whether to use pause or stop on the media control
    dsCameras.use_pause = use_media_pause;

    // acquire new images
    dsCameras.Grab();

    // set exposure
    dsCameras.exposure = exposure;

    // define exposure range for the camera
    dsCameras.min_exposure = min_exposure;
    dsCameras.max_exposure = max_exposure;

    if ((dsCameras.left_image_bitmap != null) &&
        (dsCameras.right_image_bitmap != null))
    {
        // grab the data from the captured images
        Bitmap[] bmp = new Bitmap[2];
        for (int cam = 0; cam < 2; cam++)
        {
            try
            {
                bmp[cam] = (cam == 0) ? dsCameras.left_image_bitmap
                                      : dsCameras.right_image_bitmap;
            }
            catch
            {
                bmp[cam] = null;
            }
            if (bmp[cam] == null) break;

            try
            {
                image_width = bmp[cam].Width;
                image_height = bmp[cam].Height;
            }
            catch
            {
                bmp[cam] = null;
            }
        }

        if ((bmp[0] != null) && (bmp[1] != null))
        {
            if (calibration_pattern != null)
            {
                // survey the bitmap which is not currently displayed
                if (!show_left_image)
                {
                    SurveyorCalibration.DetectDots(bmp[0], ref edge_detector, calibration_survey[0], ref edges, ref linked_dots, ref grid, ref grid_diff, ref rectified[0]);
                }
                else
                {
                    SurveyorCalibration.DetectDots(bmp[1], ref edge_detector, calibration_survey[1], ref edges, ref linked_dots, ref grid, ref grid_diff, ref rectified[1]);
                }
            }

            RectifyImages(bmp[0], bmp[1]);
            Process(bmp[0], bmp[1]);

            // save images to file
            if (Record)
            {
                string path = "";
                if ((recorded_images_path != null) && (recorded_images_path != ""))
                {
                    if (recorded_images_path.EndsWith("\\"))
                    {
                        path = recorded_images_path;
                    }
                    else
                    {
                        path = recorded_images_path + "\\";
                    }
                }
                RecordFrameNumber++;
                DateTime t = DateTime.Now;
                LogEvent(t, "RAW_L " + stereo_camera_index.ToString() + " raw" + identifier + "_0_" + RecordFrameNumber.ToString() + ".jpg", image_log);
                LogEvent(t, "RAW_R " + stereo_camera_index.ToString() + " raw" + identifier + "_1_" + RecordFrameNumber.ToString() + ".jpg", image_log);
                bmp[0].Save(path + "raw" + identifier + "_0_" + RecordFrameNumber.ToString() + ".jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                bmp[1].Save(path + "raw" + identifier + "_1_" + RecordFrameNumber.ToString() + ".jpg", System.Drawing.Imaging.ImageFormat.Jpeg);

                // BUGFIX: the second operand previously re-tested rectified[0],
                // so a null rectified[1] would throw on Save below
                if ((rectified[0] != null) && (rectified[1] != null))
                {
                    LogEvent(t, "REC_L " + stereo_camera_index.ToString() + " rectified" + identifier + "_0_" + RecordFrameNumber.ToString() + ".jpg", image_log);
                    LogEvent(t, "REC_R " + stereo_camera_index.ToString() + " rectified" + identifier + "_1_" + RecordFrameNumber.ToString() + ".jpg", image_log);
                    rectified[0].Save(path + "rectified" + identifier + "_0_" + RecordFrameNumber.ToString() + ".jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                    rectified[1].Save(path + "rectified" + identifier + "_1_" + RecordFrameNumber.ToString() + ".jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                }
            }
        }
        else
        {
            for (int i = 0; i < 2; i++)
            {
                if (bmp[i] == null)
                {
                    Console.WriteLine("Warning: Did not acquire image from " + camera_device[i]);
                }
            }
        }

        // release the temporary bitmaps
        try
        {
            if (bmp[0] != null) bmp[0].Dispose();
            if (bmp[1] != null) bmp[1].Dispose();
        }
        catch
        {
        }
    }

    // hand over to the next camera in the chain, if any
    if (next_camera != null)
    {
        active_camera = false;
        Pause();
        next_camera.active_camera = true;
        next_camera.Resume();
    }
}
/// <summary>
/// Grabs a stereo image pair on Linux by launching an external capture
/// utility (fswebcam by default, or v4l2stereo), waits for the two image
/// files to appear, then rectifies/processes and optionally records them.
/// Hands control to the next camera in the chain if one exists.
/// </summary>
protected void GrabLinux()
{
    string filename = "capture";

    // append temporary files path if specified
    if ((temporary_files_path != null) && (temporary_files_path != ""))
    {
        if (temporary_files_path.EndsWith("/"))
        {
            filename = temporary_files_path + filename;
        }
        else
        {
            filename = temporary_files_path + "/" + filename;
        }
    }

    // Extract numbers from the camera device names.
    // This is used to uniquely identify devices so that
    // potentially more than one stereo camera could be running
    // at the same time
    string identifier = "";
    for (int cam = 0; cam < 2; cam++)
    {
        char[] ch = camera_device[cam].ToCharArray();
        for (int i = 0; i < ch.Length; i++)
        {
            if ((ch[i] >= '0') && (ch[i] <= '9'))
            {
                identifier += ch[i];
            }
        }
    }
    filename += identifier;

    // build the capture command line, e.g.
    // fswebcam -q -d /dev/video1,/dev/video2 -r 320x240 --no-banner -S 2 -s brightness=50% --save capture12_.jpg
    string command_str = "fswebcam -q -d " + camera_device[0] + "," + camera_device[1];
    command_str += " -r " + image_width.ToString() + "x" + image_height.ToString();
    command_str += " --no-banner";
    command_str += " -S " + skip_frames.ToString();
    if (exposure > 0)
    {
        command_str += " -s brightness=" + exposure.ToString() + "%";
    }
    command_str += " --save " + filename + "_.jpg";

    if (capture_utility == "v4l2stereo")
    {
        command_str = "v4l2stereo --dev0 " + camera_device[0] + " ";
        command_str += "--dev1 " + camera_device[1] + " ";
        if (flip_left_image) command_str += "--flipleft ";
        if (flip_right_image) command_str += "--flipright ";
        command_str += "--save " + filename + "_ ";
    }

    Console.WriteLine("");
    Console.WriteLine("");
    Console.WriteLine(command_str);
    Console.WriteLine("");
    Console.WriteLine("");

    string left_image_filename = filename + "_0.jpg";
    string right_image_filename = filename + "_1.jpg";

    // delete any existing images
    for (int cam = 0; cam < 2; cam++)
    {
        if (File.Exists(filename + "_" + cam.ToString() + ".jpg"))
        {
            try
            {
                File.Delete(filename + "_" + cam.ToString() + ".jpg");
            }
            catch
            {
            }
        }
    }

    bool command_succeeded = false;
    Process proc = new Process();
    proc.EnableRaisingEvents = false;

    // BUGFIX: the executable and its arguments were previously passed
    // together as StartInfo.FileName, which prevents Process.Start from
    // resolving the executable; split them into FileName + Arguments
    int args_start = command_str.IndexOf(' ');
    if (args_start < 0)
    {
        proc.StartInfo.FileName = command_str;
    }
    else
    {
        proc.StartInfo.FileName = command_str.Substring(0, args_start);
        proc.StartInfo.Arguments = command_str.Substring(args_start + 1);
    }

    try
    {
        proc.Start();
        command_succeeded = true;
    }
    catch
    {
    }

    if (!command_succeeded)
    {
        Console.WriteLine("Command failed. fswebcam may not be installed.");
    }
    else
    {
        //proc.WaitForExit();
        proc.Close();

        // wait for the files to appear, polling up to a fixed timeout
        const int timeout_secs = 10;
        DateTime start_time = DateTime.Now;
        int seconds_elapsed = 0;
        while (((!File.Exists(left_image_filename)) ||
                (!File.Exists(right_image_filename))) &&
               (seconds_elapsed < timeout_secs))
        {
            System.Threading.Thread.Sleep(50);
            TimeSpan diff = DateTime.Now.Subtract(start_time);
            seconds_elapsed = (int)diff.TotalSeconds;
        }

        if ((File.Exists(left_image_filename)) &&
            (File.Exists(right_image_filename)))
        {
            // grab the data from the captured images
            Bitmap[] bmp = new Bitmap[2];
            for (int cam = 0; cam < 2; cam++)
            {
                try
                {
                    bmp[cam] = (Bitmap)Bitmap.FromFile(filename + "_" + cam.ToString() + ".jpg");
                }
                catch
                {
                    bmp[cam] = null;
                }
                if (bmp[cam] == null) break;

                image_width = bmp[cam].Width;
                image_height = bmp[cam].Height;
                byte[] raw_image_data = new byte[image_width * image_height * 3];
                BitmapArrayConversions.updatebitmap(bmp[cam], raw_image_data);
            }

            if ((bmp[0] != null) && (bmp[1] != null))
            {
                if (calibration_pattern != null)
                {
                    // survey the bitmap which is not currently displayed
                    if (!show_left_image)
                    {
                        SurveyorCalibration.DetectDots(bmp[0], ref edge_detector, calibration_survey[0], ref edges, ref linked_dots, ref grid, ref grid_diff, ref rectified[0]);
                    }
                    else
                    {
                        SurveyorCalibration.DetectDots(bmp[1], ref edge_detector, calibration_survey[1], ref edges, ref linked_dots, ref grid, ref grid_diff, ref rectified[1]);
                    }
                }

                RectifyImages(bmp[0], bmp[1]);
                Process(bmp[0], bmp[1]);

                // save images to file
                if (Record)
                {
                    string path = "";
                    if ((recorded_images_path != null) && (recorded_images_path != ""))
                    {
                        if (recorded_images_path.EndsWith("/"))
                        {
                            path = recorded_images_path;
                        }
                        else
                        {
                            path = recorded_images_path + "/";
                        }
                    }
                    RecordFrameNumber++;
                    DateTime t = DateTime.Now;
                    LogEvent(t, "RAW_L " + stereo_camera_index.ToString() + " raw" + identifier + "_0_" + RecordFrameNumber.ToString() + ".jpg", image_log);
                    LogEvent(t, "RAW_R " + stereo_camera_index.ToString() + " raw" + identifier + "_1_" + RecordFrameNumber.ToString() + ".jpg", image_log);
                    bmp[0].Save(path + "raw" + identifier + "_0_" + RecordFrameNumber.ToString() + ".jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                    bmp[1].Save(path + "raw" + identifier + "_1_" + RecordFrameNumber.ToString() + ".jpg", System.Drawing.Imaging.ImageFormat.Jpeg);

                    // BUGFIX: the second operand previously re-tested rectified[0],
                    // so a null rectified[1] would throw on Save below
                    if ((rectified[0] != null) && (rectified[1] != null))
                    {
                        LogEvent(t, "REC_L " + stereo_camera_index.ToString() + " rectified" + identifier + "_0_" + RecordFrameNumber.ToString() + ".jpg", image_log);
                        LogEvent(t, "REC_R " + stereo_camera_index.ToString() + " rectified" + identifier + "_1_" + RecordFrameNumber.ToString() + ".jpg", image_log);
                        rectified[0].Save(path + "rectified" + identifier + "_0_" + RecordFrameNumber.ToString() + ".jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                        rectified[1].Save(path + "rectified" + identifier + "_1_" + RecordFrameNumber.ToString() + ".jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                    }
                }
            }
            else
            {
                for (int i = 0; i < 2; i++)
                {
                    if (bmp[i] == null)
                    {
                        Console.WriteLine("Warning: Did not acquire image from " + camera_device[i]);
                    }
                }
            }
        }
        else
        {
            if (!File.Exists(left_image_filename))
            {
                Console.WriteLine("Timed out: File not found " + left_image_filename + ".");
            }
            if (!File.Exists(right_image_filename))
            {
                Console.WriteLine("Timed out: File not found " + right_image_filename + ".");
            }
        }
    }

    // hand over to the next camera in the chain, if any
    if (next_camera != null)
    {
        active_camera = false;
        Pause();
        next_camera.active_camera = true;
        next_camera.Resume();
    }
}