/// <summary>
/// main update routine for contour based stereo correspondence
/// </summary>
/// <param name="left_bmp">left image data</param>
/// <param name="right_bmp">right image data</param>
/// <param name="wdth">width of the images</param>
/// <param name="hght">height of the images</param>
/// <param name="calibration_offset_x">calibration offset to counter for any small vergence angle between the cameras</param>
/// <param name="calibration_offset_y">calibration offset to counter for any small vergence angle between the cameras</param>
/// <param name="reset_attention">reset the attention map</param>
public void update(Byte[] left_bmp, Byte[] right_bmp,
                   int wdth, int hght,
                   float calibration_offset_x, float calibration_offset_y,
                   bool reset_attention)
{
    int scale, idx;
    int x, y, x2;

    // (re)allocate all working buffers on first use, or whenever the
    // compression settings have changed since the previous call
    if ((wavepoints_left == null) ||
        (vertical_compression != prev_vertical_compression) ||
        (disparity_map_compression != prev_disparity_map_compression))
    {
        // create image objects to store the left and right camera data
        img_left = new classimage();
        img_left.createImage(wdth, hght / vertical_compression);
        img_right = new classimage();
        img_right.createImage(wdth, hght / vertical_compression);

        // per-row wavepoint responses: [row][column][response, row diff, col diff]
        wavepoints_left = new float[hght / vertical_compression][][];
        wavepoints_right = new float[hght / vertical_compression][][];
        wavepoints_left_scale = new byte[hght / vertical_compression][];
        wavepoints_left_pattern = new byte[hght / vertical_compression][];
        wavepoints_right_scale = new byte[hght / vertical_compression][];
        wavepoints_right_pattern = new byte[hght / vertical_compression][];
        for (int i = 0; i < wavepoints_left.Length; i++)
        {
            wavepoints_left[i] = new float[wdth / step_size][];
            wavepoints_right[i] = new float[wdth / step_size][];
            wavepoints_left_scale[i] = new byte[wdth / step_size];
            wavepoints_left_pattern[i] = new byte[wdth / step_size];
            wavepoints_right_scale[i] = new byte[wdth / step_size];
            wavepoints_right_pattern[i] = new byte[wdth / step_size];
            for (int j = 0; j < wavepoints_left[i].Length; j++)
            {
                wavepoints_left[i][j] = new float[3];
                wavepoints_right[i][j] = new float[3];
            }
        }

        // scale point lists; element [0] of each row holds the point count
        scalepoints_left = new int[no_of_scales][];
        scalepoints_right = new int[no_of_scales][];
        scalepoints_lookup = new int[no_of_scales][][];
        for (int i = 0; i < no_of_scales; i++)
        {
            scalepoints_left[i] = new int[wdth + 1];
            scalepoints_right[i] = new int[wdth + 1];
            scalepoints_lookup[i] = new int[wdth][];
            for (int j = 0; j < scalepoints_lookup[i].Length; j++)
            {
                scalepoints_lookup[i][j] = new int[wdth + 1];
            }
        }

        // create an attention map
        attention_map = new bool[wdth, hght];
        resetAttention(wdth, hght);

        // disparity map (one extra cell in each dimension to absorb rounding)
        int w = (wdth / (step_size * disparity_map_compression)) + 1;
        int h = (hght / (vertical_compression * disparity_map_compression)) + 1;
        disparity_map = new float[w][];
        disparity_hits = new float[w][];
        for (int i = 0; i < w; i++)
        {
            disparity_map[i] = new float[h];
            disparity_hits[i] = new float[h];
        }

        // surround radii (x,y) for each detection scale, clamped to a minimum of 2
        scale_width = new int[no_of_scales][];
        int sc = 2;
        for (int s = 0; s < no_of_scales; s++)
        {
            scale_width[s] = new int[2];
            scale_width[s][0] = (int)(wdth * surround_radius_percent * sc / 100);
            if (scale_width[s][0] < 2) scale_width[s][0] = 2;
            scale_width[s][1] = (int)((hght / vertical_compression) * surround_radius_percent * sc / 100);
            if (scale_width[s][1] < 2) scale_width[s][1] = 2;
            sc++;
        }
    }

    if (reset_attention) resetAttention(wdth, hght);

    // store compression values so that changes in these
    // values can be detected
    prev_vertical_compression = vertical_compression;
    prev_disparity_map_compression = disparity_map_compression;

    // set the images
    left_image = left_bmp;
    img_left.updateFromBitmapVerticalCompression(left_bmp, wdth, hght, vertical_compression, 0, 0);
    img_right.updateFromBitmapVerticalCompression(right_bmp, wdth, hght, vertical_compression,
                                                  (int)calibration_offset_x, (int)calibration_offset_y);

    // update integrals
    img_left.updateIntegralImage();
    img_right.updateIntegralImage();

    // update average intensities for each row and column
    img_left.updateAverages();
    img_right.updateAverages();

    // disparity map dimensions
    int compressed_wdth = wdth / (step_size * disparity_map_compression);
    int compressed_hght = hght / (vertical_compression * disparity_map_compression);

    // clear the disparity map
    clearDisparityMap(compressed_wdth, compressed_hght);

    // update blobs on multiple scales
    for (scale = 0; scale < no_of_scales; scale++)
    {
        // get x and y radius for this scale
        int surround_pixels_x = scale_width[scale][0];
        int surround_pixels_y = scale_width[scale][1];

        // detect blobs at this scale
        img_left.detectBlobs(scale, surround_pixels_x, surround_pixels_y, step_size,
                             wavepoints_left, wavepoints_left_scale, wavepoints_left_pattern);
        img_right.detectBlobs(scale, surround_pixels_x, surround_pixels_y, step_size,
                              wavepoints_right, wavepoints_right_scale, wavepoints_right_pattern);
    }

    // update the scale points for fast searching
    float min_thresh = 5.0f;
    float min_grad = 0.5f;
    float left_diff, right_diff;
    float prev_left_diff = 0, prev_right_diff = 0;
    float prev_left_grad = 0, prev_right_grad = 0;
    float left_grad = 0, right_grad = 0;
    int max_disp = max_disparity * (wdth / step_size) / 100;
    int searchfactor = 4;
    int max_disp2 = max_disp / searchfactor;
    int max_wdth = wdth / searchfactor;
    int max_vertical_edge_difference = hght / 4;

    // assorted variables
    int no_of_points_left;
    int disp, x_left, vertical_left, x_left2, x_left3, no_of_candidates;
    int prev_pattern_left, next_pattern_left, idx2;
    int x_right, vertical_right, x_right2, x_right3, dx, prev_pattern_right, next_pattern_right;
    float diff_left, min_response_difference;
    float confidence, diff_right, response_difference;

    // for each row of the image
    for (y = 0; y < hght / vertical_compression; y++)
    {
        // each of the 8 combinations of response sign / gradient sign / gradient change sign
        for (int sign = 0; sign < 8; sign++)
        {
            // go through each detection pattern
            // at present there are only two patterns: centre/surround and left/right
            for (int currPattern = PATTERN_CENTRE_SURROUND; currPattern <= PATTERN_LEFT_RIGHT; currPattern++)
            {
                // clear the number of points
                for (scale = 0; scale < no_of_scales; scale++)
                {
                    scalepoints_left[scale][0] = 0;
                    scalepoints_right[scale][0] = 0;
                    for (x = 0; x < max_wdth; x++) scalepoints_lookup[scale][x][0] = 0;
                }

                int ww = wdth / step_size;
                for (x = 0; x < ww; x++)
                {
                    int pattern = wavepoints_left_pattern[y][x];
                    if (pattern == currPattern)
                    {
                        // response value
                        left_diff = wavepoints_left[y][x][0];
                        right_diff = wavepoints_right[y][x][0];
                        if ((x > 0) && ((left_diff != 0) || (right_diff != 0)))
                        {
                            float left_row_diff = wavepoints_left[y][x][1];
                            float right_row_diff = wavepoints_right[y][x][1];

                            // gradient - change in response along the row
                            left_grad = left_diff - prev_left_diff;
                            right_grad = right_diff - prev_right_diff;

                            // row differences must agree in sign between the two images
                            if (((left_row_diff > 0) && (right_row_diff > 0)) ||
                                ((left_row_diff < 0) && (right_row_diff < 0)))
                            {
                                float left_col_diff = wavepoints_left[y][x][2];
                                float right_col_diff = wavepoints_right[y][x][2];

                                // column differences must also agree in sign
                                if (((left_col_diff >= 0) && (right_col_diff >= 0)) ||
                                    ((left_col_diff < 0) && (right_col_diff < 0)))
                                {
                                    float left_horizontal_grad_change = left_grad - prev_left_grad;
                                    float right_horizontal_grad_change = right_grad - prev_right_grad;

                                    if ((left_diff != 0) && ((left_grad < -min_grad) || (left_grad > min_grad)))
                                    {
                                        // combinations of response and gradient directions
                                        if (((sign == 0) && (left_diff > min_thresh) && (left_grad > 0) && (left_horizontal_grad_change > 0)) ||
                                            ((sign == 1) && (left_diff < -min_thresh) && (left_grad > 0) && (left_horizontal_grad_change > 0)) ||
                                            ((sign == 2) && (left_diff > min_thresh) && (left_grad <= 0) && (left_horizontal_grad_change > 0)) ||
                                            ((sign == 3) && (left_diff < -min_thresh) && (left_grad <= 0) && (left_horizontal_grad_change > 0)) ||
                                            ((sign == 4) && (left_diff > min_thresh) && (left_grad > 0) && (left_horizontal_grad_change <= 0)) ||
                                            ((sign == 5) && (left_diff < -min_thresh) && (left_grad > 0) && (left_horizontal_grad_change <= 0)) ||
                                            ((sign == 6) && (left_diff > min_thresh) && (left_grad <= 0) && (left_horizontal_grad_change <= 0)) ||
                                            ((sign == 7) && (left_diff < -min_thresh) && (left_grad <= 0) && (left_horizontal_grad_change <= 0)))
                                        {
                                            // what is the best responding scale ?
                                            scale = wavepoints_left_scale[y][x];
                                            // get the current index
                                            idx = scalepoints_left[scale][0] + 1;
                                            // set the x position
                                            scalepoints_left[scale][idx] = x;
                                            // increment the index
                                            scalepoints_left[scale][0]++;
                                        }
                                    }

                                    if ((right_diff != 0) && ((right_grad < -min_grad) || (right_grad > min_grad)))
                                    {
                                        // combinations of response and gradient directions
                                        if (((sign == 0) && (right_diff > min_thresh) && (right_grad > 0) && (right_horizontal_grad_change > 0)) ||
                                            ((sign == 1) && (right_diff < -min_thresh) && (right_grad > 0) && (right_horizontal_grad_change > 0)) ||
                                            ((sign == 2) && (right_diff > min_thresh) && (right_grad <= 0) && (right_horizontal_grad_change > 0)) ||
                                            ((sign == 3) && (right_diff < -min_thresh) && (right_grad <= 0) && (right_horizontal_grad_change > 0)) ||
                                            ((sign == 4) && (right_diff > min_thresh) && (right_grad > 0) && (right_horizontal_grad_change <= 0)) ||
                                            ((sign == 5) && (right_diff < -min_thresh) && (right_grad > 0) && (right_horizontal_grad_change <= 0)) ||
                                            ((sign == 6) && (right_diff > min_thresh) && (right_grad <= 0) && (right_horizontal_grad_change <= 0)) ||
                                            ((sign == 7) && (right_diff < -min_thresh) && (right_grad <= 0) && (right_horizontal_grad_change <= 0)))
                                        {
                                            scale = wavepoints_right_scale[y][x];
                                            // get the current index
                                            idx = scalepoints_right[scale][0] + 1;
                                            // set the x position
                                            scalepoints_right[scale][idx] = x;
                                            // increment the index
                                            scalepoints_right[scale][0]++;

                                            // register this right point in the coarse lookup for
                                            // every left-image search bucket that could match it
                                            x2 = x / searchfactor;
                                            for (int xx = x2; xx < x2 + max_disp2; xx++)
                                            {
                                                if ((xx > -1) && (xx < max_wdth))
                                                {
                                                    idx2 = scalepoints_lookup[scale][xx][0] + 1;
                                                    scalepoints_lookup[scale][xx][idx2] = idx;
                                                    scalepoints_lookup[scale][xx][0]++;
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }

                        // record previous responses
                        prev_left_grad = left_grad;
                        prev_right_grad = right_grad;
                        prev_left_diff = left_diff;
                        prev_right_diff = right_diff;
                    }
                }

                // stereo match, beginning with the largest scale
                for (scale = no_of_scales - 1; scale >= 0; scale--)
                {
                    no_of_points_left = scalepoints_left[scale][0];

                    // for each possible match in the left image
                    for (int i = no_of_points_left - 1; i >= 0; i--)
                    {
                        disp = -1;

                        // get the position and response magnitude of the left point
                        x_left = scalepoints_left[scale][i + 1];
                        vertical_left = img_left.column_maximal_edge[x_left];
                        diff_left = wavepoints_left[y][x_left][0];

                        // neighbouring pattern context, clamped to the row bounds
                        x_left2 = x_left - 2;
                        if (x_left2 < 0) x_left2 = 0;
                        x_left3 = x_left + 2;
                        if (x_left3 >= ww) x_left3 = ww - 1;
                        prev_pattern_left = wavepoints_left_pattern[y][x_left2];
                        next_pattern_left = wavepoints_left_pattern[y][x_left3];

                        min_response_difference = match_threshold;
                        x2 = x_left / searchfactor;
                        no_of_candidates = scalepoints_lookup[scale][x2][0];

                        // for each possible match in the right image
                        // note here that we scan from right to left
                        for (int j = no_of_candidates - 1; j >= 0; j--)
                        {
                            idx2 = scalepoints_lookup[scale][x2][j + 1];

                            // get the horizontal position of the possible match in the right image
                            x_right = scalepoints_right[scale][idx2];

                            // what's the disparity ?
                            dx = x_left - x_right;

                            // is the disparity in the range we expect ?
                            if ((dx > -1) && (dx < max_disp))
                            {
                                // vertical context checking
                                // (fixed: previously read img_left here, comparing the
                                // left image's edge context against itself)
                                vertical_right = img_right.column_maximal_edge[x_right];
                                int dv = vertical_left - vertical_right;
                                if (dv < 0) dv = -dv;

                                // is the vertical context within tolerance ?
                                if (dv < max_vertical_edge_difference)
                                {
                                    // check the ordering of patterns
                                    x_right2 = x_right - 2;
                                    if (x_right2 < 0) x_right2 = 0;
                                    prev_pattern_right = wavepoints_right_pattern[y][x_right2];
                                    if (prev_pattern_left == prev_pattern_right)
                                    {
                                        x_right3 = x_right + 2;
                                        if (x_right3 >= ww) x_right3 = ww - 1;
                                        next_pattern_right = wavepoints_right_pattern[y][x_right3];
                                        if (next_pattern_left == next_pattern_right)
                                        {
                                            // check the response magnitude difference
                                            diff_right = wavepoints_right[y][x_right][0];
                                            response_difference = diff_right - diff_left;
                                            if (response_difference < 0) response_difference = -response_difference;
                                            response_difference *= dv;

                                            // is the magnitude difference the best that we've found so far ?
                                            if (response_difference < min_response_difference)
                                            {
                                                // record the disparity and minimum difference
                                                disp = dx;
                                                min_response_difference = response_difference;
                                            }
                                        }
                                    }
                                }
                            }

                            // if the horizontal difference is too large then we may
                            // as well abandon the search
                            if (dx > max_disp) break;
                        }

                        if (disp > -1)
                        {
                            // how confident are we in this match ?
                            confidence = 1.0f - (min_response_difference / match_threshold);
                            confidence /= (no_of_scales - scale);
                            confidence *= confidence;

                            // get the position on the disparity map
                            int mx = (x_left + disp) / disparity_map_compression;
                            int my = y / disparity_map_compression;

                            // update the disparity map using a gaussian
                            // probability distribution
                            updateDisparityMap(mx, my, compressed_wdth, compressed_hght,
                                               scale, disp * step_size, confidence);
                        }
                    }
                }
            }
        }
    }

    // normalise the disparity map by the accumulated hit weights
    float disparity_value;
    for (y = compressed_hght; y >= 0; y--)
    {
        for (x = compressed_wdth - 1; x >= 0; x--)
        {
            disparity_value = disparity_map[x][y];
            if (disparity_value < 0)
            {
                disparity_value = 0;
            }
            else
            {
                // guard against 0/0 = NaN for cells which received no matches
                // (fixed: previously divided unconditionally)
                if (disparity_hits[x][y] > 0)
                {
                    disparity_value /= disparity_hits[x][y];
                    disparity_map[x][y] = disparity_value;
                }
            }
        }
    }

    // get a fixed quantity of features which may
    // subsequently be used to create ray models
    getSelectedFeatures(wdth, hght);
}
/// <summary>
/// main update routine for contour based stereo correspondence
/// </summary>
/// <param name="left_bmp">left image data</param>
/// <param name="right_bmp">right image data</param>
/// <param name="wdth">width of the images</param>
/// <param name="hght">height of the images</param>
/// <param name="calibration_offset_x">calibration offset to counter for any small vergence angle between the cameras</param>
/// <param name="calibration_offset_y">calibration offset to counter for any small vergence angle between the cameras</param>
/// <param name="reset_attention">reset the attention map</param>
public void update(Byte[] left_bmp, Byte[] right_bmp,
                   int wdth, int hght,
                   float calibration_offset_x, float calibration_offset_y,
                   bool reset_attention)
{
    int scale, idx;
    int x, y, x2;

    // (re)allocate all working buffers on first use, or whenever the
    // compression settings have changed since the previous call
    if ((wavepoints_left == null) ||
        (vertical_compression != prev_vertical_compression) ||
        (disparity_map_compression != prev_disparity_map_compression))
    {
        // create image objects to store the left and right camera data
        img_left = new classimage();
        img_left.createImage(wdth, hght / vertical_compression);
        img_right = new classimage();
        img_right.createImage(wdth, hght / vertical_compression);

        // per-row wavepoint responses: [row][column][response, row diff, col diff]
        wavepoints_left = new float[hght / vertical_compression][][];
        wavepoints_right = new float[hght / vertical_compression][][];
        wavepoints_left_scale = new byte[hght / vertical_compression][];
        wavepoints_left_pattern = new byte[hght / vertical_compression][];
        wavepoints_right_scale = new byte[hght / vertical_compression][];
        wavepoints_right_pattern = new byte[hght / vertical_compression][];
        for (int i = 0; i < wavepoints_left.Length; i++)
        {
            wavepoints_left[i] = new float[wdth / step_size][];
            wavepoints_right[i] = new float[wdth / step_size][];
            wavepoints_left_scale[i] = new byte[wdth / step_size];
            wavepoints_left_pattern[i] = new byte[wdth / step_size];
            wavepoints_right_scale[i] = new byte[wdth / step_size];
            wavepoints_right_pattern[i] = new byte[wdth / step_size];
            for (int j = 0; j < wavepoints_left[i].Length; j++)
            {
                wavepoints_left[i][j] = new float[3];
                wavepoints_right[i][j] = new float[3];
            }
        }

        // scale point lists; element [0] of each row holds the point count
        scalepoints_left = new int[no_of_scales][];
        scalepoints_right = new int[no_of_scales][];
        scalepoints_lookup = new int[no_of_scales][][];
        for (int i = 0; i < no_of_scales; i++)
        {
            scalepoints_left[i] = new int[wdth + 1];
            scalepoints_right[i] = new int[wdth + 1];
            scalepoints_lookup[i] = new int[wdth][];
            for (int j = 0; j < scalepoints_lookup[i].Length; j++)
            {
                scalepoints_lookup[i][j] = new int[wdth + 1];
            }
        }

        // create an attention map
        attention_map = new bool[wdth, hght];
        resetAttention(wdth, hght);

        // disparity map (one extra cell in each dimension to absorb rounding)
        int w = (wdth / (step_size * disparity_map_compression)) + 1;
        int h = (hght / (vertical_compression * disparity_map_compression)) + 1;
        disparity_map = new float[w][];
        disparity_hits = new float[w][];
        for (int i = 0; i < w; i++)
        {
            disparity_map[i] = new float[h];
            disparity_hits[i] = new float[h];
        }

        // surround radii (x,y) for each detection scale, clamped to a minimum of 2
        scale_width = new int[no_of_scales][];
        int sc = 2;
        for (int s = 0; s < no_of_scales; s++)
        {
            scale_width[s] = new int[2];
            scale_width[s][0] = (int)(wdth * surround_radius_percent * sc / 100);
            if (scale_width[s][0] < 2) scale_width[s][0] = 2;
            scale_width[s][1] = (int)((hght / vertical_compression) * surround_radius_percent * sc / 100);
            if (scale_width[s][1] < 2) scale_width[s][1] = 2;
            sc++;
        }
    }

    if (reset_attention) resetAttention(wdth, hght);

    // store compression values so that changes in these
    // values can be detected
    prev_vertical_compression = vertical_compression;
    prev_disparity_map_compression = disparity_map_compression;

    // set the images
    left_image = left_bmp;
    img_left.updateFromBitmapVerticalCompression(left_bmp, wdth, hght, vertical_compression, 0, 0);
    img_right.updateFromBitmapVerticalCompression(right_bmp, wdth, hght, vertical_compression,
                                                  (int)calibration_offset_x, (int)calibration_offset_y);

    // update integrals
    img_left.updateIntegralImage();
    img_right.updateIntegralImage();

    // update average intensities for each row and column
    img_left.updateAverages();
    img_right.updateAverages();

    // disparity map dimensions
    int compressed_wdth = wdth / (step_size * disparity_map_compression);
    int compressed_hght = hght / (vertical_compression * disparity_map_compression);

    // clear the disparity map
    clearDisparityMap(compressed_wdth, compressed_hght);

    // update blobs on multiple scales
    for (scale = 0; scale < no_of_scales; scale++)
    {
        // get x and y radius for this scale
        int surround_pixels_x = scale_width[scale][0];
        int surround_pixels_y = scale_width[scale][1];

        // detect blobs at this scale
        img_left.detectBlobs(scale, surround_pixels_x, surround_pixels_y, step_size,
                             wavepoints_left, wavepoints_left_scale, wavepoints_left_pattern);
        img_right.detectBlobs(scale, surround_pixels_x, surround_pixels_y, step_size,
                              wavepoints_right, wavepoints_right_scale, wavepoints_right_pattern);
    }

    // update the scale points for fast searching
    float min_thresh = 5.0f;
    float min_grad = 0.5f;
    float left_diff, right_diff;
    float prev_left_diff = 0, prev_right_diff = 0;
    float prev_left_grad = 0, prev_right_grad = 0;
    float left_grad = 0, right_grad = 0;
    int max_disp = max_disparity * (wdth / step_size) / 100;
    int searchfactor = 4;
    int max_disp2 = max_disp / searchfactor;
    int max_wdth = wdth / searchfactor;
    int max_vertical_edge_difference = hght / 4;

    // assorted variables
    int no_of_points_left;
    int disp, x_left, vertical_left, x_left2, x_left3, no_of_candidates;
    int prev_pattern_left, next_pattern_left, idx2;
    int x_right, vertical_right, x_right2, x_right3, dx, prev_pattern_right, next_pattern_right;
    float diff_left, min_response_difference;
    float confidence, diff_right, response_difference;

    // for each row of the image
    for (y = 0; y < hght / vertical_compression; y++)
    {
        // each of the 8 combinations of response sign / gradient sign / gradient change sign
        for (int sign = 0; sign < 8; sign++)
        {
            // go through each detection pattern
            // at present there are only two patterns: centre/surround and left/right
            for (int currPattern = PATTERN_CENTRE_SURROUND; currPattern <= PATTERN_LEFT_RIGHT; currPattern++)
            {
                // clear the number of points
                for (scale = 0; scale < no_of_scales; scale++)
                {
                    scalepoints_left[scale][0] = 0;
                    scalepoints_right[scale][0] = 0;
                    for (x = 0; x < max_wdth; x++) scalepoints_lookup[scale][x][0] = 0;
                }

                int ww = wdth / step_size;
                for (x = 0; x < ww; x++)
                {
                    int pattern = wavepoints_left_pattern[y][x];
                    if (pattern == currPattern)
                    {
                        // response value
                        left_diff = wavepoints_left[y][x][0];
                        right_diff = wavepoints_right[y][x][0];
                        if ((x > 0) && ((left_diff != 0) || (right_diff != 0)))
                        {
                            float left_row_diff = wavepoints_left[y][x][1];
                            float right_row_diff = wavepoints_right[y][x][1];

                            // gradient - change in response along the row
                            left_grad = left_diff - prev_left_diff;
                            right_grad = right_diff - prev_right_diff;

                            // row differences must agree in sign between the two images
                            if (((left_row_diff > 0) && (right_row_diff > 0)) ||
                                ((left_row_diff < 0) && (right_row_diff < 0)))
                            {
                                float left_col_diff = wavepoints_left[y][x][2];
                                float right_col_diff = wavepoints_right[y][x][2];

                                // column differences must also agree in sign
                                if (((left_col_diff >= 0) && (right_col_diff >= 0)) ||
                                    ((left_col_diff < 0) && (right_col_diff < 0)))
                                {
                                    float left_horizontal_grad_change = left_grad - prev_left_grad;
                                    float right_horizontal_grad_change = right_grad - prev_right_grad;

                                    if ((left_diff != 0) && ((left_grad < -min_grad) || (left_grad > min_grad)))
                                    {
                                        // combinations of response and gradient directions
                                        if (((sign == 0) && (left_diff > min_thresh) && (left_grad > 0) && (left_horizontal_grad_change > 0)) ||
                                            ((sign == 1) && (left_diff < -min_thresh) && (left_grad > 0) && (left_horizontal_grad_change > 0)) ||
                                            ((sign == 2) && (left_diff > min_thresh) && (left_grad <= 0) && (left_horizontal_grad_change > 0)) ||
                                            ((sign == 3) && (left_diff < -min_thresh) && (left_grad <= 0) && (left_horizontal_grad_change > 0)) ||
                                            ((sign == 4) && (left_diff > min_thresh) && (left_grad > 0) && (left_horizontal_grad_change <= 0)) ||
                                            ((sign == 5) && (left_diff < -min_thresh) && (left_grad > 0) && (left_horizontal_grad_change <= 0)) ||
                                            ((sign == 6) && (left_diff > min_thresh) && (left_grad <= 0) && (left_horizontal_grad_change <= 0)) ||
                                            ((sign == 7) && (left_diff < -min_thresh) && (left_grad <= 0) && (left_horizontal_grad_change <= 0)))
                                        {
                                            // what is the best responding scale ?
                                            scale = wavepoints_left_scale[y][x];
                                            // get the current index
                                            idx = scalepoints_left[scale][0] + 1;
                                            // set the x position
                                            scalepoints_left[scale][idx] = x;
                                            // increment the index
                                            scalepoints_left[scale][0]++;
                                        }
                                    }

                                    if ((right_diff != 0) && ((right_grad < -min_grad) || (right_grad > min_grad)))
                                    {
                                        // combinations of response and gradient directions
                                        if (((sign == 0) && (right_diff > min_thresh) && (right_grad > 0) && (right_horizontal_grad_change > 0)) ||
                                            ((sign == 1) && (right_diff < -min_thresh) && (right_grad > 0) && (right_horizontal_grad_change > 0)) ||
                                            ((sign == 2) && (right_diff > min_thresh) && (right_grad <= 0) && (right_horizontal_grad_change > 0)) ||
                                            ((sign == 3) && (right_diff < -min_thresh) && (right_grad <= 0) && (right_horizontal_grad_change > 0)) ||
                                            ((sign == 4) && (right_diff > min_thresh) && (right_grad > 0) && (right_horizontal_grad_change <= 0)) ||
                                            ((sign == 5) && (right_diff < -min_thresh) && (right_grad > 0) && (right_horizontal_grad_change <= 0)) ||
                                            ((sign == 6) && (right_diff > min_thresh) && (right_grad <= 0) && (right_horizontal_grad_change <= 0)) ||
                                            ((sign == 7) && (right_diff < -min_thresh) && (right_grad <= 0) && (right_horizontal_grad_change <= 0)))
                                        {
                                            scale = wavepoints_right_scale[y][x];
                                            // get the current index
                                            idx = scalepoints_right[scale][0] + 1;
                                            // set the x position
                                            scalepoints_right[scale][idx] = x;
                                            // increment the index
                                            scalepoints_right[scale][0]++;

                                            // register this right point in the coarse lookup for
                                            // every left-image search bucket that could match it
                                            x2 = x / searchfactor;
                                            for (int xx = x2; xx < x2 + max_disp2; xx++)
                                            {
                                                if ((xx > -1) && (xx < max_wdth))
                                                {
                                                    idx2 = scalepoints_lookup[scale][xx][0] + 1;
                                                    scalepoints_lookup[scale][xx][idx2] = idx;
                                                    scalepoints_lookup[scale][xx][0]++;
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }

                        // record previous responses
                        prev_left_grad = left_grad;
                        prev_right_grad = right_grad;
                        prev_left_diff = left_diff;
                        prev_right_diff = right_diff;
                    }
                }

                // stereo match, beginning with the largest scale
                for (scale = no_of_scales - 1; scale >= 0; scale--)
                {
                    no_of_points_left = scalepoints_left[scale][0];

                    // for each possible match in the left image
                    for (int i = no_of_points_left - 1; i >= 0; i--)
                    {
                        disp = -1;

                        // get the position and response magnitude of the left point
                        x_left = scalepoints_left[scale][i + 1];
                        vertical_left = img_left.column_maximal_edge[x_left];
                        diff_left = wavepoints_left[y][x_left][0];

                        // neighbouring pattern context, clamped to the row bounds
                        x_left2 = x_left - 2;
                        if (x_left2 < 0) x_left2 = 0;
                        x_left3 = x_left + 2;
                        if (x_left3 >= ww) x_left3 = ww - 1;
                        prev_pattern_left = wavepoints_left_pattern[y][x_left2];
                        next_pattern_left = wavepoints_left_pattern[y][x_left3];

                        min_response_difference = match_threshold;
                        x2 = x_left / searchfactor;
                        no_of_candidates = scalepoints_lookup[scale][x2][0];

                        // for each possible match in the right image
                        // note here that we scan from right to left
                        for (int j = no_of_candidates - 1; j >= 0; j--)
                        {
                            idx2 = scalepoints_lookup[scale][x2][j + 1];

                            // get the horizontal position of the possible match in the right image
                            x_right = scalepoints_right[scale][idx2];

                            // what's the disparity ?
                            dx = x_left - x_right;

                            // is the disparity in the range we expect ?
                            if ((dx > -1) && (dx < max_disp))
                            {
                                // vertical context checking
                                // (fixed: previously read img_left here, comparing the
                                // left image's edge context against itself)
                                vertical_right = img_right.column_maximal_edge[x_right];
                                int dv = vertical_left - vertical_right;
                                if (dv < 0) dv = -dv;

                                // is the vertical context within tolerance ?
                                if (dv < max_vertical_edge_difference)
                                {
                                    // check the ordering of patterns
                                    x_right2 = x_right - 2;
                                    if (x_right2 < 0) x_right2 = 0;
                                    prev_pattern_right = wavepoints_right_pattern[y][x_right2];
                                    if (prev_pattern_left == prev_pattern_right)
                                    {
                                        x_right3 = x_right + 2;
                                        if (x_right3 >= ww) x_right3 = ww - 1;
                                        next_pattern_right = wavepoints_right_pattern[y][x_right3];
                                        if (next_pattern_left == next_pattern_right)
                                        {
                                            // check the response magnitude difference
                                            diff_right = wavepoints_right[y][x_right][0];
                                            response_difference = diff_right - diff_left;
                                            if (response_difference < 0) response_difference = -response_difference;
                                            response_difference *= dv;

                                            // is the magnitude difference the best that we've found so far ?
                                            if (response_difference < min_response_difference)
                                            {
                                                // record the disparity and minimum difference
                                                disp = dx;
                                                min_response_difference = response_difference;
                                            }
                                        }
                                    }
                                }
                            }

                            // if the horizontal difference is too large then we may
                            // as well abandon the search
                            if (dx > max_disp) break;
                        }

                        if (disp > -1)
                        {
                            // how confident are we in this match ?
                            confidence = 1.0f - (min_response_difference / match_threshold);
                            confidence /= (no_of_scales - scale);
                            confidence *= confidence;

                            // get the position on the disparity map
                            int mx = (x_left + disp) / disparity_map_compression;
                            int my = y / disparity_map_compression;

                            // update the disparity map using a gaussian
                            // probability distribution
                            updateDisparityMap(mx, my, compressed_wdth, compressed_hght,
                                               scale, disp * step_size, confidence);
                        }
                    }
                }
            }
        }
    }

    // normalise the disparity map by the accumulated hit weights
    float disparity_value;
    for (y = compressed_hght; y >= 0; y--)
    {
        for (x = compressed_wdth - 1; x >= 0; x--)
        {
            disparity_value = disparity_map[x][y];
            if (disparity_value < 0)
            {
                disparity_value = 0;
            }
            else
            {
                // guard against 0/0 = NaN for cells which received no matches
                // (fixed: previously divided unconditionally)
                if (disparity_hits[x][y] > 0)
                {
                    disparity_value /= disparity_hits[x][y];
                    disparity_map[x][y] = disparity_value;
                }
            }
        }
    }

    // get a fixed quantity of features which may
    // subsequently be used to create ray models
    getSelectedFeatures(wdth, hght);
}