/// <summary>
/// detect the centre spot within the image.
/// Scans a window in the middle of the image (or the middle of the ROI) for the
/// position where the inner region is darkest relative to its surround, then maps
/// that position to the nearest detected grid intersection via the coverage map.
/// </summary>
/// <param name="calibration_image">mono calibration image, one byte per pixel in row-major order</param>
/// <param name="width">image width in pixels</param>
/// <param name="height">image height in pixels</param>
/// <param name="grid_x">returns the grid x coordinate of the intersection nearest the spot (0 if none found)</param>
/// <param name="grid_y">returns the grid y coordinate of the intersection nearest the spot (0 if none found)</param>
private void detectCentreSpot(Byte[] calibration_image, int width, int height, ref int grid_x, ref int grid_y)
{
    grid_x = 0;
    grid_y = 0;

    // radius of the search template, proportional to image size
    // NOTE(review): dividing by 1.0f has no effect — possibly a leftover from a
    // tunable divisor; confirm the intended template size
    int search_radius_x = (int)(width * separation_factor / 1.0f);
    if (search_radius_x < 2) search_radius_x = 2;
    int search_radius_y = (int)(height * separation_factor / 1.0f);
    if (search_radius_y < 2) search_radius_y = 2;
    // half radius marks the boundary between the "inner" and "outer" sample regions
    int search_radius_x2 = search_radius_x / 2;
    int search_radius_y2 = search_radius_y / 2;

    // search only the central half of the image...
    int tx = width/4;
    int bx = width - 1 - tx;
    int ty = height/4;
    int by = height - 1 - ty;
    // ...or the central half of the region of interest, when one is defined
    if (ROI != null)
    {
        tx = ROI.tx + ((ROI.bx - ROI.tx) / 4);
        bx = ROI.tx + ((ROI.bx - ROI.tx) * 3 / 4);
        ty = ROI.ty + ((ROI.by - ROI.ty) / 4);
        by = ROI.ty + ((ROI.by - ROI.ty) * 3 / 4);
    }

    float max_diff = 0;
    // default to the image centre if no stronger response is found
    centre_spot = new calibration_point(width / 2, height / 2);
    for (int x = tx; x < bx; x++)
    {
        for (int y = ty; y < by; y++)
        {
            // accumulate pixel intensities for the inner square and the
            // surrounding outer ring of the template centred at (x, y)
            int inner = 0;
            int inner_hits = 0;
            int outer = 0;
            int outer_hits = 0;
            // NOTE(review): xx/yy can leave the image bounds when the search
            // radius exceeds the tx/ty margin — verify separation_factor keeps
            // the template inside the image
            for (int xx = x - search_radius_x; xx < x + search_radius_x; xx++)
            {
                for (int yy = y - search_radius_y; yy < y + search_radius_y; yy++)
                {
                    // only sample pixels not already claimed by a detected
                    // grid feature (coverage is null there — presumably; verify)
                    if (coverage[xx, yy] == null)
                    {
                        int n = (yy * width) + xx;
                        if ((xx > x - search_radius_x2) && (xx < x + search_radius_x2) &&
                            (yy > y - search_radius_y2) && (yy < y + search_radius_y2))
                        {
                            inner += calibration_image[n];
                            inner_hits++;
                        }
                        else
                        {
                            outer += calibration_image[n];
                            outer_hits++;
                        }
                    }
                }
            }
            if (inner_hits > 0)
            {
                // a dark spot on a light background gives a large
                // (outer mean - inner mean) difference
                float diff = (outer / (float)outer_hits) - (inner / (float)inner_hits);
                if (diff > max_diff)
                {
                    max_diff = diff;
                    centre_spot.x = x;
                    centre_spot.y = y;
                }
            }
        }
    }

    // step from the spot towards the adjacent grid intersection, depending
    // upon which corner of a grid cell the spot is expected to occupy
    int cx = (int)centre_spot.x;
    int cy = (int)centre_spot.y;
    switch (centre_spot_position)
    {
        case CENTRE_SPOT_NW:
        {
            cx = (int)centre_spot.x + search_radius_x;
            cy = (int)centre_spot.y + search_radius_y;
            break;
        }
        case CENTRE_SPOT_NE:
        {
            cx = (int)centre_spot.x - search_radius_x;
            cy = (int)centre_spot.y + search_radius_y;
            break;
        }
        case CENTRE_SPOT_SW:
        {
            cx = (int)centre_spot.x + search_radius_x;
            cy = (int)centre_spot.y - search_radius_y;
            break;
        }
        case CENTRE_SPOT_SE:
        {
            cx = (int)centre_spot.x - search_radius_x;
            cy = (int)centre_spot.y - search_radius_y;
            break;
        }
    }

    if (coverage[cx, cy] != null)
    {
        // the offset position lands directly on a detected grid intersection
        grid_x = coverage[cx, cy].grid_x;
        grid_y = coverage[cx, cy].grid_y;
        // average observed image position of that intersection (x/y are
        // accumulated sums over 'hits' observations)
        int px = (int)(coverage[cx, cy].x / coverage[cx, cy].hits);
        int py = (int)(coverage[cx, cy].y / coverage[cx, cy].hits);
        // highlight the chosen intersection in the corners display image
        drawing.drawBox(corners_image, width, height, px, py, 4, 255, 255, 255, 0);
    }
    else
    {
        // no intersection exactly at (cx, cy): search outwards in expanding
        // square rings of radius up to 3 pixels for the nearest one
        // NOTE(review): cx-r / cy-r can go below zero near the image border —
        // confirm the spot position keeps this search inside the coverage array
        bool found = false;
        int r = 0;
        while ((r < 3) && (!found))
        {
            int xx = cx - r;
            int yy = cy;
            while ((xx <= cx + r) && (!found))
            {
                yy = cy - r;
                while ((yy <= cy + r) && (!found))
                {
                    if (coverage[xx, yy] != null)
                    {
                        grid_x = coverage[xx, yy].grid_x;
                        grid_y = coverage[xx, yy].grid_y;
                        int px = (int)(coverage[xx, yy].x / coverage[xx, yy].hits);
                        int py = (int)(coverage[xx, yy].y / coverage[xx, yy].hits);
                        drawing.drawBox(corners_image, width, height, px, py, 4, 255, 255, 255, 0);
                        found = true;
                    }
                    yy++;
                }
                xx++;
            }
            r++;
        }
    }
}
/// <summary>
/// parse an xml node to extract camera calibration parameters.
/// Recognised elements set the corresponding fields; the method then recurses
/// into all child nodes, so it may be called once on the document root.
/// </summary>
/// <param name="xnod">xml node to examine</param>
/// <param name="level">recursion depth within the document tree</param>
public void LoadFromXml(XmlNode xnod, int level)
{
    // calibration files store all numbers in en-GB format ("." as the decimal
    // separator).  Using this explicit culture for every conversion avoids
    // mis-parsing on machines whose current culture uses "," as the decimal
    // separator (the original code parsed Scale, RotationDegrees and RMSerror
    // with the current culture, which is a bug on such locales)
    IFormatProvider format = new System.Globalization.CultureInfo("en-GB");

    if (xnod.Name == "FieldOfViewDegrees")
        camera_FOV_degrees = Convert.ToInt32(xnod.InnerText, format);

    if (xnod.Name == "ImageDimensions")
    {
        // stored as "width,height"
        String[] dimStr = xnod.InnerText.Split(',');
        image_width = Convert.ToInt32(dimStr[0], format);
        image_height = Convert.ToInt32(dimStr[1], format);
    }

    if (xnod.Name == "CentreOfDistortion")
    {
        // stored as "x,y" in image coordinates
        String[] centreStr = xnod.InnerText.Split(',');
        centre_of_distortion = new calibration_point(
            Convert.ToSingle(centreStr[0], format),
            Convert.ToSingle(centreStr[1], format));
    }

    if (xnod.Name == "DistortionCoefficients")
    {
        if (xnod.InnerText != "")
        {
            // comma separated polynomial coefficients, lowest order first
            String[] coeffStr = xnod.InnerText.Split(',');
            fitter = new polynomial();
            fitter.SetDegree(coeffStr.Length - 1);
            for (int i = 0; i < coeffStr.Length; i++)
                fitter.SetCoeff(i, Convert.ToSingle(coeffStr[i], format));
        }
    }

    if (xnod.Name == "Scale")
    {
        scale = Convert.ToSingle(xnod.InnerText, format);
    }

    if (xnod.Name == "RotationDegrees")
    {
        // stored in degrees; converted to radians for internal use
        rotation = Convert.ToSingle(xnod.InnerText, format) / 180.0f * (float)Math.PI;
    }

    if (xnod.Name == "RMSerror")
    {
        min_RMS_error = Convert.ToSingle(xnod.InnerText, format);

        // all parameters needed by the lookup table have been read by the
        // time RMSerror appears, so rebuild the calibration map now
        updateCalibrationMap();
    }

    // call recursively on all children of the current node
    if (xnod.HasChildNodes)
    {
        XmlNode xnodWorking = xnod.FirstChild;
        while (xnodWorking != null)
        {
            LoadFromXml(xnodWorking, level + 1);
            xnodWorking = xnodWorking.NextSibling;
        }
    }
}
/// <summary>
/// updates the rectified position of the centre of the calibration pattern.
/// This can be used to calculate the horizontal and vertical alignment
/// offsets for stereo cameras.  If the centre point cannot be rectified the
/// previous value of pattern_centre_rectified is left untouched.
/// </summary>
private void updatePatternCentreRectified()
{
    int rx = 0;
    int ry = 0;

    // attempt to map the pattern centre through the rectification transform
    bool rectified = rectifyPoint((int)pattern_centre_x, (int)pattern_centre_y,
                                  ref rx, ref ry);
    if (!rectified) return;

    pattern_centre_rectified = new calibration_point(rx, ry);
}
/// <summary>
/// estimates lens distortion from the detected calibration grid.
/// Computes an ideal (rectified) position for every grid intersection from the
/// known camera geometry, then searches a small neighbourhood for the centre of
/// distortion giving the lowest RMS error when a degree-2 polynomial is fitted
/// between observed and rectified positions.  Sets pattern_centre_x/y,
/// distance_to_pattern_centre, centre_of_distortion, grid[..].rectified_x/y
/// and fitter.
/// </summary>
/// <param name="width">image width in pixels</param>
/// <param name="height">image height in pixels</param>
/// <param name="grid_x">grid x coordinate of the pattern centre intersection</param>
/// <param name="grid_y">grid y coordinate of the pattern centre intersection</param>
private void detectLensDistortion(int width, int height, int grid_x, int grid_y)
{
    if (grid != null)
    {
        if (grid[grid_x, grid_y] != null)
        {
            // field of vision in radians; vertical FOV scaled by aspect ratio
            float FOV_horizontal = camera_FOV_degrees * (float)Math.PI / 180.0f;
            float FOV_vertical = FOV_horizontal * height / (float)width;

            // center point of the grid within the image
            pattern_centre_x = (int)grid[grid_x, grid_y].x;
            pattern_centre_y = (int)grid[grid_x, grid_y].y;

            // calculate the distance to the centre grid point on the ground plane
            float ground_dist_to_point = camera_dist_to_pattern_centre_mm;

            // line of sight distance between the camera lens and the centre point
            float camera_to_point_dist = (float)Math.Sqrt((ground_dist_to_point * ground_dist_to_point) +
                                                          (camera_height_mm * camera_height_mm));
            distance_to_pattern_centre = camera_to_point_dist;

            // tilt angle at the centre point
            float centre_tilt = (float)Math.Asin(camera_height_mm / camera_to_point_dist);

            // angle subtended by one grid spacing at the centre
            float point_pan = (float)Math.Asin(calibration_pattern_spacing_mm / camera_to_point_dist);

            // grid width at the centre point, assuming a linear
            // angle-to-pixel mapping across the horizontal FOV
            float x1 = pattern_centre_x + (point_pan * width / FOV_horizontal);

            // calculate the distance to the observed grid point on the ground plane
            // NOTE(review): this reference point uses (grid_y + 2) rows beyond the
            // centre while the per-point loop below uses (grid_y - y) — confirm the
            // sign/offset convention is intentional
            ground_dist_to_point = camera_dist_to_pattern_centre_mm +
                                   ((grid_y + 2) * calibration_pattern_spacing_mm);

            // line of sight distance between the camera lens and the observed point
            camera_to_point_dist = (float)Math.Sqrt((ground_dist_to_point * ground_dist_to_point) +
                                                    (camera_height_mm * camera_height_mm));

            // tilt angle
            float point_tilt = (float)Math.Asin(camera_height_mm / camera_to_point_dist);

            // angle subtended by one grid spacing
            point_pan = (float)Math.Asin(calibration_pattern_spacing_mm / camera_to_point_dist);

            // calc the position of the grid point within the image after rectification
            float x2 = pattern_centre_x + (point_pan * width / FOV_horizontal);
            float y2 = pattern_centre_y + ((point_tilt - centre_tilt) * height / FOV_vertical);

            // calc the gradient: change of apparent grid width per image row,
            // used to scale grid spacing by perspective
            float grad = (x2 - x1) / (float)(y2 - pattern_centre_y);

            // fraction of one grid spacing represented by the stereo baseline offset
            float baseline_fraction = baseline_offset / (float)(calibration_pattern_spacing_mm);

            centre_of_distortion = new calibration_point(0, 0);
            int hits = 0;
            float cx = 0, cy = 0;
            for (int x = 0; x < grid.GetLength(0); x++)
            {
                for (int y = 0; y < grid.GetLength(1); y++)
                {
                    if (grid[x, y] != null)
                    {
                        // calculate the distance to the observed grid point on the ground plane
                        ground_dist_to_point = camera_dist_to_pattern_centre_mm +
                                               ((grid_y - y) * calibration_pattern_spacing_mm);

                        // line of sight distance between the camera lens and the observed point
                        camera_to_point_dist = (float)Math.Sqrt((ground_dist_to_point * ground_dist_to_point) +
                                                                (camera_height_mm * camera_height_mm));

                        // tilt angle
                        point_tilt = (float)Math.Asin(camera_height_mm / camera_to_point_dist);

                        // distance to the point on the ground plane along the x (horizontal) axis
                        float ground_dist_to_point_x = ((x - grid_x) * calibration_pattern_spacing_mm) + baseline_offset;

                        // pan angle
                        point_pan = (float)Math.Asin(ground_dist_to_point_x / camera_to_point_dist);

                        // calc the position of the grid point within the image after rectification:
                        // w is the perspective-corrected grid spacing at this row,
                        // wbaseline compensates for the stereo baseline offset
                        float w = ((x1 - pattern_centre_x) + ((grid[x, y].y - pattern_centre_y) * grad));
                        float wbaseline = baseline_fraction * (grid[x, y].y - pattern_centre_y) * grad;
                        float rectified_x = pattern_centre_x + (w * (x - grid_x)) - wbaseline;
                        float rectified_y = pattern_centre_y + ((point_tilt - centre_tilt) * height / FOV_vertical);
                        grid[x, y].rectified_x = rectified_x;
                        grid[x, y].rectified_y = rectified_y;

                        // accumulate the average displacement between observed
                        // and rectified positions
                        cx += (grid[x, y].x - rectified_x);
                        cy += (grid[x, y].y - rectified_y);
                        hits++;
                    }
                }
            }
            if (hits > 0)
            {
                // a ballpark figure for the centre of distortion: image centre
                // plus the mean observed-minus-rectified displacement
                centre_of_distortion.x = (width / 2) + (cx / (float)hits);
                centre_of_distortion.y = (height / 2) + (cy / (float)hits);

                // exhaustively search a small square around the ballpark centre
                // for the position giving the best polynomial fit
                float winner_x = centre_of_distortion.x;
                float winner_y = centre_of_distortion.y;
                float min_rms_err = 999999;
                int radius = 5;
                // NOTE(review): search_x/search_y are never passed into the fit —
                // addDataPoint presumably reads centre_of_distortion, but that field
                // is not updated inside this loop, so every candidate may produce the
                // same fit; verify against addDataPoint's implementation
                for (int search_x = (int)centre_of_distortion.x - radius; search_x <= (int)centre_of_distortion.x + radius; search_x++)
                {
                    for (int search_y = (int)centre_of_distortion.y - radius; search_y <= (int)centre_of_distortion.y + radius; search_y++)
                    {
                        // fit a degree-2 polynomial between observed and
                        // rectified grid positions
                        polynomial curvefit = new polynomial();
                        curvefit.SetDegree(2);
                        for (int x = 0; x < grid.GetLength(0); x++)
                        {
                            for (int y = 0; y < grid.GetLength(1); y++)
                            {
                                if (grid[x, y] != null)
                                {
                                    addDataPoint(grid[x, y].x, grid[x, y].y,
                                                 grid[x, y].rectified_x, grid[x, y].rectified_y,
                                                 curvefit);

                                    // intermediary points: midpoints between vertically
                                    // adjacent intersections, to densify the fit data
                                    if (y > 0)
                                    {
                                        if (grid[x, y - 1] != null)
                                        {
                                            float xx = grid[x, y - 1].x + ((grid[x, y].x - grid[x, y - 1].x) / 2);
                                            float yy = grid[x, y - 1].y + ((grid[x, y].y - grid[x, y - 1].y) / 2);
                                            float rectified_xx = grid[x, y - 1].rectified_x + ((grid[x, y].rectified_x - grid[x, y - 1].rectified_x) / 2);
                                            float rectified_yy = grid[x, y - 1].rectified_y + ((grid[x, y].rectified_y - grid[x, y - 1].rectified_y) / 2);
                                            addDataPoint(xx, yy, rectified_xx, rectified_yy, curvefit);
                                        }
                                    }
                                    // midpoints between horizontally adjacent intersections
                                    if (x > 0)
                                    {
                                        if (grid[x-1, y] != null)
                                        {
                                            float xx = grid[x-1, y].x + ((grid[x, y].x - grid[x-1, y].x) / 2);
                                            float yy = grid[x-1, y].y + ((grid[x, y].y - grid[x-1, y].y) / 2);
                                            float rectified_xx = grid[x-1, y].rectified_x + ((grid[x, y].rectified_x - grid[x-1, y].rectified_x) / 2);
                                            float rectified_yy = grid[x-1, y].rectified_y + ((grid[x, y].rectified_y - grid[x-1, y].rectified_y) / 2);
                                            addDataPoint(xx, yy, rectified_xx, rectified_yy, curvefit);
                                        }
                                    }
                                }
                            }
                        }
                        curvefit.Solve();

                        // keep the candidate centre (and fitted polynomial)
                        // with the lowest RMS error
                        float rms_err = curvefit.GetRMSerror();
                        if (rms_err < min_rms_err)
                        {
                            min_rms_err = rms_err;
                            winner_x = search_x;
                            winner_y = search_y;
                            fitter = curvefit;
                        }
                    }
                }
                centre_of_distortion.x = winner_x;
                centre_of_distortion.y = winner_y;
            }
        }
    }
}