/// <summary>
/// Initialize frame grabber with camera
/// </summary>
/// <param name="camera">Camera to grab frames from</param>
public FrameGrabber(Camera camera)
{
    _camera = camera;
    _lock_event = new object();
    _fts = new FixedTimeStep(30);
    _bw = new BackgroundWorker();
    _bw.WorkerSupportsCancellation = true;
    _bw.DoWork += new DoWorkEventHandler(_bw_DoWork);
    _bw.RunWorkerCompleted += new RunWorkerCompletedEventHandler(_bw_RunWorkerCompleted);
    _stopped = new ManualResetEvent(false);
}
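// A minimal sketch (kept commented out because the real handlers are defined
// elsewhere in this class) of how the pieces wired up above typically interact:
// the worker loop is paced by the FixedTimeStep, honors cancellation, and
// signals _stopped on completion so a shutdown caller can wait for it.
// FixedTimeStep's API is not shown in this excerpt, so UpdateAndWait() is an
// assumed name for its throttling call.
//
// private void _bw_DoWork(object sender, DoWorkEventArgs e)
// {
//     BackgroundWorker bw = (BackgroundWorker)sender;
//     while (!bw.CancellationPending)
//     {
//         _fts.UpdateAndWait();                  // hypothetical: block until the next 30 Hz tick
//         Image<Bgr, Byte> frame = _camera.Frame();
//         lock (_lock_event)
//         {
//             // hand the frame to subscribers (hypothetical event dispatch)
//         }
//     }
//     e.Cancel = true;
// }
//
// private void _bw_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
// {
//     _stopped.Set();                            // unblock any thread waiting for shutdown
// }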
/// <summary>
/// Creates an off-axis perspective matrix from camera intrinsics.
/// </summary>
/// <remarks>Note that OSG lays out its matrices in row-major order, so you
/// need to transpose this matrix before passing it to OSG.</remarks>
/// <param name="camera">Camera with intrinsic parameters</param>
/// <param name="near">Near clipping plane distance</param>
/// <param name="far">Far clipping plane distance</param>
/// <returns>Perspective matrix in column-major order</returns>
public static Matrix FromCamera(Camera camera, double near, double far)
{
    if (!camera.HasIntrinsics)
    {
        throw new ArgumentException("Camera has no intrinsic calibration");
    }

    // see
    // http://opencv.willowgarage.com/wiki/Posit
    // http://opencv.willowgarage.com/documentation/camera_calibration_and_3d_reconstruction.html
    // http://aoeu.snth.net/static/gen-perspective.pdf
    // http://www.hitl.washington.edu/artoolkit/mail-archive/message-thread-00654-Re--Questions-concering-.html

    Matrix m = new Matrix(4, 4, 0.0);

    Emgu.CV.IntrinsicCameraParameters icp = camera.Intrinsics;
    double fx = icp.IntrinsicMatrix[0, 0];
    double fy = icp.IntrinsicMatrix[1, 1];
    double px = icp.IntrinsicMatrix[0, 2];
    double py = icp.IntrinsicMatrix[1, 2];
    double w = camera.FrameWidth;
    double h = camera.FrameHeight;
    double dist = far - near;

    // First row
    m[0, 0] = 2.0 * (fx / w);
    m[0, 2] = 2.0 * (px / w) - 1.0;

    // Second row
    m[1, 1] = 2.0 * (fy / h);
    m[1, 2] = 2.0 * (py / h) - 1.0;

    // Third row
    m[2, 2] = -(far + near) / dist;
    m[2, 3] = -2.0 * far * near / dist;

    // Fourth row (note the flip)
    m[3, 2] = -1.0;

    return m;
}
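// Worked check (illustrative numbers, not from the original source): for a
// 640x480 camera with fx = fy = 500 and a centered principal point
// (px, py) = (320, 240), the off-axis terms vanish:
//
//   m[0, 0] = 2.0 * (500 / 640.0)        = 1.5625
//   m[0, 2] = 2.0 * (320 / 640.0) - 1.0  = 0.0      // centered -> symmetric frustum
//   m[1, 1] = 2.0 * (500 / 480.0)        ~ 2.0833
//   m[1, 2] = 2.0 * (240 / 480.0) - 1.0  = 0.0
//
// A principal point off the image center yields non-zero m[0, 2] / m[1, 2],
// which is exactly what makes the frustum off-axis. Callers targeting a
// row-major consumer such as OSG would transpose the result first, per the
// remarks above.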
/// <summary>
/// Calculates the transformation matrix used to transform the 3D object points, which were scanned with reference
/// to the moved marker coordinate system, back to the initial marker system and from there back to the camera system.
/// The camera object is needed in order to obtain the current camera frame; furthermore, the camera's intrinsics are
/// needed to perform an extrinsic calibration. Note that any kind of pattern can be used.
///
/// The transformation matrix is calculated as follows:
/// * If 'UpdateTransformation' is called for the first time, an extrinsic calibration is performed in order to find
///   the initial orientation of the used pattern.
/// * If the initial orientation has already been found, the extrinsic calibration is performed again. Afterwards
///   the current orientation is available, represented by the extrinsic matrix.
/// * Extend the extrinsic matrix (3x4, current position) to a homogeneous 4x4 matrix.
/// * The final transformation matrix is calculated as: _final = initial * current.Inverse();
/// </summary>
public bool UpdateTransformation(Camera the_cam)
{
    Matrix extrinsicM1 = Matrix.Identity(4, 4);
    ExtrinsicCameraParameters ecp_pattern = null;
    ExtrinsicCalibration ec_pattern = null;
    Emgu.CV.Image<Gray, Byte> gray_img = null;
    System.Drawing.PointF[] currentImagePoints;

    // First call: calculate extrinsics for the initial position.
    if (_firstCallUpdateTransformation && _cpattern != null)
    {
        gray_img = the_cam.Frame().Convert<Gray, Byte>();
        // Set the pattern's intrinsic parameters property.
        _cpattern.IntrinsicParameters = the_cam.Intrinsics;

        if (_cpattern.FindPattern(gray_img, out currentImagePoints))
        {
            try
            {
                // Extrinsic calibration (initial position).
                ec_pattern = new ExtrinsicCalibration(_cpattern.ObjectPoints, the_cam.Intrinsics);
                ecp_pattern = ec_pattern.Calibrate(currentImagePoints);

                if (ecp_pattern != null)
                {
                    _ecp_A = ecp_pattern;
                    _extrinsicMatrix_A = ExtractExctrinsicMatrix(_ecp_A);
                    _logger.Info("Initial Position found.");
                    _firstCallUpdateTransformation = false;
                }
            }
            catch (Exception e)
            {
                _logger.Warn("Initial Position - Caught Exception: {0}.", e);
                _firstCallUpdateTransformation = true;
                _ecp_A = null;
                return false;
            }
        }
        else
        {
            _logger.Warn("Pattern not found.");
            _firstCallUpdateTransformation = true;
            _ecp_A = null;
            return false;
        }
    }

    // If the initial position and pattern are available, calculate the transformation.
    if (_ecp_A != null && _cpattern != null)
    {
        gray_img = the_cam.Frame().Convert<Gray, Byte>();

        // Try to find the composite pattern.
        if (_cpattern.FindPattern(gray_img, out currentImagePoints))
        {
            // Extrinsic calibration in order to find the current orientation.
            ec_pattern = new ExtrinsicCalibration(_cpattern.ObjectPoints, the_cam.Intrinsics);
            ecp_pattern = ec_pattern.Calibrate(currentImagePoints);

            if (ecp_pattern != null)
            {
                // Extract the current extrinsic matrix.
                extrinsicM1 = ExtractExctrinsicMatrix(ecp_pattern);
                _logger.Info("UpdateTransformation: Transformation found.");
            }
            else
            {
                _logger.Warn("UpdateTransformation: Extrinsics of moved marker system not found.");
                return false;
            }
        }
        else
        {
            _logger.Warn("UpdateTransformation: Pattern not found.");
            return false;
        }

        // Now calculate the final transformation matrix.
        _final = _extrinsicMatrix_A * extrinsicM1.Inverse();
        return true;
    }
    else
    {
        _logger.Warn("UpdateTransformation: No Pattern has been chosen.");
        return false;
    }
}
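// Usage sketch (hypothetical helper, not part of the original class), assuming
// the Matrix type supports rectangular construction and 4x4 * 4x1 products in
// the same style as the calls above: per the summary, _final maps a scanned
// object point from the moved marker system back to the initial marker system.
private Matrix TransformScannedPoint(double x, double y, double z)
{
    Matrix p = new Matrix(4, 1, 0.0);          // homogeneous column vector
    p[0, 0] = x;
    p[1, 0] = y;
    p[2, 0] = z;
    p[3, 0] = 1.0;
    return _final * p;                         // _final = initial * current.Inverse()
}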