void _bw_DoWork(object sender, DoWorkEventArgs e)
{
    BackgroundWorker bw = sender as BackgroundWorker;
    Camera camera = _camera;

    // Note: the image is disposed once the camera gets disposed.
    // Therefore you should copy the image if it is needed in another
    // thread, or make sure the camera is not disposed.
    Image<Bgr, byte> img;
    while (!bw.CancellationPending)
    {
        img = camera.Frame();
        if (img != null)
        {
            lock (_lock_event)
            {
                if (_on_frame != null)
                {
                    _on_frame(this, img);
                }
            }
            img.Dispose();
        }
        _fts.UpdateAndWait();
    }
    e.Cancel = true;
    _stopped.Set();
}
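// A minimal usage sketch (an assumption, not part of the original source) of a
// handler subscribed to _on_frame. As the note above says, the frame is disposed
// with the camera, so the handler clones it before handing it to another thread.
// 'OnFrameReceived' and '_processingQueue' are hypothetical names.
readonly ConcurrentQueue<Image<Bgr, byte>> _processingQueue =
    new ConcurrentQueue<Image<Bgr, byte>>(); // requires System.Collections.Concurrent

void OnFrameReceived(object sender, Image<Bgr, byte> img)
{
    // Image<TColor, TDepth>.Clone() creates an independent copy, so the
    // consumer thread stays safe even after the original image is disposed.
    Image<Bgr, byte> copy = img.Clone();
    _processingQueue.Enqueue(copy); // the consumer must Dispose() the copy when done
}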
/// <summary>
/// Calculates the transformation matrix that transforms the 3D object points, which were scanned with
/// reference to the moved marker coordinate system, back into the initial marker system and hence into
/// the camera system. The camera object is needed to obtain the current camera frame. Furthermore, the
/// camera's intrinsics are needed to perform an extrinsic calibration. Note that any kind of pattern
/// can be used.
///
/// The transformation matrix is calculated as follows:
/// * If 'UpdateTransformation' is called for the first time, an extrinsic calibration is performed to
///   find the initial orientation of the pattern.
/// * If the initial orientation has already been found, the extrinsic calibration is performed again.
///   Afterwards the current orientation is available, represented by the extrinsic matrix.
/// * Extend the 3x4 extrinsic matrix (current position) to a homogeneous 4x4 matrix.
/// * The final transformation matrix is calculated as: _final = initial * current.Inverse();
/// </summary>
public bool UpdateTransformation(Camera the_cam)
{
    Matrix extrinsicM1 = Matrix.Identity(4, 4);
    ExtrinsicCameraParameters ecp_pattern = null;
    ExtrinsicCalibration ec_pattern = null;
    Emgu.CV.Image<Gray, Byte> gray_img = null;
    System.Drawing.PointF[] currentImagePoints;

    // First call: calculate the extrinsics for the initial position.
    if (_firstCallUpdateTransformation && _cpattern != null)
    {
        gray_img = the_cam.Frame().Convert<Gray, Byte>();
        // Set the pattern's property: intrinsic parameters.
        _cpattern.IntrinsicParameters = the_cam.Intrinsics;

        if (_cpattern.FindPattern(gray_img, out currentImagePoints))
        {
            try
            {
                // Extrinsic calibration (initial position).
                ec_pattern = new ExtrinsicCalibration(_cpattern.ObjectPoints, the_cam.Intrinsics);
                ecp_pattern = ec_pattern.Calibrate(currentImagePoints);
                if (ecp_pattern != null)
                {
                    _ecp_A = ecp_pattern;
                    _extrinsicMatrix_A = ExtractExctrinsicMatrix(_ecp_A);
                    _logger.Info("Initial Position found.");
                    _firstCallUpdateTransformation = false;
                }
            }
            catch (Exception e)
            {
                _logger.Warn("Initial Position - Caught Exception: {0}.", e);
                _firstCallUpdateTransformation = true;
                _ecp_A = null;
                return false;
            }
        }
        else
        {
            _logger.Warn("Pattern not found.");
            _firstCallUpdateTransformation = true;
            _ecp_A = null;
            return false;
        }
    }

    // If the initial position and the pattern are available, calculate the transformation.
    if (_ecp_A != null && _cpattern != null)
    {
        gray_img = the_cam.Frame().Convert<Gray, Byte>();

        // Try to find the composite pattern.
        if (_cpattern.FindPattern(gray_img, out currentImagePoints))
        {
            // Extrinsic calibration in order to find the current orientation.
            ec_pattern = new ExtrinsicCalibration(_cpattern.ObjectPoints, the_cam.Intrinsics);
            ecp_pattern = ec_pattern.Calibrate(currentImagePoints);
            if (ecp_pattern != null)
            {
                // Extract the current extrinsic matrix.
                extrinsicM1 = ExtractExctrinsicMatrix(ecp_pattern);
                _logger.Info("UpdateTransformation: Transformation found.");
            }
            else
            {
                _logger.Warn("UpdateTransformation: Extrinsics of moved marker system not found.");
                return false;
            }
        }
        else
        {
            _logger.Warn("UpdateTransformation: Pattern not found.");
            return false;
        }

        // Now calculate the final transformation matrix.
        _final = _extrinsicMatrix_A * extrinsicM1.Inverse();
        return true;
    }
    else
    {
        _logger.Warn("UpdateTransformation: No Pattern has been chosen.");
        return false;
    }
}
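// A minimal sketch (an assumption, not the original implementation) of the
// ExtractExctrinsicMatrix helper referenced above. Emgu CV's
// ExtrinsicCameraParameters.ExtrinsicMatrix is the 3x4 [R|t] matrix; copying it
// into the upper three rows of a 4x4 identity yields the homogeneous matrix
// that UpdateTransformation inverts and multiplies. The 'Matrix' type and its
// indexer are assumed to match the Matrix.Identity(4, 4) usage above.
private Matrix ExtractExctrinsicMatrix(ExtrinsicCameraParameters ecp)
{
    Matrix m = Matrix.Identity(4, 4);
    for (int row = 0; row < 3; row++)
    {
        for (int col = 0; col < 4; col++)
        {
            m[row, col] = ecp.ExtrinsicMatrix[row, col];
        }
    }
    // The fourth row stays (0, 0, 0, 1) from the identity initialization,
    // making the matrix homogeneous and therefore invertible.
    return m;
}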