/// <summary>
/// Sets the virtual 3D display cameras up to match the intrinsic parameters
/// (focal lengths Fku/Fkv, principal point U0/V0, radial distortion Kd1) of
/// the real camera used by MonoSLAM, and initialises the virtual camera pose
/// from the current state vector. The actual 3D tool widgets are currently
/// disabled (see the commented-out ThreeDToolGlowWidget calls retained from
/// the original C++ port); the pose variables computed here are kept so the
/// disabled calls can be re-enabled unchanged.
/// </summary>
public void SetUp3DDisplays()
{
    // Set display virtual camera parameters to match those of the real
    // camera being used in MonoSLAM.
    Partially_Initialised_Feature_Measurement_Model default_pifmm =
        monoslaminterface.GetDefaultFeatureTypeForInitialisation();
    if (default_pifmm == null)
    {
        Debug.WriteLine("No partially initialised feature measurement model found");
        return;
    }

    // Was an unchecked cast: a mismatched model type would have thrown
    // InvalidCastException instead of reporting and skipping.
    Line_Init_Wide_Point_Feature_Measurement_Model default_wide_pifmm =
        default_pifmm as Line_Init_Wide_Point_Feature_Measurement_Model;
    if (default_wide_pifmm == null)
    {
        Debug.WriteLine("Default feature measurement model is not a Line_Init_Wide_Point_Feature_Measurement_Model");
        return;
    }

    Graphics_Fku = default_wide_pifmm.get_camera().Fku();
    Graphics_Fkv = default_wide_pifmm.get_camera().Fkv();
    Graphics_U0 = default_wide_pifmm.get_camera().U0();
    Graphics_V0 = default_wide_pifmm.get_camera().V0();
    Graphics_Kd1 = default_wide_pifmm.get_camera().Kd1();

    // First display for external 3D view (disabled):
    /*
    threedtool = new ThreeDToolGlowWidget(this,
                 controlpanel1->Width() + controlpanel2->Width(), 0,
                 monoslaminterface->GetCameraWidth(),
                 monoslaminterface->GetCameraHeight());
    threedtool->DrawEvent.Attach(this, &MonoSLAMGlow::Draw3D);
    threedtool->ProcessHitsEvent.Attach(this, &MonoSLAMGlow::ProcessHits);
    threedtool->SetCameraParameters(Graphics_Fku, Graphics_Fkv, Graphics_U0, Graphics_V0);
    */

    // Set start position for GL camera in 3D tool.
    // This is x, y, z position.
    Vector rthreed = new Vector(0.0f, 0.2f, -1.5f);
    // This is camera orientation in my normal coordinate system
    // (z forward, y up, x left).
    Quaternion qWRthreed = new Quaternion(0.0f, 0.0f, 0.0f, 1.0f);
    // We need to adjust by this rotation to fit GL coordinate frame
    // (z backward, y up, x right), so rotate by pi about the y axis.
    Quaternion qROthreed = new Quaternion(0.0f, 1.0f, 0.0f, 0.0f);
    Quaternion qWOthreed = qWRthreed.Multiply(qROthreed);
    //threedtool.SetCameraPositionOrientation(rthreed, qWOthreed);

    // Second 3D display for images and augmented reality (disabled):
    /*
    image_threedtool = new ThreeDToolGlowWidget(this,
                       controlpanel1->Width() + controlpanel2->Width(), threedtool->Height(),
                       monoslaminterface->GetCameraWidth(), monoslaminterface->GetCameraHeight());
    image_threedtool.DrawEvent.Attach(this, &MonoSLAMGlow::ImageDraw3D);
    image_threedtool.ProcessHitsEvent.Attach(this, &MonoSLAMGlow::ImageProcessHits);
    image_threedtool.SetCameraParameters(Graphics_Fku, Graphics_Fkv, Graphics_U0, Graphics_V0);
    */

    // Set up initial state of virtual camera for image display to match
    // the state vector.
    Scene_Single scene = monoslaminterface.GetScene();
    if (scene == null)
    {
        Debug.WriteLine("No scene object defined");
        return;
    }

    Vector v = scene.get_xv();
    if (v == null)
    {
        Debug.WriteLine("Scene xp not defined");
        return;
    }

    scene.get_motion_model().func_xp(v);

    // Was an unchecked cast: guard against a non-3D motion model.
    ThreeD_Motion_Model threed_motion_model =
        scene.get_motion_model() as ThreeD_Motion_Model;
    if (threed_motion_model == null)
    {
        Debug.WriteLine("Scene motion model is not a ThreeD_Motion_Model");
        return;
    }

    Vector3D r = threed_motion_model.get_rRES();
    // q is qWR between world frame and Scene robot frame.
    Quaternion qWR = threed_motion_model.get_qRES();
    // We need to adjust by this rotation to fit GL coordinate frame
    // (z backward, y up, x right), so rotate by pi about the y axis.
    Quaternion qRO = new Quaternion(0.0f, 1.0f, 0.0f, 0.0f);
    Quaternion qWO = qWR.Multiply(qRO);
    //image_threedtool.SetCameraPositionOrientation(r, qWO);
}
/// <summary>
/// Constant-velocity motion model step: computes the predicted state fvRES
/// after a time step, together with the Jacobian dfv_by_dxvRES of the new
/// state with respect to the old state xv.
/// </summary>
/// <param name="xv">position and orientation of the camera</param>
/// <param name="u">Control vector of accelerations</param>
/// <param name="delta_t">time step in seconds</param>
public override void func_fv_and_dfv_by_dxv(Vector xv, Vector u, float delta_t)
{
    // Unpack the old state into position, orientation, velocity and
    // angular velocity so the update equations read clearly.
    Vector3D rold = new Vector3D(0, 0, 0);
    Vector3D vold = new Vector3D(0, 0, 0);
    Vector3D omegaold = new Vector3D(0, 0, 0);
    Quaternion qold = new Quaternion();
    extract_r_q_v_omega(xv, rold, qold, vold, omegaold);

    Vector3D acceleration = new Vector3D(u);

    // State update:
    //   rnew     = r + v * delta_t
    //   qnew     = q x q(omega * delta_t)
    //   vnew     = v + a * delta_t
    //   omeganew = omega
    Vector3D rnew = new Vector3D(rold + vold * delta_t);
    // Keep qwt ( = q(omega * delta_t)) around: it feeds the Jacobian below.
    Quaternion qwt = new Quaternion(omegaold * delta_t);
    Quaternion qnew = qold.Multiply(qwt);
    Vector3D vnew = new Vector3D(vold + acceleration * delta_t);
    Vector3D omeganew = omegaold;

    // Pack the pieces back into the result state vector.
    compose_xv(ref rnew, ref qnew, ref vnew, ref omeganew, ref fvRES);

    // Now on to the Jacobian. Identity is a good place to start since the
    // overall structure is:
    //   I  0            I*dt  0
    //   0  dqnew_by_dq  0     dqnew_by_domega
    //   0  0            I     0
    //   0  0            0     I
    dfv_by_dxvRES.SetIdentity();

    // Fill in drnew_by_dv = I * delta_t.
    MatrixFixed drnew_by_dv = new MatrixFixed(3, 3);
    drnew_by_dv.SetIdentity();
    drnew_by_dv *= delta_t;
    dfv_by_dxvRES.Update(drnew_by_dv, 0, 7);

    // Fill in dqnew_by_dq.
    // qnew = qold x qwt ( = q3 = q2 x q1 in Scene/newbits.cc language).
    dfv_by_dxvRES.Update(MatrixFixed.dq3_by_dq2(qwt), 3, 3);

    // Fill in dqnew_by_domega = d(q x qwt)_by_dqwt . dqwt_by_domega.
    MatrixFixed dqnew_by_dqwt = MatrixFixed.dq3_by_dq1(qold);
    // dqomegadt_by_domega (below) supplies dqwt_by_domega.
    MatrixFixed dqwt_by_domega = new MatrixFixed(4, 3);
    dqomegadt_by_domega(omegaold, delta_t, dqwt_by_domega);
    // Multiply them together and plug the product in.
    dfv_by_dxvRES.Update(dqnew_by_dqwt * dqwt_by_domega, 3, 10);
}
// Combine two rotations (this is VERY costly). // @param aa the inner rotation // @returns the resulting rotation (like this*q for matrices) public AngleAxis Multiply (AngleAxis aa) { // This is best done as quaternions Quaternion q1 = new Quaternion(this); Quaternion q2 = new Quaternion(aa); return new AngleAxis(q1.Multiply(q2)); }