Example 1
        /// <summary>
        /// Base-class constructor. This should be called in the constructor of any 
        /// derived class.
        /// </summary>
        /// <param name="position_state_size">The number of dimensions in the position state</param>
        /// <param name="state_size">The total state size</param>
        /// <param name="control_size">The control state size</param>
        /// <param name="m_m_d_t">A string representing the motion model dimensionality type </param>
        /// <param name="m_m_t">A unique string representing this particular motion model type</param>
        public Motion_Model(uint position_state_size, 
			   uint state_size, 
			   uint control_size, 
			   String m_m_d_t, String m_m_t)
        {
            POSITION_STATE_SIZE = position_state_size;
            STATE_SIZE = state_size;
            CONTROL_SIZE = control_size;
            motion_model_dimensionality_type = m_m_d_t;
            motion_model_type = m_m_t;

            fvRES = new Vector(STATE_SIZE);
            xpRES = new Vector(POSITION_STATE_SIZE);
            fv_noisyRES = new Vector(STATE_SIZE);
            xvredefRES = new Vector(STATE_SIZE);
            xvnormRES = new Vector(STATE_SIZE);
            zeroedxvRES = new Vector(STATE_SIZE);
            xpredefRES = new Vector(POSITION_STATE_SIZE);

            dfv_by_dxvRES = new MatrixFixed(STATE_SIZE, STATE_SIZE);
            QxRES = new MatrixFixed(STATE_SIZE, STATE_SIZE);
            dxp_by_dxvRES = new MatrixFixed(POSITION_STATE_SIZE, STATE_SIZE);           
            dxvredef_by_dxvRES = new MatrixFixed(STATE_SIZE, STATE_SIZE);
            dxvredef_by_dxpdefRES = new MatrixFixed(STATE_SIZE, POSITION_STATE_SIZE);
            dxvnorm_by_dxvRES = new MatrixFixed(STATE_SIZE, STATE_SIZE);
            dzeroedxv_by_dxvRES = new MatrixFixed(STATE_SIZE, STATE_SIZE);
            dxpredef_by_dxpRES = new MatrixFixed(POSITION_STATE_SIZE, POSITION_STATE_SIZE);
            dxpredef_by_dxpdefRES = new MatrixFixed(POSITION_STATE_SIZE, POSITION_STATE_SIZE);
        }
Example 2
 private void init()
 {
     m_centre = new Vector(2);
     m_C = new MatrixFixed(3, 3);
     m_Cinv = new MatrixFixed(3, 3);
     m_last_camera = new Vector(3);
     m_last_image_centred = new Vector(2);
 }
Example 3
        //friend class FeatureInitInfo;
        //friend class Scene_Single;

        /// <summary>
        /// Constructor. In the original C++ this is protected, since it is only called
        /// from FeatureInitInfo::add_particle(); it is public here because C# has no friend classes.
        /// </summary>
        /// <param name="l">The value(s) for the free parameters \lambda represented by this particle.</param>
        /// <param name="p">The initial probability for this particle</param>
        /// <param name="MEASUREMENT_SIZE">The number of parameters representing a measurement of a feature</param>
        public Particle(Vector l, float p, uint MEASUREMENT_SIZE)
        {
            lambda = new Vector(l); 
            probability = p;

            m_z = new Vector(MEASUREMENT_SIZE);
            m_h = new Vector(MEASUREMENT_SIZE);
            m_SInv = new MatrixFixed(MEASUREMENT_SIZE, MEASUREMENT_SIZE);
        }
Example 4
        // Default NULL version
        public virtual bool make_internal_measurement(Internal_Measurement_Model m,
                                              Vector v,
                                              Vector v2,
                                              MatrixFixed mt)
        {
            Debug.WriteLine("WARNING: make_internal_measurement not implemented.");

            return false;
        }
Example 5
        public void successful_internal_measurement(Vector zv_in)
        {
            //assert(zv.Size() == internal_measurement_model.MEASUREMENT_SIZE);
            zv.Update(zv_in);

            //if (DEBUGDUMP) cout << "Internal measurement made      : zv " << endl << zv << endl;

            successful_internal_measurement_flag = true;

            internal_measurement_model.func_nuv(hv, zv);
            nuv.Update(internal_measurement_model.get_nuvRES());
        }
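Judging from the analogous func_nui in Example 12, func_nuv presumably forms the standard innovation,

    \nu_v = z_v - h_v

so nuv holds the difference between the actual and predicted internal measurement.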
Example 6
        /// <summary>
        /// Simple overall prediction 
        /// </summary>
        /// <param name="scene"></param>
        /// <param name="u"></param>
        /// <param name="delta_t"></param>
        public void predict_filter_slow (Scene_Single scene, Vector u, float delta_t)
        {
            Debug.WriteLine("*** SLOW PREDICTION ***");

            // What we need to do for the prediction:
     
            //    Calculate f and grad_f_x
            //    Calculate Q
            //    Form x(k+1|k) and P(k+1|k)

            int size = (int)scene.get_total_state_size();

            // First form original total state and covariance
            Vector x = new Vector(size);
            MatrixFixed P = new MatrixFixed(size, size);
            scene.construct_total_state_and_covariance(ref x, ref P);

            // Make model calculations: store results in RES matrices
            Vector xv = scene.get_xv();
            //Vector xv = new Vector(scene.get_xv());
            scene.get_motion_model().func_fv_and_dfv_by_dxv(xv, u, delta_t);
            scene.get_motion_model().func_Q(scene.get_xv(), u, delta_t);

            // Find new state f
            Vector f = new Vector(size);

            // Feature elements of f are the same as x 
            f.Update(x);
            f.Update(scene.get_motion_model().get_fvRES(), 0);

            // Find new P

            // Since most elements of df_by_dx are zero... 
            MatrixFixed df_by_dx = new MatrixFixed(size, size);
            df_by_dx.Fill(0.0f);

            // Fill the rest of the elements of df_by_dx: 1 on diagonal for features
            for (int i = (int)scene.get_motion_model().STATE_SIZE; i < df_by_dx.Rows; i++)
                df_by_dx[i,i] = 1.0f;

            df_by_dx.Update(scene.get_motion_model().get_dfv_by_dxvRES(), 0, 0);

            // Calculate the process noise
            MatrixFixed Q = new MatrixFixed(size, size);
            Q.Fill(0.0f);
            Q.Update(scene.get_motion_model().get_QxRES(), 0, 0);

            P.Update(df_by_dx * P * df_by_dx.Transpose());

            P += Q;

            scene.fill_state_and_covariance(f, P);
        }
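As a sketch of the equations this routine implements (the standard EKF prediction step, matching the comments above, with F = df_by_dx):

    x(k+1|k) = f( x(k|k), u, \Delta t )
    P(k+1|k) = F P(k|k) F^T + Q

where F is the identity for the feature rows and dfv_by_dxv for the vehicle state block, and Q is non-zero only in the vehicle block.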
Example 7
        //friend class Scene_Single;
        //friend class Kalman;

        public Internal_Measurement(Internal_Measurement_Model i_m_m)
        {
            internal_measurement_model = i_m_m;
            hv = new Vector(internal_measurement_model.MEASUREMENT_SIZE);
            zv = new Vector(internal_measurement_model.MEASUREMENT_SIZE);
            nuv = new Vector(internal_measurement_model.MEASUREMENT_SIZE);

            dhv_by_dxv = new MatrixFixed(internal_measurement_model.MEASUREMENT_SIZE, internal_measurement_model.get_motion_model().STATE_SIZE);
            Rv = new MatrixFixed(internal_measurement_model.MEASUREMENT_SIZE, internal_measurement_model.MEASUREMENT_SIZE);
            Sv = new MatrixFixed(internal_measurement_model.MEASUREMENT_SIZE, internal_measurement_model.MEASUREMENT_SIZE);

            // if (DEBUGDUMP) cout << "Adding Internal Measurement type " 
		    // << internal_measurement_model.internal_type << endl;
        }
Example 8
        public void predict_internal_measurement(Vector xv, MatrixFixed Pxx)
        {
            internal_measurement_model.func_hv_and_dhv_by_dxv(xv);

            hv.Update(internal_measurement_model.get_hvRES());
            dhv_by_dxv.Update(internal_measurement_model.get_dhv_by_dxvRES());

            internal_measurement_model.func_Rv(hv);
            Rv.Update(internal_measurement_model.get_RvRES());

            internal_measurement_model.func_Sv(Pxx, dhv_by_dxv, Rv);
            Sv.Update(internal_measurement_model.get_SvRES());

            //if (DEBUGDUMP) cout << "Internal measurement prediction: hv " << endl 
		    //<< hv << endl;
        }
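Given its arguments, func_Sv presumably forms the usual EKF innovation covariance for the internal measurement; a sketch:

    S_v = (dh_v/dx_v) P_xx (dh_v/dx_v)^T + R_v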
Example 9
        public SearchDatum(MatrixFixed _PuInv, Vector _search_centre)
        {
            PuInv = _PuInv;
            search_centre = _search_centre;
            result_flag = false;
            result_u = 0;
            result_v = 0;

            halfwidth = (uint)(SceneLib.NO_SIGMA /
                        Math.Sqrt(PuInv[0, 0] - PuInv[0, 1] * PuInv[0, 1] / PuInv[1, 1]));
            halfheight = (uint)(SceneLib.NO_SIGMA /
                         Math.Sqrt(PuInv[1, 1] - PuInv[0, 1] * PuInv[0, 1] / PuInv[0, 0]));

            if (halfwidth > 10) halfwidth = 10;
            if (halfheight > 10) halfheight = 10;
        }
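The halfwidth and halfheight expressions are the half-extents of the bounding box of the NO_SIGMA search ellipse defined by the inverse covariance PuInv; as a sketch of the derivation, for the ellipse u^T PuInv u = n^2 with n = NO_SIGMA, the extent along the first image axis is

    halfwidth = n \sqrt{ (PuInv^{-1})_{00} } = n / \sqrt{ PuInv_{00} - PuInv_{01}^2 / PuInv_{11} }

and symmetrically for halfheight, before both are clamped to at most 10 pixels.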
Example 10
        public linefeature(Feature feature1, Feature feature2, int no_of_points, int history)
        {
            marked_for_deletion = false;
            curr_index = 0;
            hits = 0;
            this.feature1 = feature1;
            this.feature2 = feature2;
            this.no_of_points = no_of_points;
            this.history = history;
            pixel_intensity = new Byte[no_of_points, history];
            feature_position = new Vector[history, 2];

            for (int i = 0; i < history; i++)
            {
                feature_position[i, 0] = new Vector(3);
                feature_position[i, 1] = new Vector(3);
            }
        }
Example 11
        public unsafe QR(MatrixFixed M)
        {
            qrdc_out_ = new MatrixFixed(M.Columns, M.Rows);
            qraux_ = new Vector(M.Columns);
            jpvt_ = new int[M.Rows];
            Q_ = null;
            R_ = null;

            // Fill the transposed output matrix (the LINPACK routine expects column-major storage)
            int c = M.Columns;
            int r = M.Rows;
            for (int i = 0; i < r; ++i)
                for (int j = 0; j < c; ++j)
                    qrdc_out_[j,i] = M[i,j];

            int do_pivot = 0; // Enable[!=0]/disable[==0] pivoting.
            for (int i = 0; i < jpvt_.Length; i++) jpvt_[i] = 0;

            Vector work = new Vector(M.Rows);

            fixed (float* data = qrdc_out_.Datablock())
            {
                fixed (float* data2 = qraux_.Datablock())
                {
                    fixed (int* data3 = jpvt_)
                    {
                        fixed (float* data4 = work.Datablock())
                        {
                            Netlib.dqrdc_(data,       // On output, UT is R, below diag is mangled Q
                                          &r, &r, &c,
                                          data2,      // Further information required to demangle Q
                                          data3,
                                          data4,
                                          &do_pivot);
                        }
                    }
                }
            }
        }
Example 12
 public override void func_nui(Vector hi, Vector zi)
 {
     nuiRES.Update(zi - hi);
 }
Example 13
        public override void func_hi_noisy(Vector yi_true, Vector xp_true)
        {
            func_hi_and_dhi_by_dxp_and_dhi_by_dyi(yi_true, xp_true);

            hi_noisyRES.Put(0, SceneLib.SampleNormal(hiRES[0], ((Wide_Camera_Point_Feature_Measurement_Model)wide_model).SD_IMAGE_SIMULATION, rnd));
            hi_noisyRES.Put(1, SceneLib.SampleNormal(hiRES[1], ((Wide_Camera_Point_Feature_Measurement_Model)wide_model).SD_IMAGE_SIMULATION, rnd));
        }
Example 14
        /// <summary>
        /// Test whether a feature should currently be visible: its predicted image location
        /// must lie within the image boundaries, it must be in front of the camera, and its
        /// distance and viewing angle must not have changed too much since it was first observed.
        /// </summary>
        /// <param name="xp">current robot position state</param>
        /// <param name="yi">3D position of the feature</param>
        /// <param name="xp_orig">robot position state when the feature was initially observed</param>
        /// <param name="hi">image coordinates of the feature</param>
        /// <returns>0 if the feature is visible; otherwise a bitwise OR of the failure flags</returns>
        public override uint visibility_test(Vector xp, Vector yi, Vector xp_orig, Vector hi)
        {
            float image_search_boundary = ((Wide_Camera_Point_Feature_Measurement_Model)wide_model).IMAGE_SEARCH_BOUNDARY;
            uint wdth = ((Wide_Camera_Point_Feature_Measurement_Model)wide_model).m_camera.ImageWidth();
            uint hght = ((Wide_Camera_Point_Feature_Measurement_Model)wide_model).m_camera.ImageHeight();
            uint cant_see_flag = 0;
  
            // Test image boundaries 
            float x = hi[0];
            float y = hi[1];
            if ((x < 0.0f + image_search_boundary) ||
                (x > (float)(wdth - 1 - image_search_boundary))) 
            {
                cant_see_flag |= Wide_Camera_Point_Feature_Measurement_Model.LEFT_RIGHT_FAIL;
                //if (Camera_Constants.DEBUGDUMP) Debug.WriteLine("Visibility test left / right fail.");
            }
            if ((y < 0.0f + image_search_boundary) ||
                (y > (float)(hght - 1 - image_search_boundary))) 
            {
                cant_see_flag |= Wide_Camera_Point_Feature_Measurement_Model.UP_DOWN_FAIL;
                //if (Camera_Constants.DEBUGDUMP) Debug.WriteLine("Visibility test up / down fail.");
            }

            // Do tests on length and angle of predicted view 

            // hLWi is current predicted vector from head to feature in 
            // world frame

            // This function gives relative position of feature
            func_zeroedyi_and_dzeroedyi_by_dxp_and_dzeroedyi_by_dyi(yi, xp);

            // Test the feature's not behind the camera (because projection
            // may do strange things)
            if (zeroedyiRES[2] <= 0) 
            {
                cant_see_flag |= Wide_Camera_Point_Feature_Measurement_Model.BEHIND_CAMERA_FAIL;
                //if (Camera_Constants.DEBUGDUMP) Debug.WriteLine("Behind camera fail.");
            }

            ((Wide_Camera_Point_Feature_Measurement_Model)wide_model).threed_motion_model.func_q(xp);
            RotationMatrix RWR = ((Wide_Camera_Point_Feature_Measurement_Model)wide_model).threed_motion_model.get_qRES().RotationMatrix();

            Vector3D hLWi = new Vector3D(RWR * (new Point3D(zeroedyiRES)));

            // hLWi_orig is vector from head to feature in world frame
            // WHEN THAT FEATURE WAS FIRST MEASURED: i.e. when the image
            // patch was saved
            func_zeroedyi_and_dzeroedyi_by_dxp_and_dzeroedyi_by_dyi(yi, xp_orig);

            ((Wide_Camera_Point_Feature_Measurement_Model)wide_model).threed_motion_model.func_q(xp_orig);
            RotationMatrix RWR_orig = ((Wide_Camera_Point_Feature_Measurement_Model)wide_model).threed_motion_model.get_qRES().RotationMatrix();
  
            Vector3D hLWi_orig = new Vector3D(RWR_orig * (new Point3D(zeroedyiRES)));

            // Compare hLWi and hLWi_orig for length and orientation
            float mod_hLWi = hLWi.Norm();
            float mod_hLWi_orig = hLWi_orig.Norm();

            float length_ratio = mod_hLWi / mod_hLWi_orig;
            float max_length_ratio = ((Wide_Camera_Point_Feature_Measurement_Model)wide_model).MAXIMUM_LENGTH_RATIO;
            if ((length_ratio > max_length_ratio) ||
                 (length_ratio < (1.0f / max_length_ratio))) 
            {
                cant_see_flag |= Wide_Camera_Point_Feature_Measurement_Model.DISTANCE_FAIL;
                //if (Camera_Constants.DEBUGDUMP) Debug.WriteLine("Distance fail.");
            }

            float dot_prod = hLWi * hLWi_orig;

            float angle = (float)Math.Acos(dot_prod / (mod_hLWi * mod_hLWi_orig));
            if (float.IsNaN(angle)) Debug.WriteLine("Maths error: " + dot_prod + " / " + (mod_hLWi * mod_hLWi_orig));
            angle = (angle >= 0.0f ? angle : -angle);  // Make angle positive
            if (angle > ((Wide_Camera_Point_Feature_Measurement_Model)wide_model).MAXIMUM_ANGLE_DIFFERENCE) 
            {
                cant_see_flag |= Wide_Camera_Point_Feature_Measurement_Model.ANGLE_FAIL;
                //if (Camera_Constants.DEBUGDUMP) Debug.WriteLine("Angle fail.");
            }

            return cant_see_flag;   // 0 if OK, otherwise error code
        }
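In equation form, with R_max = MAXIMUM_LENGTH_RATIO and \theta_max = MAXIMUM_ANGLE_DIFFERENCE, the distance and angle tests require

    1/R_max <= |hLWi| / |hLWi_orig| <= R_max
    \arccos( (hLWi . hLWi_orig) / (|hLWi| |hLWi_orig|) ) <= \theta_max

and the corresponding fail bits are set when either condition is violated.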
Example 15
        public override void func_hi_and_dhi_by_dxp_and_dhi_by_dyi(Vector yi, Vector xp)
        {
            // This function gives relative position of feature: also call this hR
            // (vector from camera to feature in robot frame)
            func_zeroedyi_and_dzeroedyi_by_dxp_and_dzeroedyi_by_dyi(yi, xp);

            // Project this from 3D into the 2D image using our camera
            Vector feature_3D_position = get_zeroedyiRES();
            WideCamera cam = ((Wide_Camera_Point_Feature_Measurement_Model)wide_model).m_camera;
            hiRES = cam.Project(feature_3D_position);

            // And ask the camera what the Jacobian of this projection was
            MatrixFixed dhid_by_dzeroedyi = cam.ProjectionJacobian();

            // Form the required Jacobians
            dhi_by_dxpRES = dhid_by_dzeroedyi * dzeroedyi_by_dxpRES;
            dhi_by_dyiRES = dhid_by_dzeroedyi * dzeroedyi_by_dyiRES;
        }
Example 16
 public override void func_yigraphics_and_Pyiyigraphics(Vector yi, MatrixFixed Pyiyi)
 {
     yigraphicsRES.Update(yi);
     PyiyigraphicsRES.Update(Pyiyi);
 }
Example 17
 public void func_ri(Vector ypi)
 {
     riRES.SetVNL3(ypi.Extract(3, 0));
 }
Example 18
        /// <summary>
        /// Predict the image location  \vct{h}_i = (x,y)  for
        /// partially-initialised feature with state  \vct{y}_i , given the current
        /// camera location  \vct{x}_p  and the depth parameter  \lambda .
        /// </summary>
        /// <param name="yi"></param>
        /// <param name="xp"></param>
        /// <param name="lambda"></param>
        public override void func_hpi_and_dhpi_by_dxp_and_dhpi_by_dyi(Vector yi, 
                                                             Vector xp,
                                                             Vector lambda)
        {
            
            // This function gives relative position of feature: also call this hR
            // (vector from camera to feature in robot frame)
            func_zeroedyi_and_dzeroedyi_by_dxp_and_dzeroedyi_by_dyi(yi, xp);
            
            // Parameters of vector hLR from camera to feature in robot frame
            // hLR = zeroedri + lambda * zeroedhhati
            // Calculate the vector from the camera to the feature given the current
            // lambda
            Vector hLR = zeroedyiRES.Extract(3, 0) + zeroedyiRES.Extract(3, 3) * lambda[0];
            
            // Project this into the image
            hpiRES = ((Wide_Camera_Point_Feature_Measurement_Model)wide_model).m_camera.Project(hLR);
            
            // What is the Jacobian of this projection?
            MatrixFixed dhpi_by_dhLRi = ((Wide_Camera_Point_Feature_Measurement_Model)wide_model).m_camera.ProjectionJacobian();
            
            // Calculate the required result Jacobians

            // Now how the vector to the feature depends on the parameterised line
            // (this is a function of lambda)
            float[] b = new float[18];
            b[0] = 1.0f;
            b[1] = 0.0f;
            b[2] = 0.0f;
            b[3] = lambda[0];
            b[4] = 0.0f;
            b[5] = 0.0f;
            b[6] = 0.0f;
            b[7] = 1.0f;
            b[8] = 0.0f;
            b[9] = 0.0f;
            b[10] = lambda[0];
            b[11] = 0.0f;
            b[12] = 0.0f;
            b[13] = 0.0f;
            b[14] = 1.0f;
            b[15] = 0.0f;
            b[16] = 0.0f;
            b[17] = lambda[0];
            MatrixFixed dhLRi_by_dzeroedyi = new MatrixFixed(3, 6, b);
  
            dhpi_by_dxpRES = dhpi_by_dhLRi * dhLRi_by_dzeroedyi * dzeroedyi_by_dxpRES;            
            dhpi_by_dyiRES = dhpi_by_dhLRi * dhLRi_by_dzeroedyi * dzeroedyi_by_dyiRES;
  
            /*
            if (Camera_Constants.DEBUGDUMP) Debug.WriteLine("func_hpi: yi = " + yi + "," +
                                           "xp = " + xp + "," +
                                           "lambda = " + lambda + "," +
                                           "hpiRES = " + hpiRES);
             */
            
        }
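The hard-coded array b above is the row-major 3 x 6 Jacobian of hLR = zeroedri + \lambda zeroedhhati with respect to zeroedyi = (zeroedri, zeroedhhati):

    dhLRi_by_dzeroedyi = [ I_3  |  \lambda I_3 ]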
Example 19
 public override uint visibility_test(Vector v, Vector v2,
                             Vector v3, Vector v4)
 {
     // Always visible for now
     return 0;
 }
Example 20
        public override void func_zeroedyi_and_dzeroedyi_by_dxp_and_dzeroedyi_by_dyi(Vector yi, Vector xp)
        {
            Wide_Camera_Point_Feature_Measurement_Model wm = (Wide_Camera_Point_Feature_Measurement_Model)wide_model;

            // Extract cartesian and quaternion components of xp
            wm.threed_motion_model.func_r(xp);
            wm.threed_motion_model.func_q(xp);

            // Extract ri and hhati components of yi = ypi
            func_ri(yi);
            func_hhati(yi);

            // ri part: transformation is just the same as in the normal point case
            // zeroedri = RRW(rWi - rW)  //commented out in original code

            // ri - r
            Vector3D yWiminusrW = new Vector3D(riRES - wm.threed_motion_model.get_rRES());

            Quaternion qRW = wm.threed_motion_model.get_qRES().Inverse();
            MatrixFixed dqRW_by_dq = MatrixFixed.dqbar_by_dq();

            // Rotation RRW
            RotationMatrix RRW = qRW.RotationMatrix();

            // RRW(rWi - rW)
            Vector3D zeroedri = new Vector3D(RRW * yWiminusrW);

            // Now calculate Jacobians
            // dzeroedri_by_dri is RRW
            // dzeroedri_by_dhhati = 0
            MatrixFixed dzeroedri_by_dri = new MatrixFixed(RRW);

            // dzeroedyi_by_dxp:
            // dzeroedri_by_dr = -RRW
            // dzeroedri_by_dq = d_dq(RRW (ri - r))
            MatrixFixed dzeroedri_by_dr = RRW * -1.0f;

            MatrixFixed dzeroedri_by_dqRW = MatrixFixed.dRq_times_a_by_dq(qRW, yWiminusrW);
            MatrixFixed dzeroedri_by_dq = dzeroedri_by_dqRW * dqRW_by_dq;

            // Now for the hhati part (easier...)
            // zeroedhhati = RRW hhati
            Vector3D zeroedhhati = new Vector3D(RRW * hhatiRES);

            // Jacobians
            // dzeroedhhati_by_dr = 0
            // dzeroedhhati_by_dq = d_dq(RRW hhati)
            // dzeroedhhati_by_dhhati = RRW
            // dzeroedhhati_by_dri = 0
            MatrixFixed dzeroedhhati_by_dqRW = MatrixFixed.dRq_times_a_by_dq(qRW, hhatiRES);
            MatrixFixed dzeroedhhati_by_dq = dzeroedhhati_by_dqRW * dqRW_by_dq;
            MatrixFixed dzeroedhhati_by_dhhati = new MatrixFixed(RRW);

            // And put it all together
            zeroedyiRES.Update(zeroedri.GetVNL3(), 0);
            zeroedyiRES.Update(zeroedhhati.GetVNL3(), 3);

            //cout << "Line: zeroedri = " << zeroedri << "zeroedhhati = " << zeroedhhati;

            dzeroedyi_by_dxpRES.Fill(0.0f);
            dzeroedyi_by_dxpRES.Update(dzeroedri_by_dr, 0, 0);
            dzeroedyi_by_dxpRES.Update(dzeroedri_by_dq, 0, 3);
            dzeroedyi_by_dxpRES.Update(dzeroedhhati_by_dq, 3, 3);

            dzeroedyi_by_dyiRES.Fill(0.0f);
            dzeroedyi_by_dyiRES.Update(dzeroedri_by_dri, 0, 0);
            dzeroedyi_by_dyiRES.Update(dzeroedhhati_by_dhhati, 3, 3);
        }
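In equation form, the transformation and the non-zero Jacobian blocks computed above are

    zeroedyi = ( R_RW (r_i - r_W) , R_RW hhat_i )
    dzeroedri_by_dr = -R_RW,   dzeroedri_by_dri = R_RW,   dzeroedhhati_by_dhhati = R_RW

with the remaining blocks (e.g. dzeroedhhati_by_dr) identically zero, which is why they are never written into the result matrices.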
Example 21
        public override void func_zeroedyigraphics_and_Pzeroedyigraphics(Vector yi, Vector xv,
	                    MatrixFixed Pxx, MatrixFixed Pxyi, MatrixFixed Pyiyi)
        {
            ((Wide_Camera_Point_Feature_Measurement_Model)wide_model).threed_motion_model.func_xp(xv);

            // In this case (where the feature state is the same as the graphics
            // state) zeroedyigraphics is the same as zeroedyi
            func_zeroedyi_and_dzeroedyi_by_dxp_and_dzeroedyi_by_dyi(yi, ((Wide_Camera_Point_Feature_Measurement_Model)wide_model).threed_motion_model.get_xpRES());
            zeroedyigraphicsRES.Update(zeroedyiRES);

            MatrixFixed dzeroedyigraphics_by_dxv = dzeroedyi_by_dxpRES * ((Wide_Camera_Point_Feature_Measurement_Model)wide_model).threed_motion_model.get_dxp_by_dxvRES();

            PzeroedyigraphicsRES.Update(dzeroedyigraphics_by_dxv * Pxx * dzeroedyigraphics_by_dxv.Transpose() +
                                        dzeroedyi_by_dyiRES * Pxyi.Transpose() * dzeroedyigraphics_by_dxv.Transpose() +
                                        dzeroedyigraphics_by_dxv * Pxyi * dzeroedyi_by_dyiRES.Transpose() +
                                        dzeroedyi_by_dyiRES * Pyiyi * dzeroedyi_by_dyiRES.Transpose());
        }
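Writing J_x = dzeroedyigraphics_by_dxv and J_y = dzeroedyi_by_dyiRES, the update above is the usual first-order covariance propagation

    P_zeroedyigraphics = J_x Pxx J_x^T + J_x Pxyi J_y^T + J_y Pxyi^T J_x^T + J_y Pyiyi J_y^T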
Example 22
 public virtual classimage_mono initialise_known_feature(Feature_Measurement_Model f_m_m,
                                             Vector yi, uint known_feature_label, String path) { return (null); }
Example 23
 public override void func_Ri(Vector hi)
 {
     RiRES = ((Wide_Camera_Point_Feature_Measurement_Model)wide_model).m_camera.MeasurementNoise(hi);
 }
Example 24
 public virtual classimage_mono initialise_known_feature(Feature_Measurement_Model f_m_m,
                                                    Vector yi, Settings.Section section, String path) { return (null); }
Example 25
        /// <summary>
        /// Partial initialisation function.
        /// Calculates the state vector  \vct{y}_{pi}  and Jacobians for a new
        /// partially-initialised feature from an initial observation.
        /// </summary>
        /// <param name="hi">The initial measurement  \vct{h}_i , in this case the (x,y) image location of the feature.</param>
        /// <param name="xp">The robot position state  \vct{x}_p  from which the initial observation was made.</param>
        public override void func_ypi_and_dypi_by_dxp_and_dypi_by_dhi_and_Ri(Vector hi, Vector xp)
        {
            // Representation of a line here:
            // ypi = (rWi    )
            //       (hLhatWi)

            // Form the ray (in camera co-ordinates) by unprojecting this image location
            Vector3D hLRi = new Vector3D(((Wide_Camera_Point_Feature_Measurement_Model)wide_model).m_camera.Unproject(hi));
  
            //if (Camera_Constants.DEBUGDUMP) Debug.WriteLine("debug hLRi");

            // Form hLhatRi from hLRi
            // Normalise
            Vector3D hLhatRi = hLRi;
            hLhatRi.Normalise();

            MatrixFixed dhLhatRi_by_dhLRi = MatrixFixed.dvnorm_by_dv(hLRi); 

            //if (Camera_Constants.DEBUGDUMP) Debug.WriteLine("debug hLhatRi" + hLhatRi);

            // Now convert this into a direction in world co-ordinates by rotating
            // Form hLhatWi from hLhatRi
            // Rotate
            ((Wide_Camera_Point_Feature_Measurement_Model)wide_model).threed_motion_model.func_q(xp);
            RotationMatrix RWR = ((Wide_Camera_Point_Feature_Measurement_Model)wide_model).threed_motion_model.get_qRES().RotationMatrix();
            Vector3D hLhatWi = new Vector3D(RWR * hLhatRi);

            //if (Camera_Constants.DEBUGDUMP) Debug.WriteLine("debug hLhatWi" + hLhatWi);

            // Extract rW from xp
            ((Wide_Camera_Point_Feature_Measurement_Model)wide_model).threed_motion_model.func_r(xp);
 
            // And form ypiRES
            ypiRES.Update(((Wide_Camera_Point_Feature_Measurement_Model)wide_model).threed_motion_model.get_rRES().GetVNL3(), 0);
            ypiRES.Update(hLhatWi.GetVNL3(), 3);

            //if (Camera_Constants.DEBUGDUMP) Debug.WriteLine("debug ypiRES" + ypiRES);

            // Form Jacobians dypi_by_dxp and dypi_by_dhi

            // dypi_by_dxp = (drWi_by_dr     drWi_by_dq    )
            //               (dhLhatWi_by_dr dhLhatWi_by_dq)
            //             = (I              0             )
            //               (0              dhLhatWi_by_dq)

            // hLhatWi = RWR * hLhatRi
            // => dhLhatWi_by_dq = d/dq ( R(qWR) * hLhatRi)

            MatrixFixed dhLhatWi_by_dq = MatrixFixed.dRq_times_a_by_dq(((Wide_Camera_Point_Feature_Measurement_Model)wide_model).threed_motion_model.get_qRES(), hLhatRi);

            // Put dypi_by_dxp together
            dypi_by_dxpRES.Fill(0.0f);
            dypi_by_dxpRES.Put(0, 0, 1.0f);
            dypi_by_dxpRES.Put(1, 1, 1.0f);
            dypi_by_dxpRES.Put(2, 2, 1.0f);
            dypi_by_dxpRES.Update(dhLhatWi_by_dq, 3, 3);

            //  cout << "dypi_by_dxpRES" <<  dypi_by_dxpRES;


            // dypi_by_dhi = (drWi_by_dhi    )
            //               (dhLhatWi_by_dhi)
            //             = (0              )
            //               (dhLhatWi_by_dhi)

            // hLhatWi = RWR * hLhatRi
            // Need to work out derivative for this
            // dhLhatWi_by_dhi = RWR * dhLhatRi_by_dhLRi * dhLRi_by_dhi

            MatrixFixed dhLhatWi_by_dhi = RWR * dhLhatRi_by_dhLRi * ((Wide_Camera_Point_Feature_Measurement_Model)wide_model).m_camera.UnprojectionJacobian();
  
            dypi_by_dhiRES.Fill(0.0f);
            dypi_by_dhiRES.Update(dhLhatWi_by_dhi, 3, 0);

            //  cout << "dypi_by_dhiRES" <<  dypi_by_dhiRES;

            // And construct Ri
            func_Ri(hi);

            /*
            if (Camera_Constants.DEBUGDUMP) Debug.WriteLine("func_ypi: hi = " + hi + "," +
                                           "xp = " + xp + "," +
                                           "ypiRES = " + ypiRES);
             */
        }
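Summarising the construction in equation form: the partially-initialised state and its Jacobian with respect to the camera position are

    ypi = ( r_W , hLhat_Wi ),    hLhat_Wi = R_WR hLRi / |hLRi|
    dypi_by_dxp = [ I_3    0              ]
                  [ 0      dhLhatWi_by_dq ]

where hLRi is the ray obtained by unprojecting the image measurement hi.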
Example 26
 /// <summary>
 /// Make the first measurement of a feature.
 /// </summary>
 /// <param name="z">The location of the feature to measure</param>
 /// <param name="id">The Identifier to fill with the feature measurements</param>
 /// <param name="f_m_m">The feature measurement model to use for this partially-initialised feature</param>
 /// <returns></returns>
 public virtual bool make_initial_measurement_of_feature(Vector z,
                                                  ref classimage_mono id,
                                                  Partially_Initialised_Feature_Measurement_Model f_m_m,
                                                  Vector patch_colour) { return (false); }
Example 27
        /// <summary>
        /// Conversion function: how to turn a partially-initialised feature ypi
        /// into a fully-initialised one yfi.
        /// </summary>
        /// <param name="ypi">The partially-initialised feature state  \vct{y}_{pi} </param>
        /// <param name="lambda">The free depth parameter  \lambda </param>
        public override void func_yfi_and_dyfi_by_dypi_and_dyfi_by_dlambda(Vector ypi, Vector lambda)
        {
            // Simple: lambda is a scalar
            // yfi = ri + lambda hhati   //commented out in original code

            func_ri(ypi);
            func_hhati(ypi);

            float[] a = new float[3];
            a[0] = riRES.GetX() + lambda[0] * hhatiRES.GetX();
            a[1] = riRES.GetY() + lambda[0] * hhatiRES.GetY();
            a[2] = riRES.GetZ() + lambda[0] * hhatiRES.GetZ();
            yfiRES.CopyIn(a); 

            float[] b = new float[18];
            b[0] = 1.0f;
            b[1] = 0.0f;
            b[2] = 0.0f;
            b[3] = lambda[0];
            b[4] = 0.0f;
            b[5] = 0.0f;
            b[6] = 0.0f;
            b[7] = 1.0f;
            b[8] = 0.0f;
            b[9] = 0.0f;
            b[10] = lambda[0];
            b[11] = 0.0f;
            b[12] = 0.0f;
            b[13] = 0.0f;
            b[14] = 1.0f;
            b[15] = 0.0f;
            b[16] = 0.0f;
            b[17] = lambda[0];
            dyfi_by_dypiRES.CopyIn(b);

            dyfi_by_dlambdaRES.Put(0, 0, hhatiRES.GetX());
            dyfi_by_dlambdaRES.Put(1, 0, hhatiRES.GetY());
            dyfi_by_dlambdaRES.Put(2, 0, hhatiRES.GetZ());
        }
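In equation form, the conversion and its Jacobians are

    yfi = ri + \lambda hhati
    dyfi_by_dypi = [ I_3  |  \lambda I_3 ],    dyfi_by_dlambda = hhati

which is exactly what the hard-coded arrays a and b fill in above.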
Example 28
 // In this case the graphics representation  y_i^{graphics}  and its
 // covariance are the same as the feature state  y_i  and its covariance.
 public override void func_yigraphics_and_Pyiyigraphics(Vector yi, MatrixFixed Pyiyi)
 {
     // The graphics representation is the same as the state 
     yigraphicsRES.Update(yi);
     PyiyigraphicsRES.Update(Pyiyi);
 }
Example 29
 public void func_hhati(Vector ypi)
 {
     hhatiRES.SetVNL3(ypi.Extract(3, 3));
 }
Example 30
        public override void func_zeroedyi_and_dzeroedyi_by_dxp_and_dzeroedyi_by_dyi(Vector yi, Vector xp)
        {
            ThreeD_Motion_Model mm = ((Wide_Camera_Point_Feature_Measurement_Model)wide_model).threed_motion_model;

            // Extract cartesian and quaternion components of xp
            mm.func_r(xp);
            mm.func_q(xp);

            Vector3D yWiminusrW = new Vector3D((new Vector3D(yi)) -(mm.get_rRES()));

            Quaternion qRES = mm.get_qRES();
            Quaternion qRW = qRES.Inverse();
            MatrixFixed dqRW_by_dq = MatrixFixed.dqbar_by_dq();

            // Rotation RRW
            RotationMatrix RRW = qRW.RotationMatrix();

            // Position of feature relative to robot in robot frame
            Vector3D zeroedyi = new Vector3D(RRW * yWiminusrW);
            zeroedyiRES.Update(zeroedyi.GetVNL3());

            // Now calculate Jacobians
            // dzeroedyi_by_dyi is RRW
            dzeroedyi_by_dyiRES.Update(RRW);

            // dzeroedyi_by_dxp has 2 partitions:
            // dzeroedyi_by_dr (3 * 3)
            // dzeroedyi_by_dq (3 * 4)
            MatrixFixed dzeroedyi_by_dr = (MatrixFixed)(RRW);
            dzeroedyi_by_dr *= -1.0f;

            // These should be of dimension 3 x 4
            MatrixFixed dzeroedyi_by_dqRW = MatrixFixed.dRq_times_a_by_dq(qRW, yWiminusrW);
            MatrixFixed dzeroedyi_by_dq = dzeroedyi_by_dqRW * dqRW_by_dq;

            dzeroedyi_by_dxpRES.Update(dzeroedyi_by_dr, 0, 0);
            dzeroedyi_by_dxpRES.Update(dzeroedyi_by_dq, 0, 3);
        }
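In equation form, the point-feature version of this transformation and its Jacobians are

    zeroedyi = R_RW (yi - r_W)
    dzeroedyi_by_dyi = R_RW,    dzeroedyi_by_dr = -R_RW,    dzeroedyi_by_dq = dRq_times_a_by_dq(q_RW, yi - r_W) * dqbar_by_dq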