/// <summary>
/// Construct an ISAM2Navigator using the indicated vehicle as a reference.
/// </summary>
/// <param name="vehicle">Vehicle to track.</param>
/// <param name="onlymapping">If true, don't do SLAM, but mapping
/// (i.e. the localization is assumed known). Currently not used.</param>
public ISAM2Navigator(Vehicle<MeasurerT, PoseT, MeasurementT> vehicle, bool onlymapping = false)
    : base(vehicle, onlymapping)
{
    int nmeasurement = vehicle.MeasurementCovariance.Length;
    int nmotion      = vehicle.MotionCovariance.Length;

    double[] measurementnoise = new double[nmeasurement];
    double[] motionnoise      = new double[nmotion];
    double[] mparams          = vehicle.Measurer.ToLinear();

    // NOTE the iSAM2 interface assumes independent noise in each component,
    // discarding every entry that's not in the diagonal;
    // although it is possible to define a complete matrix,
    // it doesn't seem to be worth it when comparing performance
    for (int i = 0; i < nmeasurement; ++i) {
        measurementnoise[i] = Math.Sqrt(vehicle.MeasurementCovariance[i][i]);
    }

    for (int i = 0; i < nmotion; ++i) {
        motionnoise[i] = MotionDt * Math.Sqrt(vehicle.MotionCovariance[i][i]);
    }

    bestestimate      = new TrackVehicle<MeasurerT, PoseT, MeasurementT>();
    MapModel          = new IndexedMap(3);
    plmodel           = new IndexedMap(3);
    CandidateMapModel = new IndexedMap(3);

    BestEstimate.Pose = vehicle.Pose.DClone();

    handle    = NewNavigator(vehicle.Pose.State, measurementnoise, motionnoise, mparams);
    nextlabel = 0;
}
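// Illustrative sketch, not part of the original class: shows how the constructor's
// diagonal-only noise extraction above behaves for a hypothetical 3x3 covariance matrix.
// Every off-diagonal correlation is discarded and each component keeps only its own
// standard deviation. The method name and the numeric values are assumptions for exposition.
private static double[] ExampleDiagonalNoise()
{
    double[][] covariance = { new double[] { 4.0, 0.2, 0.0 },
                              new double[] { 0.2, 1.0, 0.1 },
                              new double[] { 0.0, 0.1, 9.0 } };

    double[] sigma = new double[3];

    for (int i = 0; i < 3; ++i) {
        // per-component standard deviation; correlations (0.2, 0.1) are ignored
        sigma[i] = Math.Sqrt(covariance[i][i]);  // -> { 2.0, 1.0, 3.0 }
    }

    return sigma;
}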
/// <summary>
/// Retrieve the submap of all the landmarks that match a condition.
/// </summary>
/// <param name="match">Match condition.</param>
/// <returns>Filtered submap.</returns>
public IMap FindAll(Predicate<Gaussian> match)
{
    IndexedMap filtered = new IndexedMap(Dimensions);
    filtered.landmarks  = this.landmarks.FindAll(match);

    return filtered;
}
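// Hypothetical usage sketch for FindAll, not taken from the original source:
// keep only the landmarks whose weight exceeds a threshold, e.g. to render
// confident landmarks. The variable names and the 0.8 threshold are assumptions.
//
//     IMap confident = map.FindAll(g => g.Weight > 0.8);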
/// <summary>
/// Get the map estimate.
/// </summary>
/// <param name="plmodel">Side model that contains the covariances of the
/// pose-landmark pairs projected into the measurement space.
/// Takes the form E = JSJ^T + R, where
/// J is the measurement jacobian in both the pose and landmark directions,
/// S is the joint covariance of the best estimate current pose and the landmark and
/// R is the measurement noise.
/// </param>
/// <returns>Map estimate as points with certain covariance (gaussians).</returns>
private unsafe IndexedMap GetMapModel(out IndexedMap plmodel)
{
    int        length;
    Gaussian   component;
    IndexedMap mapmodel = new IndexedMap(3);

    plmodel = new IndexedMap(3);

    double* ptrmapmodel = (double*) ISAM2Lib.getmapmodel      (handle, out length);
    double* ptrmapcov   = (double*) ISAM2Lib.getmapcovariances(handle, out length);
    double* ptrplcov    = (double*) ISAM2Lib.getplcovariances (handle, out length);

    // main model: mean and covariance
    for (int i = 0, k = 0, h = 0; i < length; i++, k += 3, h += 9) {
        double[]   mean       = new double[3];
        double[][] covariance = { new double[3], new double[3], new double[3] };

        mean[0] = ptrmapmodel[k + 0];
        mean[1] = ptrmapmodel[k + 1];
        mean[2] = ptrmapmodel[k + 2];

        covariance[0][0] = ptrmapcov[h + 0];
        covariance[0][1] = ptrmapcov[h + 1];
        covariance[0][2] = ptrmapcov[h + 2];
        covariance[1][0] = ptrmapcov[h + 3];
        covariance[1][1] = ptrmapcov[h + 4];
        covariance[1][2] = ptrmapcov[h + 5];
        covariance[2][0] = ptrmapcov[h + 6];
        covariance[2][1] = ptrmapcov[h + 7];
        covariance[2][2] = ptrmapcov[h + 8];

        component = new Gaussian(mean, covariance, 1.0);
        mapmodel.Add(component);
    }

    // side model: covariance projected into measurement space
    // includes pose uncertainty and measurement noise
    for (int i = 0, h = 0; i < length; i++, h += 9) {
        double[][] covariance = { new double[3], new double[3], new double[3] };

        covariance[0][0] = ptrplcov[h + 0];
        covariance[0][1] = ptrplcov[h + 1];
        covariance[0][2] = ptrplcov[h + 2];
        covariance[1][0] = ptrplcov[h + 3];
        covariance[1][1] = ptrplcov[h + 4];
        covariance[1][2] = ptrplcov[h + 5];
        covariance[2][0] = ptrplcov[h + 6];
        covariance[2][1] = ptrplcov[h + 7];
        covariance[2][2] = ptrplcov[h + 8];

        component = new Gaussian(new double[3], covariance, 1.0);
        plmodel.Add(component);
    }

    return mapmodel;
}
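// Illustrative sketch, not part of the original class: the loops above unpack a flat,
// row-major buffer of 9 doubles per landmark into a 3x3 jagged covariance matrix.
// A hypothetical helper doing the same unpacking for a single landmark could look like
// this (name and signature are assumptions for exposition):
private static double[][] ExampleUnpackCovariance(double[] flat, int offset)
{
    double[][] covariance = { new double[3], new double[3], new double[3] };

    for (int row = 0; row < 3; row++) {
        for (int col = 0; col < 3; col++) {
            // element (row, col) sits at offset + 3 * row + col in the flat buffer,
            // matching the h + 0 .. h + 8 indexing used in GetMapModel
            covariance[row][col] = flat[offset + 3 * row + col];
        }
    }

    return covariance;
}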
/// <summary>
/// Find the data association labels from the new valid measurements and
/// the internal previous map model using Mahalanobis association.
/// </summary>
/// <param name="measurements">New measurements.</param>
/// <returns>Association labels.</returns>
public List<int> FindLabels(List<MeasurementT> measurements)
{
    if (DAAlgorithm == DataAssociationAlgorithm.Perfect) {
        if (!RefVehicle.HasDataAssociation) {
            var exception = new InvalidOperationException("Tried to use perfect data association when none exists.");
            exception.Data["module"] = "association";

            throw exception;
        }

        return RefVehicle.DataAssociation;
    }

    double[][] I             = 0.001.Multiply(Accord.Math.Matrix.Identity(3).ToArray());
    bool[]     keepcandidate = new bool[CandidateMapModel.Count];
    double[][] R;
    Gaussian[] q;
    Gaussian[] qcandidate;

    var        pose             = BestEstimate;
    IndexedMap visible          = new IndexedMap(3);
    List<int>  visibleLandmarks = new List<int>();

    for (int i = 0; i < MapModel.Count; i++) {
        if (pose.Visible(MapModel[i].Mean)) {
            visible         .Add(MapModel[i]);
            visibleLandmarks.Add(i);
        }
    }

    double logPD      = Math.Log(pose.PD);
    double logclutter = Math.Log(pose.ClutterDensity);

    int n = visible.Count + CandidateMapModel.Count;
    int m = visible.Count + CandidateMapModel.Count + measurements.Count;

    // distances(i, k) = distance between landmark i and measurement k
    SparseMatrix distances = new SparseMatrix(n, n, double.NegativeInfinity);

    int candidatecount = CandidateMapModel.Count;
    // candidate count at the beginning of the process;
    // this is so the measurements aren't compared with other measurements
    // (if one gets promoted to candidate, the next one could think it's similar to it,
    // but that isn't sensible: one landmark -> hopefully one measurement)

    R          = pose.MeasurementCovariance;
    q          = new Gaussian[visible.Count];
    qcandidate = new Gaussian[CandidateMapModel.Count];

    for (int i = 0; i < q.Length; i++) {
        Gaussian component = visible[i];

        if (DAAlgorithm == DataAssociationAlgorithm.Mahalanobis) {
            q[i] = new Gaussian(pose.Measurer.MeasurePerfect(pose.Pose, component.Mean).ToLinear(),
                                plmodel[visibleLandmarks[i]].Covariance,
                                1.0);
        }
        else {
            q[i] = new Gaussian(component.Mean, I, 1.0);
        }
    }

    for (int i = 0; i < qcandidate.Length; i++) {
        Gaussian component = CandidateMapModel[i];

        if (DAAlgorithm == DataAssociationAlgorithm.Mahalanobis) {
            // assume the covariance is zero, since there's nothing better to assume here;
            // note that this is more stringent on the unproven data, as they are given
            // less leeway for noise than the already associated landmarks
            qcandidate[i] = new Gaussian(pose.Measurer.MeasurePerfect(pose.Pose, component.Mean).ToLinear(),
                                         R, component.Weight);
        }
        else {
            qcandidate[i] = new Gaussian(component.Mean, I, 1.0);
        }
    }

    Gaussian[] vlandmarks = q.Concatenate(qcandidate);

    for (int i = 0; i < n; i++) {
        for (int k = 0; k < measurements.Count; k++) {
            double distance2;

            if (DAAlgorithm == DataAssociationAlgorithm.Mahalanobis) {
                distance2 = vlandmarks[i].SquareMahalanobis(measurements[k].ToLinear());
            }
            else {
                distance2 = vlandmarks[i].Mean.SquareEuclidean(pose.Measurer.MeasureToMap(pose.Pose, measurements[k]));
            }

            if (distance2 < MatchThreshold * MatchThreshold) {
                distances[i, k] = logPD + Math.Log(vlandmarks[i].Multiplier) - 0.5 * distance2;
            }
        }
    }

    for (int i = 0; i < vlandmarks.Length; i++) {
        distances[i, measurements.Count + i] = logPD;
    }

    for (int i = 0; i < measurements.Count; i++) {
        distances[vlandmarks.Length + i, i] = logclutter;
    }

    // fill the (Misdetection x Clutter) quadrant of the matrix with zeros (don't contribute)
    for (int i = vlandmarks.Length; i < m; i++) {
        for (int k = measurements.Count; k < m; k++) {
            distances[i, k] = 0;
        }
    }

    int[] assignments = GraphCombinatorics.LinearAssignment(distances);

    // the assignment vector after removing all the clutter variables
    List<int> labels = new List<int>();

    for (int i = 0; i < measurements.Count; i++) {
        labels.Add(int.MinValue);
    }

    // proved landmark
    for (int i = 0; i < visible.Count; i++) {
        if (assignments[i] < measurements.Count) {
            labels[assignments[i]] = visibleLandmarks[i];
        }
    }

    // candidate landmark
    for (int i = visible.Count; i < vlandmarks.Length; i++) {
        if (assignments[i] < measurements.Count) {
            int k = i - visible.Count;

            // negative labels are for candidates;
            // note that zero is already occupied by the associated landmarks
            labels[assignments[i]] = -k - 1;

            // improve the estimated landmark by averaging with the new measurement
            double w = CandidateMapModel[k].Weight;
            CandidateMapModel[k] =
                new Gaussian((CandidateMapModel[k].Mean.Multiply(w).Add(
                                  BestEstimate.Measurer.MeasureToMap(BestEstimate.Pose, measurements[assignments[i]]))).Divide(w + 1),
                             I, w + 1);

            // note the comparison between double and int:
            // since the weight is only used with integer values (and addition by one)
            // there should never be any truncation error, at least while
            // the number has less than 23/52 bits (for float/double);
            // this amounts to 8388608 and 4.5e15, so it should always be ok.
            // In fact, the gtsam key system fails before that
            // (it uses three bytes for numbering, one for character)
            if (CandidateMapModel[k].Weight >= NewLandmarkThreshold) {
                labels[assignments[i]] = NextLabel;
            }
            else {
                // only keep candidates that haven't been added, but are still visible
                keepcandidate[k] = true;
            }
        }
    }

    // else: unmatched measurements, add to candidates
    for (int i = 0; i < measurements.Count; i++) {
        if (labels[i] == int.MinValue) {
            // if far from everything, generate a new candidate at the measured point;
            // note that the covariance is assumed zero, though that would throw an exception,
            // as it doesn't have an inverse, so the identity is used instead (dummy value)
            CandidateMapModel.Add(new Gaussian(BestEstimate.Measurer.MeasureToMap(BestEstimate.Pose, measurements[i]),
                                               I, 1));

            if (NewLandmarkThreshold <= 1) {
                CandidateMapModel.RemoveAt(CandidateMapModel.Count - 1);
                labels[i] = NextLabel;
            }
        }
    }

    // anything that wasn't seen goes away, it was clutter
    for (int i = keepcandidate.Length - 1; i >= 0; i--) {
        if (!keepcandidate[i]) {
            CandidateMapModel.RemoveAt(i);
        }
    }

    return labels;
}
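// Illustrative sketch, not part of the original class: the gating step in FindLabels
// compares the squared Mahalanobis distance between a predicted measurement and an actual
// one against MatchThreshold^2. For a diagonal covariance this reduces to a variance-weighted
// squared Euclidean distance; the helper below shows only that special case
// (name, signature and the diagonal assumption are illustrative; the real code relies on
// Gaussian.SquareMahalanobis with the full projected covariance).
private static bool ExampleGate(double[] predicted, double[] measured,
                                double[] variance, double matchThreshold)
{
    double distance2 = 0;

    for (int i = 0; i < predicted.Length; i++) {
        double diff = measured[i] - predicted[i];
        distance2  += diff * diff / variance[i];
    }

    // only pairs inside the gate get a finite log-likelihood entry in the
    // assignment matrix; everything else stays at negative infinity
    return distance2 < matchThreshold * matchThreshold;
}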