Code example #1
File: PHDNavigator.cs Project: afalchetti/monorfs
        /// <summary>
        /// Calculate the set log-likelihood log(P(Z|X, M)), but do not consider visibility
        /// (everything is fully visible). This is used to avoid uninteresting solutions to the
        /// optimization problem max_X log(P(Z|X, M)). Also gives the pose gradient at the evaluated pose.
        /// </summary>
        /// <param name="measurements">Sensor measurements in pixel-range form.</param>
        /// <param name="map">Map model.</param>
        /// <param name="pose">Vehicle pose.</param>
        /// <param name="calcgradient">If true, calculate the gradient; otherwise, the gradient param will be null.</param>
        /// <param name="gradient">Log-likelihood gradient wrt. the pose.</param>
        /// <returns>Set log-likelihood.</returns>
        private static double quasiSetLogLikelihood(List <MeasurementT> measurements, IMap map,
                                                    SimulatedVehicle <MeasurerT, PoseT, MeasurementT> pose,
                                                    bool calcgradient, out double[] gradient)
        {
            // exact calculation if there are few components/measurements;
            // use the most probable components approximation otherwise
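
            // llmatrix layout: rows are the map landmarks followed by one clutter source per
            // measurement; columns are the measurements followed by one misdetection slot per
            // landmark; entries left at -infinity stand for impossible pairings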
            SparseMatrix llmatrix = new SparseMatrix(map.Count + measurements.Count, map.Count + measurements.Count, double.NegativeInfinity);
            var          dlldp    = new SparseMatrix <double[]>();

            if (calcgradient)
            {
                dlldp = new SparseMatrix <double[]>(map.Count + measurements.Count, map.Count + measurements.Count, new double[OdoSize]);
            }

            double logPD      = Math.Log(pose.PD);
            double log1PD     = Math.Log(1 - pose.PD);
            double logclutter = Math.Log(pose.ClutterDensity);

            Gaussian[]   zprobs     = new Gaussian[map.Count];
            double[][][] zjacobians = (calcgradient) ? new double[map.Count][][] : null;

            int n = 0;

            foreach (Gaussian landmark in map)
            {
                MeasurementT m       = pose.Measurer.MeasurePerfect(pose.Pose, landmark.Mean);
                double[]     mlinear = m.ToLinear();
                zprobs[n] = new Gaussian(mlinear, pose.MeasurementCovariance, 1);
                n++;
            }

            if (calcgradient)
            {
                n = 0;
                foreach (Gaussian landmark in map)
                {
                    zjacobians[n] = pose.Measurer.MeasurementJacobianP(pose.Pose, landmark.Mean);
                    n++;
                }
            }

            if (calcgradient)
            {
                for (int i = 0; i < zprobs.Length; i++)
                {
                    for (int k = 0; k < measurements.Count; k++)
                    {
                        double[] m = measurements[k].ToLinear();
                        double   d = zprobs[i].Mahalanobis(m);

                        if (d < 12)
                        {
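                            // d is the Mahalanobis distance; only pairings inside this 12-sigma gate
                            // get a finite log-likelihood, everything else stays at -infinity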
                            // prob = log (pD * zprob(measurement))
                            // this way, multiplying probabilities is equivalent to adding (negative) profits
                            llmatrix[i, k] = logPD + Math.Log(zprobs[i].Multiplier) - 0.5 * d * d;
                            dlldp   [i, k] = (m.Subtract(zprobs[i].Mean)).Transpose().
                                             Multiply(zprobs[i].CovarianceInverse).
                                             Multiply(zjacobians[i]).
                                             GetRow(0);
                        }
                    }
                }
            }
            else
            {
                for (int i = 0; i < zprobs.Length; i++)
                {
                    for (int k = 0; k < measurements.Count; k++)
                    {
                        double[] m = measurements[k].ToLinear();
                        double   d = zprobs[i].Mahalanobis(m);

                        if (d < 12)
                        {
                            // prob = log (pD * zprob(measurement))
                            // this way, multiplying probabilities is equivalent to adding (negative) profits
                            llmatrix[i, k] = logPD + Math.Log(zprobs[i].Multiplier) - 0.5 * d * d;
                        }
                    }
                }
            }

            for (int i = 0; i < map.Count; i++)
            {
                llmatrix[i, measurements.Count + i] = log1PD;
            }

            for (int i = 0; i < measurements.Count; i++)
            {
                llmatrix[map.Count + i, i] = logclutter;
            }

            List <SparseMatrix> connectedfull = GraphCombinatorics.ConnectedComponents(llmatrix);
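
            // each connected component of the association graph can be enumerated independently;
            // the set log-likelihood is the sum of the per-component log-sum-exp terms accumulated below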

            double[]   logcomp    = new double[200];
            double[][] dlogcompdp = (calcgradient) ? new double[200][] : null;
            double     total      = 0;

            gradient = (calcgradient) ? new double[OdoSize] : null;

            for (int i = 0; i < connectedfull.Count; i++)
            {
                int[]                   rows;
                int[]                   cols;
                SparseMatrix            component = connectedfull[i].Compact(out rows, out cols);
                SparseMatrix <double[]> dcomp     = (calcgradient) ? dlldp.Submatrix(rows, cols) : null;

                // fill the (Misdetection x Clutter) quadrant of the matrix with zeros (don't contribute)
                // NOTE this is filled after the connected components partition because
                //      otherwise everything would be connected through this quadrant
                for (int k = 0; k < rows.Length; k++)
                {
                    if (rows[k] >= map.Count)
                    {
                        for (int h = 0; h < cols.Length; h++)
                        {
                            if (cols[h] >= measurements.Count)
                            {
                                component[k, h] = 0;
                            }
                        }
                    }
                }

                IEnumerable <Tuple <int[], double> > assignments;
                bool enumerateall = false;
                if (component.Rows.Count <= 5)
                {
                    assignments  = GraphCombinatorics.LexicographicalPairing(component, map.Count);
                    enumerateall = true;
                }
                else
                {
                    assignments  = GraphCombinatorics.MurtyPairing(component);
                    enumerateall = false;
                }

                int m = 0;

                if (calcgradient)
                {
                    foreach (Tuple <int[], double> assignment in assignments)
                    {
                        if (m >= logcomp.Length || (!enumerateall && logcomp[m] - logcomp[0] < -10))
                        {
                            break;
                        }

                        logcomp   [m] = assignment.Item2;
                        dlogcompdp[m] = new double[OdoSize];

                        for (int p = 0; p < assignment.Item1.Length; p++)
                        {
                            dlogcompdp[m] = dlogcompdp[m].Add(dcomp[p, assignment.Item1[p]]);
                        }

                        m++;
                    }
                }
                else
                {
                    foreach (Tuple <int[], double> assignment in assignments)
                    {
                        if (m >= logcomp.Length || (!enumerateall && logcomp[m] - logcomp[0] < -10))
                        {
                            break;
                        }

                        logcomp[m] = assignment.Item2;
                        m++;
                    }
                }

                total += logcomp.LogSumExp(0, m);

                if (calcgradient)
                {
                    gradient = gradient.Add(dlogcompdp.TemperedAverage(logcomp, 0, m));
                }
            }

            return(total);
        }
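
The summary above notes that this quasi-likelihood feeds the optimization problem max_X log(P(Z|X, M)). The following is a minimal, hypothetical sketch (not part of monorfs) of how the returned gradient could drive a single ascent step on the pose; the AddOdometry call, the settable Pose property, and the fixed step size are assumptions made only for illustration.

        /// <summary>
        /// Hypothetical helper (illustration only): take one gradient-ascent step on the pose
        /// using the quasi set log-likelihood and its gradient.
        /// </summary>
        private static double ascendOnce(List <MeasurementT> measurements, IMap map,
                                         SimulatedVehicle <MeasurerT, PoseT, MeasurementT> pose)
        {
            double[] gradient;
            double   loglike = quasiSetLogLikelihood(measurements, map, pose, true, out gradient);

            const double stepsize = 1e-3;  // assumed fixed step size

            // move the pose uphill in log-likelihood; the real update would use whatever
            // retraction the pose type provides (assumed here to be AddOdometry)
            pose.Pose = pose.Pose.AddOdometry(gradient.Multiply(stepsize));

            return loglike;  // likelihood before the step, e.g. to monitor convergence
        }
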
Code example #2
File: ISAM2Navigator.cs Project: afalchetti/monorfs
        /// <summary>
        /// Find the data association labels from the new valid measurements and
        /// the internal previous map model using Mahalanobis association.
        /// </summary>
        /// <param name="measurements">New measurements.</param>
        /// <returns>Association labels.</returns>
        public List <int> FindLabels(List <MeasurementT> measurements)
        {
            if (DAAlgorithm == DataAssociationAlgorithm.Perfect)
            {
                if (!RefVehicle.HasDataAssociation)
                {
                    var exception = new InvalidOperationException("Tried to use perfect data association when none exists.");
                    exception.Data["module"] = "association";

                    throw exception;
                }

                return(RefVehicle.DataAssociation);
            }

            double[][] I             = 0.001.Multiply(Accord.Math.Matrix.Identity(3).ToArray());
            bool[]     keepcandidate = new bool[CandidateMapModel.Count];
            double[][] R;
            Gaussian[] q;
            Gaussian[] qcandidate;

            var        pose             = BestEstimate;
            IndexedMap visible          = new IndexedMap(3);
            List <int> visibleLandmarks = new List <int>();

            for (int i = 0; i < MapModel.Count; i++)
            {
                if (pose.Visible(MapModel[i].Mean))
                {
                    visible.Add(MapModel[i]);
                    visibleLandmarks.Add(i);
                }
            }

            double logPD      = Math.Log(pose.PD);
            double logclutter = Math.Log(pose.ClutterDensity);

            int n = visible.Count + CandidateMapModel.Count;
            int m = visible.Count + CandidateMapModel.Count + measurements.Count;

            // distances(i, k) = distance between landmark i and measurements k
            SparseMatrix distances = new SparseMatrix(n, n, double.NegativeInfinity);

            int candidatecount = CandidateMapModel.Count;

            // candidate count at the beginning of the process;
            // this is so the measurements aren't compared with other measurements
            // (if one gets promoted to candidate, the next one could be deemed similar to it,
            // but that isn't sensible: one landmark -> hopefully one measurement)

            R          = pose.MeasurementCovariance;
            q          = new Gaussian[visible.Count];
            qcandidate = new Gaussian[CandidateMapModel.Count];

            for (int i = 0; i < q.Length; i++)
            {
                Gaussian component = visible[i];

                if (DAAlgorithm == DataAssociationAlgorithm.Mahalanobis)
                {
                    q[i] = new Gaussian(pose.Measurer.MeasurePerfect(pose.Pose, component.Mean).ToLinear(),
                                        plmodel[visibleLandmarks[i]].Covariance,
                                        1.0);
                }
                else
                {
                    q[i] = new Gaussian(component.Mean, I, 1.0);
                }
            }

            for (int i = 0; i < qcandidate.Length; i++)
            {
                Gaussian component = CandidateMapModel[i];

                if (DAAlgorithm == DataAssociationAlgorithm.Mahalanobis)
                {
                    // assume the covariance is zero, since there's nothing better to assume here
                    // note that this is more stringent on the unproven data, as they are given
                    // less leeway for noise than the already associated landmarks
                    qcandidate[i] = new Gaussian(pose.Measurer.MeasurePerfect(pose.Pose, component.Mean).ToLinear(),
                                                 R, component.Weight);
                }
                else
                {
                    qcandidate[i] = new Gaussian(component.Mean, I, 1.0);
                }
            }

            Gaussian[] vlandmarks = q.Concatenate(qcandidate);
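            // vlandmarks stacks the proven visible landmarks first and the candidates after them;
            // the label decoding after the linear assignment relies on this ordering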

            for (int i = 0; i < n; i++)
            {
                for (int k = 0; k < measurements.Count; k++)
                {
                    double distance2;

                    if (DAAlgorithm == DataAssociationAlgorithm.Mahalanobis)
                    {
                        distance2 = vlandmarks[i].SquareMahalanobis(measurements[k].ToLinear());
                    }
                    else
                    {
                        distance2 = vlandmarks[i].Mean.SquareEuclidean(pose.Measurer.MeasureToMap(pose.Pose, measurements[k]));
                    }

                    if (distance2 < MatchThreshold * MatchThreshold)
                    {
                        distances[i, k] = logPD + Math.Log(vlandmarks[i].Multiplier) - 0.5 * distance2;
                    }
                }
            }

            for (int i = 0; i < vlandmarks.Length; i++)
            {
                distances[i, measurements.Count + i] = logPD;
            }

            for (int i = 0; i < measurements.Count; i++)
            {
                distances[vlandmarks.Length + i, i] = logclutter;
            }

            // fill the (Misdetection x Clutter) quadrant of the matrix with zeros (don't contribute)
            for (int i = vlandmarks.Length; i < m; i++)
            {
                for (int k = measurements.Count; k < m; k++)
                {
                    distances[i, k] = 0;
                }
            }

            int[] assignments = GraphCombinatorics.LinearAssignment(distances);

            // the assignment vector after removing all the clutter variables
            List <int> labels = new List <int>();

            for (int i = 0; i < measurements.Count; i++)
            {
                labels.Add(int.MinValue);
            }

            // proven landmarks
            for (int i = 0; i < visible.Count; i++)
            {
                if (assignments[i] < measurements.Count)
                {
                    labels[assignments[i]] = visibleLandmarks[i];
                }
            }

            // candidate landmark
            for (int i = visible.Count; i < vlandmarks.Length; i++)
            {
                if (assignments[i] < measurements.Count)
                {
                    int k = i - visible.Count;

                    labels[assignments[i]] = -k - 1;
                    // negative labels are for candidates,
                    // note that zero is already occupied by the associated landmarks

                    // improve the estimated landmark by averaging with the new measurement
                    double w = CandidateMapModel[k].Weight;
                    CandidateMapModel[k] =
                        new Gaussian((CandidateMapModel[k].Mean.Multiply(w).Add(
                                          BestEstimate.Measurer.MeasureToMap(BestEstimate.Pose, measurements[assignments[i]]))).Divide(w + 1),
                                     I,
                                     w + 1);

                    // note the comparison between a double and an int:
                    // since the weight is only used with integer values (and incremented by one),
                    // there should never be any truncation error, at least while
                    // the number needs fewer than 23/52 bits (for float/double);
                    // this amounts to 8388608 and 4.5e15, so it should always be ok.
                    // In fact, the gtsam key system fails before that
                    // (it uses three bytes for numbering and one for the character)
                    if (CandidateMapModel[k].Weight >= NewLandmarkThreshold)
                    {
                        labels[assignments[i]] = NextLabel;
                    }
                    else
                    {
                        keepcandidate[k] = true;
                        // only keep candidates that haven't been added, but are still visible
                    }
                }
            }

            // else: unmatched measurements, add to candidates
            for (int i = 0; i < measurements.Count; i++)
            {
                if (labels[i] == int.MinValue)
                {
                    // if far from everything, generate a new candidate at the measured point
                    // note that the covariance is assumed zero, though that would throw an exception,
                    // as it doesn't have an inverse, so the identity is used instead (dummy value)
                    CandidateMapModel.Add(new Gaussian(BestEstimate.Measurer.MeasureToMap(BestEstimate.Pose, measurements[i]), I, 1));

                    if (NewLandmarkThreshold <= 1)
                    {
                        CandidateMapModel.RemoveAt(CandidateMapModel.Count - 1);
                        labels[i] = NextLabel;
                    }
                }
            }

            // anything that wasn't seen goes away, it was clutter
            for (int i = keepcandidate.Length - 1; i >= 0; i--)
            {
                if (!keepcandidate[i])
                {
                    CandidateMapModel.RemoveAt(i);
                }
            }

            return(labels);
        }
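
As a reading aid, here is an illustrative sketch (not monorfs code) that decodes the labels returned by FindLabels according to the conventions commented above: non-negative labels are landmark indices (or a freshly assigned NextLabel), negative labels of the form -k - 1 point to candidate k, and int.MinValue marks an unmatched measurement that just spawned a new candidate. The Console output is only for demonstration.

        /// <summary>
        /// Illustrative decoding of the FindLabels output (not part of monorfs).
        /// </summary>
        public static void DescribeLabels(List <int> labels)
        {
            for (int i = 0; i < labels.Count; i++)
            {
                if (labels[i] == int.MinValue)
                {
                    // unmatched measurement: a brand-new candidate was created at the measured point
                    Console.WriteLine("measurement " + i + ": new candidate");
                }
                else if (labels[i] < 0)
                {
                    // negative labels refer to pre-existing candidates: -k - 1 means candidate k
                    Console.WriteLine("measurement " + i + ": candidate " + (-labels[i] - 1));
                }
                else
                {
                    // non-negative labels are landmark indices (or a freshly promoted landmark)
                    Console.WriteLine("measurement " + i + ": landmark " + labels[i]);
                }
            }
        }
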
Code example #3
File: PHDNavigator.cs Project: afalchetti/monorfs
        /// <summary>
        /// Calculate the set log-likelihood log(P(Z|X, M)).
        /// </summary>
        /// <param name="measurements">Sensor measurements in pixel-range form.</param>
        /// <param name="map">Map model.</param>
        /// <param name="pose">Vehicle pose.</param>
        /// <returns>Set log-likelihood.</returns>
        public static double SetLogLikelihood(List <MeasurementT> measurements, IMap map,
                                              SimulatedVehicle <MeasurerT, PoseT, MeasurementT> pose)
        {
            // exact calculation if there are few components/measurements;
            // use the most probable components approximation otherwise
            SparseMatrix        llmatrix      = SetLogLikeMatrix(measurements, map, pose);
            List <SparseMatrix> connectedfull = GraphCombinatorics.ConnectedComponents(llmatrix);

            double[] logcomp = new double[200];
            double   total   = 0;

            for (int i = 0; i < connectedfull.Count; i++)
            {
                int[]        rows;
                int[]        cols;
                SparseMatrix component = connectedfull[i].Compact(out rows, out cols);

                // fill the (Misdetection x Clutter) quadrant of the matrix with zeros (don't contribute)
                // NOTE this is filled after the connected components partition because
                //      otherwise everything would be connected through this quadrant
                for (int k = 0; k < rows.Length; k++)
                {
                    if (rows[k] >= map.Count)
                    {
                        for (int h = 0; h < cols.Length; h++)
                        {
                            if (cols[h] >= measurements.Count)
                            {
                                component[k, h] = 0;
                            }
                        }
                    }
                }

                IEnumerable <Tuple <int[], double> > assignments;
                bool enumerateall = false;
                if (component.Rows.Count <= 5)
                {
                    assignments  = GraphCombinatorics.LexicographicalPairing(component, map.Count);
                    enumerateall = true;
                }
                else
                {
                    assignments  = GraphCombinatorics.MurtyPairing(component);
                    enumerateall = false;
                }

                int m = 0;
                foreach (Tuple <int[], double> assignment in assignments)
                {
                    if (m >= logcomp.Length || (!enumerateall && logcomp[m] - logcomp[0] < -10))
                    {
                        break;
                    }

                    logcomp[m] = assignment.Item2;
                    m++;
                }

                total += logcomp.LogSumExp(0, m);
            }

            return(total);
        }
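
The logcomp.LogSumExp(0, m) calls above presumably compute a numerically stable log(sum_i exp(logcomp[i])) over the first m entries; the sketch below shows that standard technique under that assumption, and may differ from the actual monorfs extension method.

        /// <summary>
        /// Sketch of a numerically stable log-sum-exp over x[start..end) (assumed behavior of
        /// the LogSumExp extension used above; not necessarily the monorfs implementation).
        /// </summary>
        private static double logSumExp(double[] x, int start, int end)
        {
            double max = double.NegativeInfinity;

            for (int i = start; i < end; i++)
            {
                max = Math.Max(max, x[i]);
            }

            if (double.IsNegativeInfinity(max))
            {
                // empty range or all terms have zero probability
                return double.NegativeInfinity;
            }

            double sum = 0;

            for (int i = start; i < end; i++)
            {
                // shift by the maximum so Math.Exp cannot overflow
                sum += Math.Exp(x[i] - max);
            }

            return max + Math.Log(sum);
        }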