/// <summary>
/// Associates objects across partitions using a caller-supplied similarity function,
/// then resolves matches with greedy mean-similarity clustering.
/// </summary>
public static List<MultipartiteWeightedMatch> computeAssociations<T>(List<List<T>> boundingBoxes, ComputeSimilarityFunction<T> similarity)
{
    // Build the pairwise similarity tensor, then greedily cluster.
    MultipartiteWeightTensor tensor = computeSimilarityTensor<T>(boundingBoxes, similarity);
    var matcher = new MultipartiteWeightedMatching.GreedyMean();
    return matcher.getMatching(tensor);
}
/// <summary>
/// Associates generic polygons (segment lists) across partitions via
/// greedy mean-similarity matching.
/// </summary>
public static List<MultipartiteWeightedMatch> computeGenericPolygonAssociations(List<List<Segment>> polygons)
{
    MultipartiteWeightTensor tensor = computeSimilarityTensor(polygons);
    var matcher = new MultipartiteWeightedMatching.GreedyMean();
    return matcher.getMatching(tensor);
}
/// <summary>
/// Associates bounding boxes across partitions via greedy mean-similarity matching.
/// </summary>
public static List<MultipartiteWeightedMatch> computeBoundingBoxAssociations(List<List<BoundingBox>> boundingBoxes)
{
    MultipartiteWeightTensor tensor = computeSimilarityTensor(boundingBoxes);
    var matcher = new MultipartiteWeightedMatching.GreedyMean();
    return matcher.getMatching(tensor);
}
/// <summary>
/// Clusters tracks across multiple tracking results using multipartite
/// weighted matching, scoring track pairs by tubelet IoU.
/// </summary>
public static List<MultipartiteWeightedMatch> AssociateTracklets(List<MultiObjectTrackingResult> compressedResults)
{
    // Score every cross-result track pair with tubelet IoU.
    CompressedTrackSimilarityMetric.ICompressedTrackSimilarityMetric metric =
        new CompressedTrackSimilarityMetric.TubeletIoU();
    MultipartiteWeightTensor weights =
        CompressedTrackSimilarityMetric.computeTrackSimilarityTensor(compressedResults, metric);

    // Greedy mean-similarity matching clusters the tracks.
    var matcher = new MultipartiteWeightedMatching.GreedyMean();
    return matcher.getMatching(weights);
}
/// <summary>
/// Builds a multipartite weight tensor whose (i, j) matrix holds the pairwise
/// similarities between the objects of partitions i and j.
/// </summary>
public static MultipartiteWeightTensor computeSimilarityTensor<T>(List<List<T>> objs, ComputeSimilarityFunction<T> similarity)
{
    var tensor = new MultipartiteWeightTensor(objs.Count);
    for (int p = 0; p < tensor.noParts; p++)
    {
        tensor.setNumPartitionElements(p, objs[p].Count);
    }

    // Fill the upper triangle: one similarity matrix per unordered partition pair.
    for (int a = 0; a + 1 < tensor.noParts; a++)
    {
        for (int b = a + 1; b < tensor.noParts; b++)
        {
            tensor.setWeightMatrix(a, b, computeSimilarities<T>(objs[a], objs[b], similarity));
        }
    }
    return tensor;
}
/// <summary>
/// Builds a multipartite weight tensor whose (i, j) matrix holds the pairwise
/// similarities between the polygons of partitions i and j.
/// </summary>
public static MultipartiteWeightTensor computeSimilarityTensor(List<List<Segment>> polygons)
{
    var tensor = new MultipartiteWeightTensor(polygons.Count);
    for (int p = 0; p < tensor.noParts; p++)
    {
        tensor.setNumPartitionElements(p, polygons[p].Count);
    }

    // Fill the upper triangle: one similarity matrix per unordered partition pair.
    for (int a = 0; a + 1 < tensor.noParts; a++)
    {
        for (int b = a + 1; b < tensor.noParts; b++)
        {
            tensor.setWeightMatrix(a, b, computeSimilarities(polygons[a], polygons[b]));
        }
    }
    return tensor;
}
/// <summary>
/// Builds a track-similarity tensor over all tracking results, including the
/// diagonal (each result against itself) and both triangles (the lower triangle
/// is the transpose of the upper so lookups work in either order).
/// </summary>
public static MultipartiteWeightTensor computeTrackSimilarityTensor(List<MultiObjectTrackingResult> cts_list, ICompressedTrackSimilarityMetric metric)
{
    int count = cts_list.Count;
    var tensor = new MultipartiteWeightTensor(count);
    for (int p = 0; p < count; p++)
    {
        tensor.setNumPartitionElements(p, cts_list[p].tracks.Count);
    }

    for (int a = 0; a < count; a++)
    {
        // Diagonal: a result's tracks compared against themselves.
        tensor.setWeightMatrix(a, a, computeTrackSimilarityMatrix(cts_list[a], cts_list[a], metric));

        for (int b = a + 1; b < count; b++)
        {
            double[,] sim = computeTrackSimilarityMatrix(cts_list[a], cts_list[b], metric);
            tensor.setWeightMatrix(a, b, sim);
            // Mirror into the lower triangle.
            tensor.setWeightMatrix(b, a, MatrixOperations.Transpose(sim));
        }
    }
    return tensor;
}
/// <summary>
/// Hierarchical agglomerative matching: every partition element starts as its
/// own singleton cluster; the two mergeable clusters (no partition in common)
/// with the highest mean inter-cluster similarity are merged repeatedly until
/// no pair's similarity exceeds <paramref name="MergeLowerBound"/>.
/// </summary>
/// <param name="weightTensor">Pairwise weights between all partition elements.</param>
/// <param name="MergeLowerBound">Minimum mean similarity required to merge two clusters.</param>
/// <returns>One <see cref="MultipartiteWeightedMatch"/> per final cluster, with pairwise weights recorded.</returns>
public List<MultipartiteWeightedMatch> getMatching(MultipartiteWeightTensor weightTensor, double MergeLowerBound)
{
    // Each dictionary maps partition index -> element index within it.
    // Initially every element is a singleton cluster.
    List<Dictionary<int, int>> sets = new List<Dictionary<int, int>>();
    for (int i = 0; i < weightTensor.noParts; i++)
    {
        for (int j = 0; j < weightTensor.getNumPartitionElements(i); j++)
        {
            sets.Add(new Dictionary<int, int> { { i, j } });
        }
    }

    // Greedily merge the best mergeable pair until no pair clears the bound.
    bool merged;
    do
    {
        merged = false;
        int set1Index = -1;
        int set2Index = -1;
        double maxSimilarity = 0;
        for (int i = 0; i < sets.Count - 1; i++)
        {
            Dictionary<int, int> set1 = sets[i];
            for (int j = i + 1; j < sets.Count; j++)
            {
                Dictionary<int, int> set2 = sets[j];
                // Clusters that share a partition cannot be merged.
                if (!setsAreMergable(set1, set2))
                {
                    continue;
                }
                double sim = computeInterParitionWeight(set1, set2, weightTensor);
                if (sim > maxSimilarity)
                {
                    set1Index = i;
                    set2Index = j;
                    maxSimilarity = sim;
                }
            }
        }
        // Guard on set1Index: with a negative MergeLowerBound and no mergeable
        // pair found, the original indexed sets[-1] and crashed.
        if (set1Index >= 0 && maxSimilarity > MergeLowerBound)
        {
            // Fold set2's assignments into set1, then drop set2 by index
            // (cheaper than a reference-equality scan).
            Dictionary<int, int> set1 = sets[set1Index];
            foreach (KeyValuePair<int, int> entry in sets[set2Index])
            {
                set1.Add(entry.Key, entry.Value);
            }
            sets.RemoveAt(set2Index);
            merged = true;
        }
    } while (merged);

    // Convert each surviving cluster into a match, recording pairwise weights.
    List<MultipartiteWeightedMatch> match_list = new List<MultipartiteWeightedMatch>();
    foreach (Dictionary<int, int> set in sets)
    {
        MultipartiteWeightedMatch m = new MultipartiteWeightedMatch();
        List<int> partitionList = set.Keys.ToList();
        partitionList.Sort();
        foreach (int part in partitionList)
        {
            m.update(part, set[part]);
        }
        for (int i = 0; i < partitionList.Count - 1; i++)
        {
            for (int j = i + 1; j < partitionList.Count; j++)
            {
                double val = weightTensor.getWeight(
                    partitionList[i], m.elementList[partitionList[i]],
                    partitionList[j], m.elementList[partitionList[j]]);
                m.updateWeight(partitionList[i], partitionList[j], val);
            }
        }
        match_list.Add(m);
    }
    return match_list;
}
/// <summary>
/// Hierarchical agglomerative matching with the default merge threshold:
/// clusters are merged while any mergeable pair has positive mean similarity.
/// </summary>
/// <param name="weightTensor">Pairwise weights between all partition elements.</param>
/// <returns>One <see cref="MultipartiteWeightedMatch"/> per final cluster, with pairwise weights recorded.</returns>
public List<MultipartiteWeightedMatch> getMatching(MultipartiteWeightTensor weightTensor)
{
    // This method previously duplicated the thresholded overload line-for-line;
    // a lower bound of 0 reproduces its behavior exactly (merge while the best
    // mean similarity is > 0).
    return getMatching(weightTensor, 0);
}
// Similarity between two clusters = mean pairwise weight over every
// (element of set1, element of set2) combination; 0 when either set is empty.
double computeInterParitionWeight(Dictionary<int, int> set1, Dictionary<int, int> set2, MultipartiteWeightTensor weightTensor)
{
    double total = 0;
    int pairCount = 0;
    foreach (KeyValuePair<int, int> a in set1)
    {
        foreach (KeyValuePair<int, int> b in set2)
        {
            // Key = partition index, Value = element index within it.
            total += weightTensor.getWeight(a.Key, a.Value, b.Key, b.Value);
            pairCount++;
        }
    }
    return pairCount > 0 ? total / pairCount : 0;
}