/// <summary>
/// Overlays track bounding boxes, labels, and boolean attributes onto each frame,
/// then hands the annotated frame sequence to FFMpeg to produce a video.
/// </summary>
/// <param name="Images">Source frames, index-aligned with <paramref name="dateTimeList"/>.</param>
/// <param name="dateTimeList">Timestamp of each frame, used to sample the tracks.</param>
/// <param name="ctts">Tracking result whose tracks are sampled at each frame time.</param>
/// <param name="videoName">Name of the output video.</param>
/// <param name="directory">Directory the video is written to.</param>
public static void generateVideoForEvaluation(List<Image> Images, List<DateTime> dateTimeList, MultiObjectTrackingResult ctts, String videoName, String directory)
{
    List<Image> annotatedFrames = new List<Image>();

    for (int frame = 0; frame < Images.Count; frame++)
    {
        DateTime frameTime = dateTimeList[frame];

        List<BoundingBox> boxes = new List<BoundingBox>();
        List<string> boxLabels = new List<string>();
        List<int> trackIndices = new List<int>();

        // Per-attribute value lists: always "occlusion", plus every boolean
        // attribute declared by the first track (assumed representative of all).
        Dictionary<string, List<bool>> attributeValues = new Dictionary<string, List<bool>>();
        attributeValues.Add("occlusion", new List<bool>());
        if (ctts.tracks.Count != 0)
        {
            foreach (string key in ctts.tracks[0].booleanAttributeTracks.Keys)
            {
                if (!attributeValues.ContainsKey(key))
                {
                    attributeValues.Add(key, new List<bool>());
                }
            }
        }

        for (int trackIdx = 0; trackIdx < ctts.tracks.Count; trackIdx++)
        {
            CompressedTrack track = ctts.tracks[trackIdx];
            SpaceTime sample = track.getSpaceTimeAt(frameTime);
            BooleanAttribute outOfView = track.getAttributeAt("outofview", frameTime);

            // Draw only tracks that exist at this instant and are in view.
            if (sample == null || outOfView == null || outOfView.value)
            {
                continue;
            }

            boxes.Add(sample.region);
            boxLabels.Add(track.label);
            foreach (string key in attributeValues.Keys)
            {
                attributeValues[key].Add(track.getAttributeAt(key, frameTime).value);
            }
            trackIndices.Add(trackIdx);
        }

        annotatedFrames.Add(generateTrackImage(Images[frame], boxLabels, boxes, attributeValues, trackIndices));
    }

    Console.WriteLine("Saving " + directory + "\\" + videoName);
    FFMpegWrappers.generateVideoFromFrames(annotatedFrames, videoName, directory);
}
/// <summary>
/// Time-averaged overlap metric between two tracks: integrates the per-instant
/// overlap-area fraction over the window where both tracks exist, then normalizes
/// by the longer track's duration. Returns 0 when the tracks do not overlap in
/// time, or overlap for less than one sampling step.
/// </summary>
/// <param name="t1">First track.</param>
/// <param name="t2">Second track.</param>
/// <param name="fps">Sampling rate used to discretize the time integral.</param>
/// <returns>Normalized overlap in [0, 1]-ish units (fraction integrated over time / duration).</returns>
public double getMetric(CompressedTrack t1, CompressedTrack t2, int fps)
{
    // Sampling step. Clamp to at least 1 ms: for fps > 1000 the floor would
    // produce a zero step and the integration loop below would never advance.
    TimeSpan dt = new TimeSpan(0, 0, 0, 0, Math.Max(1, (int)Math.Floor(1000.0 / fps)));

    DateTime startTime1 = t1.startTime;
    DateTime startTime2 = t2.startTime;
    DateTime endTime1 = t1.endTime;
    DateTime endTime2 = t2.endTime;

    // No temporal overlap at all?
    if (startTime1 > endTime2 || startTime2 > endTime1) { return (0); }

    // Clip to the common time window.
    DateTime commonStartTime = startTime1;
    if (startTime2 > startTime1) { commonStartTime = startTime2; }
    DateTime commonEndTime = endTime1;
    if (endTime2 < endTime1) { commonEndTime = endTime2; }
    if (commonEndTime - commonStartTime < dt) { return (0); }

    // FIX: use TotalMilliseconds, not the Milliseconds component. When fps <= 1
    // the step is a whole second, so dt.Milliseconds is 0 and the old code
    // silently integrated to 0 regardless of the actual overlap.
    double stepMs = dt.TotalMilliseconds;

    // Discretized integral of the overlap fraction over the common window.
    double sum = 0;
    for (DateTime t = commonStartTime; t <= commonEndTime; t += dt)
    {
        SpaceTime l1 = t1.getSpaceTimeAt(t);
        SpaceTime l2 = t2.getSpaceTimeAt(t);
        // FIX: a track may have no sample at this instant (getSpaceTimeAt can
        // return null, as the callers in this file assume) — skip rather than
        // dereference null.
        if (l1 == null || l2 == null) { continue; }
        double overlap = BoundingBox.ComputeOverlapAreaFraction(l1.region, l2.region);
        sum += (overlap * stepMs); // fraction-milliseconds
    }

    // Normalize by the duration of the longer of the two tracks.
    TimeSpan deltat1 = endTime1 - startTime1;
    TimeSpan deltat2 = endTime2 - startTime2;
    double max_ms = Math.Max((double)deltat1.TotalMilliseconds, (double)deltat2.TotalMilliseconds);
    return (sum / max_ms);
}
/// <summary>
/// Builds the pairwise similarity matrix between the tracks of two tracking
/// results: cell [i, j] is the metric between track i of the first result and
/// track j of the second.
/// </summary>
/// <param name="cts1">First tracking result (rows).</param>
/// <param name="cts2">Second tracking result (columns).</param>
/// <param name="metric">Similarity metric applied to each track pair.</param>
/// <returns>A cts1.tracks.Count × cts2.tracks.Count matrix of similarities.</returns>
public static double[,] computeTrackSimilarityMatrix(MultiObjectTrackingResult cts1, MultiObjectTrackingResult cts2, ICompressedTrackSimilarityMetric metric)
{
    int rows = cts1.tracks.Count;
    int cols = cts2.tracks.Count;
    double[,] similarity = new double[rows, cols];

    for (int row = 0; row < rows; row++)
    {
        for (int col = 0; col < cols; col++)
        {
            similarity[row, col] = metric.getMetric(cts1.tracks[row], cts2.tracks[col]);
        }
    }

    return similarity;
}
/// <summary>
/// Stitches the per-chunk, per-object aggregated tracking results of one video
/// into a single MultiObjectTrackingResult. Chunks are ordered by their starting
/// frame (parsed from the last '_'-separated field of the task URI directory
/// name); overlapping frames between consecutive chunks are dropped, later
/// chunks are time-shifted ("postponed") onto the stitched timeline, and tracks
/// with an object id already seen are joined to the existing track.
/// </summary>
/// <param name="entries">Aggregated result table entries for every chunk/object of the video.</param>
/// <param name="ImageURLs">Out: de-overlapped, ordered frame URLs of the whole video.</param>
/// <param name="fps">Out: frame rate taken from the job parameters (0 if no entries).</param>
/// <returns>The stitched tracking result, or null when <paramref name="entries"/> is empty.</returns>
public static MultiObjectTrackingResult stitchAllChunksAllObjectsOfOneVideo(List <SatyamAggregatedResultsTableEntry> entries, out List <string> ImageURLs, out int fps)
{
    ImageURLs = new List <string>();
    fps = 0;
    if (entries.Count == 0) { return (null); }
    SatyamJobStorageAccountAccess satyamStorage = new SatyamJobStorageAccountAccess();
    MultiObjectTrackingResult stitched = new MultiObjectTrackingResult();
    int totalFrameCounts = 0;
    // ensure the order is correct: bucket entries by chunk starting frame
    SortedDictionary <int, List <SatyamAggregatedResultsTableEntry> > sortedEntries = new SortedDictionary <int, List <SatyamAggregatedResultsTableEntry> >();
    List <int> idx = new List <int>();
    for (int i = 0; i < entries.Count; i++)
    {
        SatyamAggregatedResultsTableEntry entry = entries[i];
        SatyamAggregatedResult satyamAggResult = JSonUtils.ConvertJSonToObject <SatyamAggregatedResult>(entry.ResultString);
        SatyamTask aggTask = JSonUtils.ConvertJSonToObject <SatyamTask>(satyamAggResult.TaskParameters);
        string video = URIUtilities.localDirectoryNameFromURI(aggTask.SatyamURI);
        // The chunk's starting frame is encoded as the last '_'-separated field
        // of the directory name.
        string[] fields = video.Split('_');
        int startingFrame = Convert.ToInt32(fields[fields.Length - 1]);
        if (!sortedEntries.ContainsKey(startingFrame))
        {
            sortedEntries.Add(startingFrame, new List <SatyamAggregatedResultsTableEntry>());
            idx.Add(startingFrame);
        }
        sortedEntries[startingFrame].Add(entries[i]);
    }
    idx.Sort();
    // Object ids already merged into 'stitched'; index alignment with
    // stitched.tracks is assumed (see NOTE(review) below).
    List <string> AggObjIds = new List <string>();
    for (int i = 0; i < idx.Count; i++)
    {
        int noFramesOverlap = 0;
        string blobDir = "";
        // grouping all objects that belong to the same chunk
        MultiObjectTrackingResult aggTracksOfAllObjectsPerChunk = new MultiObjectTrackingResult();
        List <string> objIds = new List <string>();
        for (int j = 0; j < sortedEntries[idx[i]].Count; j++)
        {
            SatyamAggregatedResultsTableEntry entry = sortedEntries[idx[i]][j];
            SatyamAggregatedResult satyamAggResult = JSonUtils.ConvertJSonToObject <SatyamAggregatedResult>(entry.ResultString);
            SatyamTask aggTask = JSonUtils.ConvertJSonToObject <SatyamTask>(satyamAggResult.TaskParameters);
            MultiObjectTrackingSubmittedJob job = JSonUtils.ConvertJSonToObject <MultiObjectTrackingSubmittedJob>(aggTask.jobEntry.JobParameters);
            // ChunkOverlap is in seconds; convert to a frame count.
            if (job.ChunkOverlap != 0.0)
            {
                noFramesOverlap = (int)(job.ChunkOverlap * job.FrameRate);
            }
            fps = job.FrameRate;
            blobDir = URIUtilities.localDirectoryFullPathFromURI(aggTask.SatyamURI);
            string objId = URIUtilities.filenameFromURINoExtension(aggTask.SatyamURI);
            objIds.Add(objId);
            TrackletLabelingAggregatedResult aggresult = JSonUtils.ConvertJSonToObject <TrackletLabelingAggregatedResult>(satyamAggResult.AggregatedResultString);
            // NOTE(review): WebClient, Stream, and StreamReader are never
            // disposed here — leaked per entry; consider 'using' blocks.
            WebClient wb = new WebClient();
            Stream aggTrackStream = wb.OpenRead(aggresult.AggregatedTrackletsString_URL);
            StreamReader reader = new StreamReader(aggTrackStream);
            String aggTrackString = reader.ReadToEnd();
            MultiObjectTrackingResult aggTracks = JSonUtils.ConvertJSonToObject <MultiObjectTrackingResult>(aggTrackString);
            // Merge this object's tracks into the chunk-wide result.
            if (aggTracksOfAllObjectsPerChunk.tracks.Count == 0)
            {
                aggTracksOfAllObjectsPerChunk = aggTracks;
            }
            else
            {
                for (int k = 0; k < aggTracks.tracks.Count; k++)
                {
                    aggTracksOfAllObjectsPerChunk.tracks.Add(aggTracks.tracks[k]);
                }
            }
        }
        List <string> TraceURLs = satyamStorage.getURLListOfSpecificExtensionUnderSubDirectoryByURI(blobDir, new List <string>() { "jpg", "png" });
        if (i == 0)
        {
            // First chunk seeds the stitched result and the frame list as-is.
            ImageURLs.AddRange(TraceURLs);
            stitched = aggTracksOfAllObjectsPerChunk;
            totalFrameCounts += TraceURLs.Count;
            AggObjIds = objIds;
        }
        else
        {
            // Drop the frames that overlap with the previous chunk.
            int noNewFrames = 0;
            for (int j = noFramesOverlap; j < TraceURLs.Count; j++)
            {
                ImageURLs.Add(TraceURLs[j]);
                noNewFrames++;
            }
            //stitched = MultiObjectTrackingAnalyzer.stitchTwoTracesByTubeletsOfOverlappingVideoChunk(stitched,
            //    aggTracksOfAllObjectsPerChunk, totalFrameCounts, totalFrameCounts + noNewFrames, noFramesOverlap, fps);

            // postpone the agg trace: shift this chunk's tracks forward so its
            // first frame lines up with its position on the stitched timeline.
            double frameTimeInMiliSeconds = (double)1000 / (double)fps;
            double timeToPostponeInMilliSeconds = (double)(totalFrameCounts - noFramesOverlap) * frameTimeInMiliSeconds;
            //TimeSpan timeSpanToPostponeInSeconds = new TimeSpan(0,0,0,0, (int)timeToPostponeInMilliSeconds);
            TimeSpan timeSpanToPostponeInSeconds = DateTimeUtilities.getTimeSpanFromTotalMilliSeconds((int)timeToPostponeInMilliSeconds);
            aggTracksOfAllObjectsPerChunk.postpone(timeSpanToPostponeInSeconds);
            // overlap must be 0
            // NOTE(review): the loop below assumes objIds[k] is aligned with
            // aggTracksOfAllObjectsPerChunk.tracks[k] — only true if each entry
            // contributes exactly one track — and that AggObjIds indices align
            // with stitched.tracks indices. Verify both invariants hold.
            for (int k = 0; k < aggTracksOfAllObjectsPerChunk.tracks.Count; k++)
            {
                if (!AggObjIds.Contains(objIds[k]))
                {
                    // New object id: append its track wholesale.
                    stitched.tracks.Add(aggTracksOfAllObjectsPerChunk.tracks[k]);
                    AggObjIds.Add(objIds[k]);
                }
                else
                {
                    // stitch the track for the same id
                    int tckIdx = AggObjIds.IndexOf(objIds[k]);
                    stitched.tracks[tckIdx] = CompressedTrack.stitchTwoAdjacentTrack(stitched.tracks[tckIdx], aggTracksOfAllObjectsPerChunk.tracks[k]);
                }
            }
            totalFrameCounts += noNewFrames;
        }
        //debug
        //generateVideoForEvaluation(ImageURLs, stitched, directoryName + "_" + i, videoName, fps);
    }
    return (stitched);
}
/// <summary>
/// Convenience overload: evaluates the metric at the default sampling rate.
/// </summary>
/// <param name="t1">First track.</param>
/// <param name="t2">Second track.</param>
public double getMetric(CompressedTrack t1, CompressedTrack t2)
{
    const int defaultFps = 20; // default sampling rate, frames per second
    return getMetric(t1, t2, defaultFps);
}
/// <summary>
/// Two-argument overload; delegates to the three-argument form with a
/// default sampling rate of 20 frames per second.
/// </summary>
/// <param name="t1">First track.</param>
/// <param name="t2">Second track.</param>
public double getMetric(CompressedTrack t1, CompressedTrack t2) => getMetric(t1, t2, 20);
/// <summary>
/// Spatio-temporal IoU between two tracks: the intersection "volume" (overlap
/// area integrated over the common time window) divided by the union volume
/// (each track's box area integrated over its own lifetime, minus the
/// intersection). Returns 0 for tracks with no temporal overlap or a
/// degenerate (zero) union.
/// </summary>
/// <param name="t1">First track.</param>
/// <param name="t2">Second track.</param>
/// <param name="fps">Sampling rate used to discretize the time integrals.</param>
public double getMetric(CompressedTrack t1, CompressedTrack t2, int fps)
{
    // Sampling step. Clamp to at least 1 ms: for fps > 1000 the floor would
    // produce a zero step and the integration loops below would never advance.
    TimeSpan dt = new TimeSpan(0, 0, 0, 0, Math.Max(1, (int)Math.Floor(1000.0 / fps)));

    DateTime startTime1 = t1.startTime;
    DateTime startTime2 = t2.startTime;
    DateTime endTime1 = t1.endTime;
    DateTime endTime2 = t2.endTime;

    // No temporal overlap at all?
    if (startTime1 > endTime2 || startTime2 > endTime1) { return (0); }

    // Clip to the common time window.
    DateTime commonStartTime = startTime1;
    if (startTime2 > startTime1) { commonStartTime = startTime2; }
    DateTime commonEndTime = endTime1;
    if (endTime2 < endTime1) { commonEndTime = endTime2; }
    if (commonEndTime - commonStartTime < dt) { return (0); }

    // FIX: use TotalMilliseconds, not the Milliseconds component. When fps <= 1
    // the step is a whole second, so dt.Milliseconds is 0 and every integral
    // below came out 0 (metric was always NaN from 0/0 or just 0).
    double stepMs = dt.TotalMilliseconds;

    // Intersection volume: overlap area integrated over the common window.
    double IntersectionVolume = 0;
    for (DateTime t = commonStartTime; t <= commonEndTime; t += dt)
    {
        SpaceTime l1 = t1.getSpaceTimeAt(t);
        SpaceTime l2 = t2.getSpaceTimeAt(t);
        // FIX: a track may have no sample at this instant; skip rather than
        // dereference null.
        if (l1 == null || l2 == null) { continue; }
        double overlap = BoundingBox.ComputeOverlapArea(l1.region, l2.region);
        IntersectionVolume += (overlap * stepMs);
    }

    // Union volume: each track's area integrated over its own lifetime...
    double UnionVolume = 0;
    for (DateTime t = startTime1; t <= endTime1; t += dt)
    {
        SpaceTime l1 = t1.getSpaceTimeAt(t);
        if (l1 == null) { continue; }
        UnionVolume += (l1.region.ComputeArea() * stepMs);
    }
    for (DateTime t = startTime2; t <= endTime2; t += dt)
    {
        SpaceTime l2 = t2.getSpaceTimeAt(t);
        if (l2 == null) { continue; }
        UnionVolume += (l2.region.ComputeArea() * stepMs);
    }
    // ...minus the double-counted intersection.
    UnionVolume -= IntersectionVolume;

    // FIX: guard the degenerate case (zero-area boxes) that previously
    // produced NaN or Infinity.
    if (UnionVolume <= 0) { return (0); }
    return (IntersectionVolume / UnionVolume);
}
// if the stitched video is too long
/// <summary>
/// Downloads each frame by URL, overlays track bounding boxes, labels, and
/// boolean attributes, saves the annotated frames under directory\videoName,
/// and assembles a video from that folder. Skips all work if the output folder
/// already exists (acts as a done-marker).
/// </summary>
/// <param name="ImageURLs">Frame URLs in playback order.</param>
/// <param name="ctts">Tracking result; frame i is sampled at VideoSegmentStartTime + i/fps.</param>
/// <param name="directory">Parent output directory.</param>
/// <param name="videoName">Subfolder / video name.</param>
/// <param name="fps">Frame rate used to map frame index to timestamp.</param>
public static void generateVideoFramesForEvaluation(List <string> ImageURLs, MultiObjectTrackingResult ctts, String directory, String videoName, int fps)
{
    if (Directory.Exists(directory + "\\" + videoName)) { return; }
    Directory.CreateDirectory(directory + "\\" + videoName);
    Console.WriteLine("Saving " + directory + "\\" + videoName);

    // FIX: WebClient is IDisposable; previously it was never disposed.
    using (var wc = new WebClient())
    {
        double frameTimeSpanInMiliseconds = (double)1000 / (double)fps;
        DateTime start = ctts.VideoSegmentStartTime;

        for (int i = 0; i < ImageURLs.Count; i++)
        {
            DateTime t = start.AddMilliseconds(frameTimeSpanInMiliseconds * i);

            // FIX: the download stream and the decoded Image leaked on every
            // frame — for long videos this is a large memory/handle leak. The
            // stream must stay open for the Image's lifetime (GDI+ requirement),
            // so both are scoped together.
            using (Stream imageStream = wc.OpenRead(ImageURLs[i]))
            using (Image x = Image.FromStream(imageStream))
            {
                List <BoundingBox> locations = new List <BoundingBox>();
                List <string> labels = new List <string>();
                Dictionary <string, List <bool> > attributes = new Dictionary <string, List <bool> >();
                attributes.Add("occlusion", new List <bool>());
                List <int> idx = new List <int>();
                // Seed attribute lists from the first track's declared boolean
                // attributes (assumed representative of all tracks).
                if (ctts.tracks.Count != 0)
                {
                    foreach (string key in ctts.tracks[0].booleanAttributeTracks.Keys)
                    {
                        if (!attributes.ContainsKey(key))
                        {
                            attributes.Add(key, new List <bool>());
                        }
                    }
                }
                for (int j = 0; j < ctts.tracks.Count; j++)
                {
                    CompressedTrack ct = ctts.tracks[j];
                    SpaceTime st = ct.getSpaceTimeAt(t);
                    BooleanAttribute outofview_attr = ct.getAttributeAt("outofview", t);
                    // Draw only tracks that exist at this instant and are in view.
                    if (st != null && outofview_attr != null && !outofview_attr.value)
                    {
                        locations.Add(st.region);
                        labels.Add(ctts.tracks[j].label);
                        foreach (string key in attributes.Keys)
                        {
                            attributes[key].Add(ct.getAttributeAt(key, t).value);
                        }
                        idx.Add(j);
                    }
                }

                Image new_image = generateTrackImage(x, labels, locations, attributes, idx);
                new_image.Save(directory + "\\" + videoName + "\\img" + i.ToString("000") + ".jpg");
                // FIX: dispose the annotated frame after saving; guard against
                // generateTrackImage returning the input image itself.
                if (!ReferenceEquals(new_image, x)) { new_image.Dispose(); }
            }
        }
    }

    FFMpegWrappers.generateVideoFromFolderofFrames(videoName, directory + "\\" + videoName + "\\");
    Console.WriteLine("done");
}