/// <summary>
/// Stitches the per-chunk, per-object aggregated tracking results of one video into a
/// single <see cref="MultiObjectTrackingResult"/> spanning the whole video.
/// Chunks are ordered by the starting-frame number encoded as the last '_'-separated
/// field of the chunk directory name; overlapping frames between consecutive chunks
/// are dropped, and tracks sharing the same object id across chunks are joined with
/// <c>CompressedTrack.stitchTwoAdjacentTrack</c>.
/// </summary>
/// <param name="entries">Aggregated result table entries for every chunk/object of the video.</param>
/// <param name="ImageURLs">Out: de-overlapped, ordered frame image URLs of the full video.</param>
/// <param name="fps">Out: frame rate taken from the job parameters (0 if no entries).</param>
/// <returns>The stitched track set, or null when <paramref name="entries"/> is empty.</returns>
public static MultiObjectTrackingResult stitchAllChunksAllObjectsOfOneVideo(List<SatyamAggregatedResultsTableEntry> entries, out List<string> ImageURLs, out int fps)
{
    ImageURLs = new List<string>();
    fps = 0;
    if (entries.Count == 0)
    {
        return null;
    }
    SatyamJobStorageAccountAccess satyamStorage = new SatyamJobStorageAccountAccess();
    MultiObjectTrackingResult stitched = new MultiObjectTrackingResult();
    int totalFrameCounts = 0;

    // Group entries by their chunk's starting frame so chunks are processed in
    // temporal order. `idx` duplicates the keys and is sorted explicitly below
    // (redundant with SortedDictionary ordering, kept for clarity of intent).
    SortedDictionary<int, List<SatyamAggregatedResultsTableEntry>> sortedEntries = new SortedDictionary<int, List<SatyamAggregatedResultsTableEntry>>();
    List<int> idx = new List<int>();
    for (int i = 0; i < entries.Count; i++)
    {
        SatyamAggregatedResultsTableEntry entry = entries[i];
        SatyamAggregatedResult satyamAggResult = JSonUtils.ConvertJSonToObject<SatyamAggregatedResult>(entry.ResultString);
        SatyamTask aggTask = JSonUtils.ConvertJSonToObject<SatyamTask>(satyamAggResult.TaskParameters);
        string video = URIUtilities.localDirectoryNameFromURI(aggTask.SatyamURI);
        // Chunk directory names end with "..._<startingFrame>".
        string[] fields = video.Split('_');
        int startingFrame = Convert.ToInt32(fields[fields.Length - 1]);
        if (!sortedEntries.ContainsKey(startingFrame))
        {
            sortedEntries.Add(startingFrame, new List<SatyamAggregatedResultsTableEntry>());
            idx.Add(startingFrame);
        }
        sortedEntries[startingFrame].Add(entries[i]);
    }
    idx.Sort();

    // Object ids already merged into `stitched`, index-aligned with stitched.tracks.
    List<string> AggObjIds = new List<string>();
    for (int i = 0; i < idx.Count; i++)
    {
        int noFramesOverlap = 0;
        string blobDir = "";
        // Collect the tracks of every object belonging to the same chunk.
        MultiObjectTrackingResult aggTracksOfAllObjectsPerChunk = new MultiObjectTrackingResult();
        List<string> objIds = new List<string>();
        for (int j = 0; j < sortedEntries[idx[i]].Count; j++)
        {
            SatyamAggregatedResultsTableEntry entry = sortedEntries[idx[i]][j];
            SatyamAggregatedResult satyamAggResult = JSonUtils.ConvertJSonToObject<SatyamAggregatedResult>(entry.ResultString);
            SatyamTask aggTask = JSonUtils.ConvertJSonToObject<SatyamTask>(satyamAggResult.TaskParameters);
            MultiObjectTrackingSubmittedJob job = JSonUtils.ConvertJSonToObject<MultiObjectTrackingSubmittedJob>(aggTask.jobEntry.JobParameters);
            if (job.ChunkOverlap != 0.0)
            {
                // ChunkOverlap is in seconds; convert to a frame count.
                noFramesOverlap = (int)(job.ChunkOverlap * job.FrameRate);
            }
            fps = job.FrameRate;
            blobDir = URIUtilities.localDirectoryFullPathFromURI(aggTask.SatyamURI);
            string objId = URIUtilities.filenameFromURINoExtension(aggTask.SatyamURI);
            objIds.Add(objId);
            TrackletLabelingAggregatedResult aggresult = JSonUtils.ConvertJSonToObject<TrackletLabelingAggregatedResult>(satyamAggResult.AggregatedResultString);

            // Download the aggregated tracklet JSON. The client, response stream and
            // reader are disposed deterministically (the original code leaked all three
            // on every inner-loop iteration).
            string aggTrackString;
            using (WebClient wb = new WebClient())
            using (Stream aggTrackStream = wb.OpenRead(aggresult.AggregatedTrackletsString_URL))
            using (StreamReader reader = new StreamReader(aggTrackStream))
            {
                aggTrackString = reader.ReadToEnd();
            }
            MultiObjectTrackingResult aggTracks = JSonUtils.ConvertJSonToObject<MultiObjectTrackingResult>(aggTrackString);
            if (aggTracksOfAllObjectsPerChunk.tracks.Count == 0)
            {
                aggTracksOfAllObjectsPerChunk = aggTracks;
            }
            else
            {
                for (int k = 0; k < aggTracks.tracks.Count; k++)
                {
                    aggTracksOfAllObjectsPerChunk.tracks.Add(aggTracks.tracks[k]);
                }
            }
        }

        List<string> TraceURLs = satyamStorage.getURLListOfSpecificExtensionUnderSubDirectoryByURI(blobDir, new List<string>() { "jpg", "png" });
        if (i == 0)
        {
            // First chunk seeds the stitched result and the full frame list.
            ImageURLs.AddRange(TraceURLs);
            stitched = aggTracksOfAllObjectsPerChunk;
            totalFrameCounts += TraceURLs.Count;
            AggObjIds = objIds;
        }
        else
        {
            // Skip the frames this chunk shares with the previous one.
            int noNewFrames = 0;
            for (int j = noFramesOverlap; j < TraceURLs.Count; j++)
            {
                ImageURLs.Add(TraceURLs[j]);
                noNewFrames++;
            }
            // Shift this chunk's track timestamps onto the global video timeline:
            // the chunk starts at frame (totalFrameCounts - noFramesOverlap).
            double frameTimeInMiliSeconds = (double)1000 / (double)fps;
            double timeToPostponeInMilliSeconds = (double)(totalFrameCounts - noFramesOverlap) * frameTimeInMiliSeconds;
            TimeSpan timeSpanToPostponeInSeconds = DateTimeUtilities.getTimeSpanFromTotalMilliSeconds((int)timeToPostponeInMilliSeconds);
            aggTracksOfAllObjectsPerChunk.postpone(timeSpanToPostponeInSeconds);

            // Merge by object id: new ids get appended, known ids get their track
            // joined with the existing one.
            // NOTE(review): this assumes tracks[k] corresponds to objIds[k], i.e. one
            // track per entry in chunk order — verify MultiObjectTrackingResult keeps
            // exactly one track per object.
            for (int k = 0; k < aggTracksOfAllObjectsPerChunk.tracks.Count; k++)
            {
                if (!AggObjIds.Contains(objIds[k]))
                {
                    stitched.tracks.Add(aggTracksOfAllObjectsPerChunk.tracks[k]);
                    AggObjIds.Add(objIds[k]);
                }
                else
                {
                    int tckIdx = AggObjIds.IndexOf(objIds[k]);
                    stitched.tracks[tckIdx] = CompressedTrack.stitchTwoAdjacentTrack(stitched.tracks[tckIdx], aggTracksOfAllObjectsPerChunk.tracks[k]);
                }
            }
            totalFrameCounts += noNewFrames;
        }
        //debug
        //generateVideoForEvaluation(ImageURLs, stitched, directoryName + "_" + i, videoName, fps);
    }
    return stitched;
}
/// <summary>
/// Stitches two tracking results of consecutive, overlapping video chunks into one.
/// For each trace, tracks are classified by whether they have frames inside the
/// overlap window: tracks with none are kept as single-chunk tracks; tracks with
/// overlap frames are associated across the two traces via tracklet association,
/// and the matched pairs are merged by stitchAnnotationStringByAssociation.
/// Note: <paramref name="nextTrace"/> is postponed IN PLACE onto the global
/// timeline before matching — the caller's object is mutated.
/// </summary>
/// <param name="currentTrace">Stitched result covering frames 0..totalFrameCountsBeforeStitching-1.</param>
/// <param name="nextTrace">The next chunk's result, starting inside the overlap.</param>
/// <param name="totalFrameCountsBeforeStitching">Frame count of currentTrace.</param>
/// <param name="totalFrameCountsAfterStitching">Frame count of the combined result.</param>
/// <param name="noFramesOverlap">Number of frames shared by the two chunks.</param>
/// <param name="fps">Frame rate used to convert frame indices to timestamps.</param>
/// <returns>The combined trace, rebuilt from the stitched annotation string.</returns>
public static MultiObjectTrackingResult stitchTwoTracesByTubeletsOfOverlappingVideoChunk(MultiObjectTrackingResult currentTrace, MultiObjectTrackingResult nextTrace, int totalFrameCountsBeforeStitching, int totalFrameCountsAfterStitching, int noFramesOverlap, int fps = 10)
{
    //if (noFramesOverlap == 0) return null;
    double frameTimeInMiliSeconds = (double)1000 / (double)fps;
    List<MultiObjectTrackingResult> compressedOverlappingTracks = new List<MultiObjectTrackingResult>();
    // Overlap window expressed on currentTrace's timeline: the last
    // noFramesOverlap frames of the current trace.
    DateTime overlapStart = currentTrace.VideoSegmentStartTime.AddMilliseconds(frameTimeInMiliSeconds * (totalFrameCountsBeforeStitching - noFramesOverlap));
    DateTime overlapEnd = currentTrace.VideoSegmentStartTime.AddMilliseconds(frameTimeInMiliSeconds * (totalFrameCountsBeforeStitching - 1));
    // Index 0 = current trace, index 1 = next trace.
    List<List<string>> CrossChunkEntitiesAnnotationStrings = new List<List<string>>();
    List<List<string>> SingleChunkEntitiesAnnotationStrings = new List<List<string>>();
    for (int i = 0; i < 2; i++)
    {
        //boundingboxes.Add(new List<BoundingBox>());
        CrossChunkEntitiesAnnotationStrings.Add(new List<string>());
        SingleChunkEntitiesAnnotationStrings.Add(new List<string>());
    }
    // Offset that shifts nextTrace's timestamps onto the global timeline
    // (its first frame coincides with the start of the overlap window).
    double timeToPostponeInMilliSeconds = (double)(totalFrameCountsBeforeStitching - noFramesOverlap) * frameTimeInMiliSeconds;
    //TimeSpan timeSpanToPostponeInSeconds = new TimeSpan(0,0,0,0, (int)timeToPostponeInMilliSeconds);
    TimeSpan timeSpanToPostponeInSeconds = DateTimeUtilities.getTimeSpanFromTotalMilliSeconds((int)timeToPostponeInMilliSeconds);

    // --- Current trace: extract each track's portion inside the overlap window.
    // Annotation string format (inferred from the parsing below — confirm against
    // unCompressToTrackAnnotationString): "label:frame:frame:..." with each frame
    // segment starting with a comma-separated timestamp; whole tracks are joined
    // with '|'.
    List<string> currentTrace_filteredAnnotationStrings = new List<string>();
    foreach (CompressedTrack entity in currentTrace.tracks)
    {
        // Snapshot frame at the start of the overlap window, if the track has one.
        VATIC_DVA_Frame currentTraceOverlapStartFrame = entity.getFrameAt(overlapStart);
        string annotationString = entity.unCompressToTrackAnnotationString();
        string[] fields = annotationString.Split(':');
        List<string> TimeFilteredSegmentStringList = new List<string>();
        TimeFilteredSegmentStringList.Add(fields[0]); // adds the label back
        if (currentTraceOverlapStartFrame != null)
        {
            TimeFilteredSegmentStringList.Add(currentTraceOverlapStartFrame.ToAnnotationString()); // adds a starting frame
        }
        // Keep only frames strictly inside (overlapStart, overlapEnd].
        int count = 0;
        for (int j = 1; j < fields.Length; j++)
        {
            DateTime time = DateTimeUtilities.getDateTimeFromString(fields[j].Split(',')[0]);
            if (time <= overlapStart || time > overlapEnd)
            {
                continue;
            }
            TimeFilteredSegmentStringList.Add(fields[j]);
            count++;
        }
        if (count == 0)
        {
            // No frames in the overlap: the track lives in one chunk only.
            SingleChunkEntitiesAnnotationStrings[0].Add(annotationString);
            continue;
        }
        else
        {
            // Track crosses into the overlap: candidate for cross-chunk association.
            CrossChunkEntitiesAnnotationStrings[0].Add(annotationString);
            string filteredAnnotationString = ObjectsToStrings.ListString(TimeFilteredSegmentStringList, ':');
            currentTrace_filteredAnnotationStrings.Add(filteredAnnotationString);
        }
    }
    string currentOverlap_totalFilteredAnnotationString = ObjectsToStrings.ListString(currentTrace_filteredAnnotationStrings, '|');
    MultiObjectTrackingResult currentTrace_Overlap = MultiObjectTrackingResult.ConvertAnnotationStringToMultiObjectTrackingResult(overlapStart, currentOverlap_totalFilteredAnnotationString, "current", "0", noFramesOverlap, fps);
    compressedOverlappingTracks.Add(currentTrace_Overlap);

    // --- Next trace: postpone onto the global timeline, then extract its overlap
    // portion the same way. NOTE: mutates the caller's nextTrace.
    nextTrace.postpone(timeSpanToPostponeInSeconds);
    List<string> nextTrace_filteredAnnotationStrings = new List<string>();
    foreach (CompressedTrack entity in nextTrace.tracks)
    {
        VATIC_DVA_Frame nextTraceOverlapStartFrame = entity.getFrameAt(overlapStart);
        VATIC_DVA_Frame nextTraceOverlapEndFrame = entity.getFrameAt(overlapEnd);
        string annotationString = entity.unCompressToTrackAnnotationString();
        string[] fields = annotationString.Split(':');
        List<string> TimeFilteredSegmentStringList = new List<string>();
        TimeFilteredSegmentStringList.Add(fields[0]); // adds the label back
        if (nextTraceOverlapStartFrame != null)
        {
            TimeFilteredSegmentStringList.Add(nextTraceOverlapStartFrame.ToAnnotationString()); // adds a starting frame
        }
        // NOTE(review): this filter uses `time >= overlapEnd` (exclusive end) while
        // the current-trace loop above uses `time > overlapEnd` (inclusive end) —
        // the end frame is instead re-added explicitly below via
        // nextTraceOverlapEndFrame; confirm the asymmetry is intentional.
        int count = 0;
        for (int j = 1; j < fields.Length; j++)
        {
            DateTime time = DateTimeUtilities.getDateTimeFromString(fields[j].Split(',')[0]);
            if (time <= overlapStart || time >= overlapEnd)
            {
                continue;
            }
            TimeFilteredSegmentStringList.Add(fields[j]);
            count++;
        }
        if (count == 0)
        {
            // No frames in the overlap: the track lives in one chunk only.
            SingleChunkEntitiesAnnotationStrings[1].Add(annotationString);
            continue;
        }
        else
        {
            // Track crosses the overlap: candidate for cross-chunk association.
            CrossChunkEntitiesAnnotationStrings[1].Add(annotationString);
            if (nextTraceOverlapEndFrame != null)
            {
                TimeFilteredSegmentStringList.Add(nextTraceOverlapEndFrame.ToAnnotationString()); // adds a ending frame
            }
            string filteredAnnotationString = ObjectsToStrings.ListString(TimeFilteredSegmentStringList, ':');
            nextTrace_filteredAnnotationStrings.Add(filteredAnnotationString);
        }
    }
    string nextOverlap_totalFilteredAnnotationString = ObjectsToStrings.ListString(nextTrace_filteredAnnotationStrings, '|');
    MultiObjectTrackingResult nextTrace_Overlap = MultiObjectTrackingResult.ConvertAnnotationStringToMultiObjectTrackingResult(overlapStart, nextOverlap_totalFilteredAnnotationString, "next", "1", noFramesOverlap, fps);
    compressedOverlappingTracks.Add(nextTrace_Overlap);

    // Associate the two overlap-window track sets to find which track in
    // currentTrace matches which track in nextTrace.
    List<MultipartiteWeightedMatch> association = TrackletsAssociation.AssociateTracklets(compressedOverlappingTracks);
    DateTime NewFrameStartTime = currentTrace.VideoSegmentStartTime + DateTimeUtilities.getTimeSpanFromTotalMilliSeconds((int)(totalFrameCountsBeforeStitching * frameTimeInMiliSeconds));
    // Merge matched cross-chunk tracks and append single-chunk tracks, then
    // rebuild the full result from the combined annotation string.
    string totalStitchedAnnotationString = stitchAnnotationStringByAssociation(CrossChunkEntitiesAnnotationStrings, SingleChunkEntitiesAnnotationStrings, association, NewFrameStartTime);
    MultiObjectTrackingResult ret = MultiObjectTrackingResult.ConvertAnnotationStringToMultiObjectTrackingResult(currentTrace.VideoSegmentStartTime, totalStitchedAnnotationString, currentTrace.cameraId, currentTrace.UID, totalFrameCountsAfterStitching, fps);
    return(ret);
}