/// <summary>
/// Stitches two tracking results whose video chunks overlap by <paramref name="noFramesOverlap"/> frames.
/// Both traces are cut down to just the overlap window, tracklets in the two windows are associated
/// (TrackletsAssociation over the whole overlapping tubelets, not a single frame), and the association
/// is used to splice the per-entity annotation strings into one result covering the stitched span.
/// </summary>
/// <param name="currentTrace">Earlier chunk; its start time, cameraId and UID are kept for the result.</param>
/// <param name="nextTrace">Later chunk; MUTATED — its timestamps are postponed in place via postpone().</param>
/// <param name="totalFrameCountsBeforeStitching">Frame count of currentTrace (overlap included).</param>
/// <param name="totalFrameCountsAfterStitching">Frame count of the combined, stitched result.</param>
/// <param name="noFramesOverlap">Number of frames shared by the two chunks.</param>
/// <param name="fps">Frame rate used to convert frame counts to wall-clock offsets.</param>
/// <returns>A single MultiObjectTrackingResult starting at currentTrace.VideoSegmentStartTime.</returns>
public static MultiObjectTrackingResult stitchTwoTracesByTubeletsOfOverlappingVideoChunk(MultiObjectTrackingResult currentTrace, MultiObjectTrackingResult nextTrace, int totalFrameCountsBeforeStitching, int totalFrameCountsAfterStitching, int noFramesOverlap, int fps = 10)
{
    //if (noFramesOverlap == 0) return null;
    double frameTimeInMiliSeconds = (double)1000 / (double)fps;
    List<MultiObjectTrackingResult> compressedOverlappingTracks = new List<MultiObjectTrackingResult>();

    // Overlap window expressed on the CURRENT trace's timeline: the last noFramesOverlap frames
    // of the current chunk, [start + (total - overlap) * dt, start + (total - 1) * dt].
    DateTime overlapStart = currentTrace.VideoSegmentStartTime.AddMilliseconds(frameTimeInMiliSeconds * (totalFrameCountsBeforeStitching - noFramesOverlap));
    DateTime overlapEnd = currentTrace.VideoSegmentStartTime.AddMilliseconds(frameTimeInMiliSeconds * (totalFrameCountsBeforeStitching - 1));
    //DateTime nextTrace_overlapStart = nextTrace.VideoSegmentStartTime;
    //DateTime nextTrace_overlapEnd = nextTrace.VideoSegmentStartTime.AddMilliseconds(frameTimeInMiliSeconds * (noFramesOverlap-1));
    //MultiObjectTrackingResult nextTrace_Overlap = nextTrace.getSubTimeSegment(nextTrace_overlapStart, nextTrace_overlapEnd, fps);
    //compressedOverlappingTracks.Add(nextTrace_Overlap);

    // Index 0 = current trace, index 1 = next trace, for both cross-chunk and single-chunk buckets.
    List<List<string>> CrossChunkEntitiesAnnotationStrings = new List<List<string>>();
    List<List<string>> SingleChunkEntitiesAnnotationStrings = new List<List<string>>();
    for (int i = 0; i < 2; i++)
    {
        //boundingboxes.Add(new List<BoundingBox>());
        CrossChunkEntitiesAnnotationStrings.Add(new List<string>());
        SingleChunkEntitiesAnnotationStrings.Add(new List<string>());
    }

    // The next chunk starts (total - overlap) frames after the current chunk's start; shift it by that much
    // so both traces share one timeline and the overlap windows coincide.
    double timeToPostponeInMilliSeconds = (double)(totalFrameCountsBeforeStitching - noFramesOverlap) * frameTimeInMiliSeconds;
    //TimeSpan timeSpanToPostponeInSeconds = new TimeSpan(0,0,0,0, (int)timeToPostponeInMilliSeconds);
    TimeSpan timeSpanToPostponeInSeconds = DateTimeUtilities.getTimeSpanFromTotalMilliSeconds((int)timeToPostponeInMilliSeconds);
    //// get the end of the first trace and the begining + overlap of the second trace
    //DateTime CurrentTraceSampleFrameTime = currentTrace.VideoSegmentStartTime.AddMilliseconds(frameTimeInMiliSeconds * (totalFrameCountsBeforeStitching - 1));
    //DateTime NextTraceSampleFrameTime = nextTrace.VideoSegmentStartTime.AddMilliseconds(frameTimeInMiliSeconds * (Math.Max(noFramesOverlap - 1, 0)));

    // ---- current trace: extract each entity's overlap-window sub-track ----
    // Annotation string layout (per unCompressToTrackAnnotationString / the splits below):
    // "label:frame:frame:...", each frame field starting with a comma-separated timestamp.
    List<string> currentTrace_filteredAnnotationStrings = new List<string>();
    foreach (CompressedTrack entity in currentTrace.tracks)
    {
        // get a snapshot frame at start and end of current trace
        VATIC_DVA_Frame currentTraceOverlapStartFrame = entity.getFrameAt(overlapStart);
        string annotationString = entity.unCompressToTrackAnnotationString();
        string[] fields = annotationString.Split(':');
        List<string> TimeFilteredSegmentStringList = new List<string>();
        TimeFilteredSegmentStringList.Add(fields[0]); // adds the label back
        if (currentTraceOverlapStartFrame != null)
        {
            TimeFilteredSegmentStringList.Add(currentTraceOverlapStartFrame.ToAnnotationString()); // adds a starting frame
        }
        // remove the label of the second string
        int count = 0;
        for (int j = 1; j < fields.Length; j++)
        {
            // remove the overlapping part: keep only frames strictly inside (overlapStart, overlapEnd].
            // NOTE(review): this uses `time > overlapEnd` while the next-trace loop below uses
            // `time >= overlapEnd` — presumably so the end frame appears exactly once per side
            // (the next-trace side re-appends an explicit end frame instead); confirm intent.
            DateTime time = DateTimeUtilities.getDateTimeFromString(fields[j].Split(',')[0]);
            if (time <= overlapStart || time > overlapEnd)
            {
                continue;
            }
            TimeFilteredSegmentStringList.Add(fields[j]);
            count++;
        }
        if (count == 0)
        {
            // not a cross chunk one: no frames inside the overlap window, so this entity lives
            // only in the current chunk and needs no association.
            SingleChunkEntitiesAnnotationStrings[0].Add(annotationString);
            continue;
        }
        else
        {
            // a cross chunk one: keep the FULL annotation string for stitching, and the
            // time-filtered one only for computing the association.
            CrossChunkEntitiesAnnotationStrings[0].Add(annotationString);
            // construct for association
            string filteredAnnotationString = ObjectsToStrings.ListString(TimeFilteredSegmentStringList, ':');
            currentTrace_filteredAnnotationStrings.Add(filteredAnnotationString);
        }
    }
    // '|' joins per-entity annotation strings into one chunk-level string.
    string currentOverlap_totalFilteredAnnotationString = ObjectsToStrings.ListString(currentTrace_filteredAnnotationStrings, '|');
    MultiObjectTrackingResult currentTrace_Overlap = MultiObjectTrackingResult.ConvertAnnotationStringToMultiObjectTrackingResult(overlapStart, currentOverlap_totalFilteredAnnotationString, "current", "0", noFramesOverlap, fps);
    compressedOverlappingTracks.Add(currentTrace_Overlap);

    // ---- next trace ----
    /// postpone the timestamp of the second trace (in place) so overlapStart/overlapEnd,
    /// computed on the current trace's timeline, are valid lookups on nextTrace too.
    nextTrace.postpone(timeSpanToPostponeInSeconds);
    List<string> nextTrace_filteredAnnotationStrings = new List<string>();
    foreach (CompressedTrack entity in nextTrace.tracks)
    {
        VATIC_DVA_Frame nextTraceOverlapStartFrame = entity.getFrameAt(overlapStart);
        VATIC_DVA_Frame nextTraceOverlapEndFrame = entity.getFrameAt(overlapEnd);
        string annotationString = entity.unCompressToTrackAnnotationString();
        string[] fields = annotationString.Split(':');
        List<string> TimeFilteredSegmentStringList = new List<string>();
        TimeFilteredSegmentStringList.Add(fields[0]); // adds the label back
        if (nextTraceOverlapStartFrame != null)
        {
            TimeFilteredSegmentStringList.Add(nextTraceOverlapStartFrame.ToAnnotationString()); // adds a starting frame
        }
        // remove the label of the second string
        int count = 0;
        for (int j = 1; j < fields.Length; j++)
        {
            // remove the overlapping part: keep only frames strictly inside (overlapStart, overlapEnd);
            // the frame AT overlapEnd is excluded here and re-added below as an explicit end frame.
            DateTime time = DateTimeUtilities.getDateTimeFromString(fields[j].Split(',')[0]);
            if (time <= overlapStart || time >= overlapEnd)
            {
                continue;
            }
            TimeFilteredSegmentStringList.Add(fields[j]);
            count++;
        }
        if (count == 0)
        {
            // not a cross chunk one: the entity only exists in the next chunk.
            SingleChunkEntitiesAnnotationStrings[1].Add(annotationString);
            continue;
        }
        else
        {
            // a cross chunk one
            CrossChunkEntitiesAnnotationStrings[1].Add(annotationString);
            // construct for association
            if (nextTraceOverlapEndFrame != null)
            {
                TimeFilteredSegmentStringList.Add(nextTraceOverlapEndFrame.ToAnnotationString()); // adds a ending frame
            }
            string filteredAnnotationString = ObjectsToStrings.ListString(TimeFilteredSegmentStringList, ':');
            nextTrace_filteredAnnotationStrings.Add(filteredAnnotationString);
        }
    }
    string nextOverlap_totalFilteredAnnotationString = ObjectsToStrings.ListString(nextTrace_filteredAnnotationStrings, '|');
    MultiObjectTrackingResult nextTrace_Overlap = MultiObjectTrackingResult.ConvertAnnotationStringToMultiObjectTrackingResult(overlapStart, nextOverlap_totalFilteredAnnotationString, "next", "1", noFramesOverlap, fps);
    compressedOverlappingTracks.Add(nextTrace_Overlap);

    // Associate the two overlap tubelets (entry 0 = current, entry 1 = next), then splice the
    // original full annotation strings together using that association.
    List<MultipartiteWeightedMatch> association = TrackletsAssociation.AssociateTracklets(compressedOverlappingTracks);
    DateTime NewFrameStartTime = currentTrace.VideoSegmentStartTime + DateTimeUtilities.getTimeSpanFromTotalMilliSeconds((int)(totalFrameCountsBeforeStitching * frameTimeInMiliSeconds));
    string totalStitchedAnnotationString = stitchAnnotationStringByAssociation(CrossChunkEntitiesAnnotationStrings, SingleChunkEntitiesAnnotationStrings, association, NewFrameStartTime);
    MultiObjectTrackingResult ret = MultiObjectTrackingResult.ConvertAnnotationStringToMultiObjectTrackingResult(currentTrace.VideoSegmentStartTime, totalStitchedAnnotationString, currentTrace.cameraId, currentTrace.UID, totalFrameCountsAfterStitching, fps);
    return(ret);
}
/// <summary>
/// Stitches two tracking results by associating entities on a SINGLE sample frame per trace
/// (the last frame of the current trace and the last overlap frame of the next trace), matching
/// their bounding boxes via BoundingBoxAssociation, then splicing the annotation strings.
/// Cheaper than the tubelet-based stitcher, which compares whole overlap sub-tracks.
/// </summary>
/// <param name="currentTrace">Earlier chunk; its start time, cameraId and UID are kept for the result.</param>
/// <param name="nextTrace">Later chunk; MUTATED — segment times and every entity's timestamps are shifted in place.</param>
/// <param name="totalFrameCountsBeforeStitching">Frame count of currentTrace (overlap included).</param>
/// <param name="totalFrameCountsAfterStitching">Frame count of the combined, stitched result.</param>
/// <param name="noFramesOverlap">Number of frames shared by the two chunks (0 = traces merely abut).</param>
/// <param name="fps">Frame rate used to convert frame counts to wall-clock offsets.</param>
/// <returns>A single MultiObjectTrackingResult starting at currentTrace.VideoSegmentStartTime.</returns>
public static MultiObjectTrackingResult stitchTwoTracesByOneFrameBoundingBoxes(MultiObjectTrackingResult currentTrace, MultiObjectTrackingResult nextTrace, int totalFrameCountsBeforeStitching, int totalFrameCountsAfterStitching, int noFramesOverlap = 0, int fps = 10)
{
    // Index 0 = current trace, index 1 = next trace, kept aligned across all three lists:
    // boundingboxes[k][i] is the sample box of the entity whose full annotation string is
    // CrossChunkEntitiesAnnotationStrings[k][i].
    List<List<BoundingBox>> boundingboxes = new List<List<BoundingBox>>();
    List<List<string>> CrossChunkEntitiesAnnotationStrings = new List<List<string>>();
    List<List<string>> SingleChunkEntitiesAnnotationStrings = new List<List<string>>();
    for (int i = 0; i < 2; i++)
    {
        boundingboxes.Add(new List<BoundingBox>());
        CrossChunkEntitiesAnnotationStrings.Add(new List<string>());
        SingleChunkEntitiesAnnotationStrings.Add(new List<string>());
    }
    double frameTimeInMiliSeconds = (double)1000 / (double)fps;
    // The next chunk starts (total - overlap) frames after the current chunk's start.
    double timeToPostponeInMilliSeconds = (double)(totalFrameCountsBeforeStitching - noFramesOverlap) * frameTimeInMiliSeconds;
    //TimeSpan timeSpanToPostponeInSeconds = new TimeSpan(0,0,0,0, (int)timeToPostponeInMilliSeconds);
    TimeSpan timeSpanToPostponeInSeconds = DateTimeUtilities.getTimeSpanFromTotalMilliSeconds((int)timeToPostponeInMilliSeconds);
    // get the end of the first trace and the begining + overlap of the second trace
    // (each on its OWN pre-shift timeline; the next trace is postponed only after sampling).
    DateTime CurrentTraceSampleFrameTime = currentTrace.VideoSegmentStartTime.AddMilliseconds(frameTimeInMiliSeconds * (totalFrameCountsBeforeStitching - 1));
    DateTime NextTraceSampleFrameTime = nextTrace.VideoSegmentStartTime.AddMilliseconds(frameTimeInMiliSeconds * (Math.Max(noFramesOverlap - 1, 0)));

    // ---- current trace: sample each entity at the last frame ----
    foreach (CompressedTrack entity in currentTrace.tracks)
    {
        SpaceTime st = entity.getSpaceTimeAt(CurrentTraceSampleFrameTime);
        BooleanAttribute outofview_attr = entity.getAttributeAt("outofview", CurrentTraceSampleFrameTime);
        // Only entities visible (present and not out of view) at the sample frame can be
        // matched across chunks; the rest are passed through as single-chunk entities.
        if (st != null && outofview_attr != null && !outofview_attr.value)
        {
            BoundingBox box = st.region;
            boundingboxes[0].Add(box);
            CrossChunkEntitiesAnnotationStrings[0].Add(entity.unCompressToTrackAnnotationString());
        }
        else
        {
            SingleChunkEntitiesAnnotationStrings[0].Add(entity.unCompressToTrackAnnotationString());
        }
    }

    // ---- next trace: sample, then shift every timestamp onto the current trace's timeline ----
    nextTrace.VideoSegmentStartTime += timeSpanToPostponeInSeconds;
    nextTrace.VideoSegmentEndTime += timeSpanToPostponeInSeconds;
    foreach (CompressedTrack entity in nextTrace.tracks)
    {
        // get the frame first before changing the time stmaps.
        SpaceTime st = entity.getSpaceTimeAt(NextTraceSampleFrameTime);
        // NOTE(review): the visibility lookup here uses entity.endTime, not
        // NextTraceSampleFrameTime as the current-trace loop does — confirm whether
        // that asymmetry is intended.
        BooleanAttribute outofview_attr = entity.getAttributeAt("outofview", entity.endTime);
        /// postpone the timestamp of the second trace — inlined here (rather than a single
        /// postpone() call) because sampling above must happen on the pre-shift timeline.
        entity.startTime += timeSpanToPostponeInSeconds;
        entity.endTime += timeSpanToPostponeInSeconds;
        entity.spaceTimeTrack.startTime += timeSpanToPostponeInSeconds;
        entity.spaceTimeTrack.endTime += timeSpanToPostponeInSeconds;
        //foreach (SpaceTime st in entity.spaceTimeTrack.space_time_track)
        for (int i = 0; i < entity.spaceTimeTrack.space_time_track.Count; i++)
        {
            entity.spaceTimeTrack.space_time_track[i].time += timeSpanToPostponeInSeconds;
        }
        //foreach (CompressedBooleanAttributeTrack booleanAttributeTrack in entity.booleanAttributeTracks.Values)
        foreach (string key in entity.booleanAttributeTracks.Keys)
        {
            entity.booleanAttributeTracks[key].startTime += timeSpanToPostponeInSeconds;
            entity.booleanAttributeTracks[key].endTime += timeSpanToPostponeInSeconds;
            //foreach (BooleanAttribute ba in entity.booleanAttributeTracks[key].attribute_track)
            for (int i = 0; i < entity.booleanAttributeTracks[key].attribute_track.Count; i++)
            {
                entity.booleanAttributeTracks[key].attribute_track[i].time += timeSpanToPostponeInSeconds;
            }
        }
        if (st != null && outofview_attr != null && !outofview_attr.value)
        {
            BoundingBox box = st.region;
            boundingboxes[1].Add(box);
            CrossChunkEntitiesAnnotationStrings[1].Add(entity.unCompressToTrackAnnotationString());
        }
        else
        {
            SingleChunkEntitiesAnnotationStrings[1].Add(entity.unCompressToTrackAnnotationString());
        }
    }

    // Match sample-frame boxes between the two traces, then splice the full annotation strings
    // using that association.
    List<MultipartiteWeightedMatch> association =
        BoundingBoxAssociation.computeBoundingBoxAssociations(boundingboxes);
    DateTime NewFrameStartTime = currentTrace.VideoSegmentStartTime + DateTimeUtilities.getTimeSpanFromTotalMilliSeconds((int)(totalFrameCountsBeforeStitching * frameTimeInMiliSeconds));
    string totalStitchedAnnotationString = stitchAnnotationStringByAssociation(CrossChunkEntitiesAnnotationStrings, SingleChunkEntitiesAnnotationStrings, association, NewFrameStartTime);
    MultiObjectTrackingResult ret = MultiObjectTrackingResult.ConvertAnnotationStringToMultiObjectTrackingResult(currentTrace.VideoSegmentStartTime, totalStitchedAnnotationString, currentTrace.cameraId, currentTrace.UID, totalFrameCountsAfterStitching, fps);
    return(ret);
}