public override unsafe void OnNext(StreamMessage <TKey, TPayload> batch) { var count = batch.Count; batch.bitvector = batch.bitvector.MakeWritable(this.pool.bitvectorPool); fixed(long *vsync = batch.vsync.col) fixed(long *vother = batch.vother.col) fixed(long *bv = batch.bitvector.col) { for (int i = 0; i < count; i++) { if ((bv[i >> 6] & (1L << (i & 0x3f))) == 0 || vother[i] == long.MinValue) { if (batch.vother.col[i] == long.MinValue) // Punctuation { if (vsync[i] == StreamEvent.InfinitySyncTime) { OutputAllEvents(); } else { OutputCompletedIntervals(); } this.lastCti = Math.Max(vsync[i], this.lastCti); this.lastSyncTime = Math.Max(vsync[i], this.lastSyncTime); AddPunctuationToBatch(batch.vsync.col[i]); } else if (vsync[i] < vother[i]) // Start edge or interval { var evt = new ActiveEvent { End = vother[i], Payload = batch[i], Key = batch.key.col[i], Hash = batch.hash.col[i] }; if (!this.eventMap.TryGetValue(vsync[i], out var entry)) { this.dictPool.Get(out entry); this.eventMap.Add(vsync[i], entry); } if (!entry.Lookup(evt, out int index)) { entry.Insert(evt, 1); } else { entry.entries[index].value++; } } else // end edge { // lookup corresponding start edge var lookupevt = new ActiveEvent { End = StreamEvent.InfinitySyncTime, Payload = batch[i], Key = batch.key.col[i], Hash = batch.hash.col[i] }; if (!this.eventMap.TryGetValue(vother[i], out FastDictionary2 <ActiveEvent, int> entry)) { throw new InvalidOperationException("Found end edge without corresponding start edge"); } if (!entry.Lookup(lookupevt, out int index)) { throw new InvalidOperationException("Found end edge without corresponding start edge"); } // Set interval payload to the payload of the original start-edge // (in case they are different due to an optimized payload equality comparer) lookupevt.Payload = entry.entries[index].key.Payload; // delete the start edge entry.entries[index].value--; if (entry.entries[index].value == 0) { entry.Remove(lookupevt); } // insert interval lookupevt.End = batch.vsync.col[i]; if (!entry.Lookup(lookupevt, out index)) { entry.Insert(lookupevt, 1); } else { entry.entries[index].value++; } OutputCompletedIntervals(); // Can make this more efficient by trying only if the first event in index got completed } } } } batch.Free(); }
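/// <summary> /// Merges the left and right input batches in sync-time order: whichever side has the earlier next timestamp is dispatched to ProcessLeftEvent or ProcessRightEvent, until one batch runs out of visible rows. /// </summary>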
protected override void ProcessBothBatches(StreamMessage <TKey, TLeft> leftBatch, StreamMessage <TKey, TRight> rightBatch, out bool leftBatchDone, out bool rightBatchDone, out bool leftBatchFree, out bool rightBatchFree) { leftBatchFree = rightBatchFree = true; if (!GoToVisibleRow(leftBatch)) { leftBatchDone = true; rightBatchDone = false; return; } UpdateNextLeftTime(leftBatch.vsync.col[leftBatch.iter]); if (!GoToVisibleRow(rightBatch)) { leftBatchDone = false; rightBatchDone = true; return; } UpdateNextRightTime(rightBatch.vsync.col[rightBatch.iter]); while (true) { if (this.nextLeftTime <= this.nextRightTime) { UpdateTime(this.nextLeftTime); ProcessLeftEvent( this.nextLeftTime, leftBatch.vother.col[leftBatch.iter], ref leftBatch.key.col[leftBatch.iter], leftBatch[leftBatch.iter], leftBatch.hash.col[leftBatch.iter]); leftBatch.iter++; if (!GoToVisibleRow(leftBatch)) { leftBatchDone = true; rightBatchDone = false; return; } UpdateNextLeftTime(leftBatch.vsync.col[leftBatch.iter]); } else { UpdateTime(this.nextRightTime); ProcessRightEvent( this.nextRightTime, rightBatch.vother.col[rightBatch.iter], ref rightBatch.key.col[rightBatch.iter], rightBatch[rightBatch.iter], rightBatch.hash.col[rightBatch.iter]); rightBatch.iter++; if (!GoToVisibleRow(rightBatch)) { leftBatchDone = false; rightBatchDone = true; return; } UpdateNextRightTime(rightBatch.vsync.col[rightBatch.iter]); } } }
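/// <summary> /// Partitioned grouping of same-timestamp events: visible rows are appended to per-key payload lists for their partition's current timestamp; a low watermark advances time for every partition before OnLowWatermark, and a punctuation advances time for its own partition only. /// </summary>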
public override unsafe void OnNext(StreamMessage <TKey, TPayload> batch) { var count = batch.Count; var srckey = batch.key.col; SavedEventList <TKey, TPayload> sevref = default; fixed(long *src_bv = batch.bitvector.col, src_vsync = batch.vsync.col, src_vother = batch.vother.col) { fixed(int *src_hash = batch.hash.col) { for (int i = 0; i < count; i++) { if ((src_bv[i >> 6] & (1L << (i & 0x3f))) == 0) { var partitionKey = this.getPartitionKey(srckey[i]); int partitionIndex = EnsurePartition(partitionKey); long synctime = src_vsync[i]; int index; if (synctime > this.lastSyncTime.entries[partitionIndex].value) // move time forward { ProcessCurrentTimestamp(partitionKey, partitionIndex); this.lastSyncTime.entries[partitionIndex].value = synctime; } bool done = false; var eventListTraverser = new FastMap <SavedEventList <TKey, TPayload> > .FindTraverser(this.currentTimestampEventList.entries[partitionIndex].value); if (eventListTraverser.Find(src_hash[i])) { while (eventListTraverser.Next(out index)) { var state = this.currentTimestampEventList.entries[partitionIndex].value.Values[index]; if (this.keyEqualityComparer(state.key, srckey[i])) { state.payloads.Add(batch.payload.col[i]); done = true; break; } } } if (!done) { index = this.currentTimestampEventList.entries[partitionIndex].value.Insert(src_hash[i]); sevref.payloads = new List <TPayload>(10) { batch.payload.col[i] }; sevref.key = srckey[i]; this.currentTimestampEventList.entries[partitionIndex].value.Values[index] = sevref; } } else if (src_vother[i] == PartitionedStreamEvent.LowWatermarkOtherTime) // Low watermark: advance time for every partition { int partitionIndex = FastDictionary2 <TPartitionKey, List <TKey> > .IteratorStart; long synctime = src_vsync[i]; while (this.lastSyncTime.Iterate(ref partitionIndex)) { if (synctime > this.lastSyncTime.entries[partitionIndex].value) // move time forward { ProcessCurrentTimestamp(this.lastSyncTime.entries[partitionIndex].key, partitionIndex); this.lastSyncTime.entries[partitionIndex].value = synctime; } } OnLowWatermark(synctime); } else if (src_vother[i] == PartitionedStreamEvent.PunctuationOtherTime) // Punctuation: advance time for this partition only { var partitionKey = this.getPartitionKey(srckey[i]); int partitionIndex = EnsurePartition(partitionKey); long synctime = src_vsync[i]; if (synctime > this.lastSyncTime.entries[partitionIndex].value) // move time forward { ProcessCurrentTimestamp(partitionKey, partitionIndex); this.lastSyncTime.entries[partitionIndex].value = synctime; } } } } } batch.Free(); }
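/// <summary> /// Keyed snapshot aggregate: when sync time advances, every held per-key aggregate is emitted as a start edge; the first event for an existing group at a later time first emits an end edge for the previous snapshot, and each visible event is then accumulated into its group's state. /// </summary>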
public override unsafe void OnNext(StreamMessage <TKey, TInput> batch) { var count = batch.Count; var colkey = batch.key.col; var colpayload = batch.payload.col; fixed(long *col_vsync = batch.vsync.col) fixed(long *col_vother = batch.vother.col) fixed(int *col_hash = batch.hash.col) fixed(long *col_bv = batch.bitvector.col) for (int i = 0; i < count; i++) { if ((col_bv[i >> 6] & (1L << (i & 0x3f))) != 0) { if (col_vother[i] == StreamEvent.PunctuationOtherTime) { // We have found a row that corresponds to punctuation OnPunctuation(col_vsync[i]); int c = this.batch.Count; this.batch.vsync.col[c] = col_vsync[i]; this.batch.vother.col[c] = StreamEvent.PunctuationOtherTime; this.batch.key.col[c] = default; this.batch.hash.col[c] = 0; this.batch.bitvector.col[c >> 6] |= (1L << (c & 0x3f)); this.batch.Count++; if (this.batch.Count == Config.DataBatchSize) { FlushContents(); } } continue; } var syncTime = col_vsync[i]; HeldState <TState> heldState; // Handle time moving forward if (syncTime > this.lastSyncTime) { foreach (int iter1 in this.heldAggregates) { var iter1entry = this.aggregateByKey.entries[iter1]; int c = this.batch.Count; this.batch.vsync.col[c] = iter1entry.value.timestamp; this.batch.vother.col[c] = StreamEvent.InfinitySyncTime; this.batch.payload.col[c] = this.computeResult(iter1entry.value.state); this.batch.key.col[c] = iter1entry.key; this.batch.hash.col[c] = this.keyComparerGetHashCode(iter1entry.key); this.batch.Count++; if (this.batch.Count == Config.DataBatchSize) { FlushContents(); } } this.heldAggregates.Clear(); // Since sync time changed, set lastSyncTime this.lastSyncTime = syncTime; } // Need to retrieve the key from the dictionary if (!this.aggregateByKey.Lookup(colkey[i], col_hash[i], out int aggindex)) { // New group. Create new state heldState = new HeldState <TState> { state = this.initialState(), timestamp = syncTime }; this.heldAggregates.Add(this.aggregateByKey.Insert(colkey[i], heldState, col_hash[i])); // No output because initial state is empty } else if (this.heldAggregates.Add(aggindex)) { // First time group is active for this time heldState = this.aggregateByKey.entries[aggindex].value; if (syncTime > heldState.timestamp) { // Output end edge int c = this.batch.Count; this.batch.vsync.col[c] = syncTime; this.batch.vother.col[c] = heldState.timestamp; this.batch.payload.col[c] = this.computeResult(heldState.state); this.batch.key.col[c] = colkey[i]; this.batch.hash.col[c] = this.keyComparerGetHashCode(colkey[i]); this.batch.Count++; if (this.batch.Count == Config.DataBatchSize) { FlushContents(); } heldState.timestamp = syncTime; } } else { // read new currentState from _heldAgg index heldState = this.aggregateByKey.entries[aggindex].value; } // It's always a start edge heldState.state = this.accumulate(heldState.state, col_vsync[i], colpayload[i]); } batch.Release(); batch.Return(); }
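/// <summary> /// Collects all events that share the current sync time into per-key payload lists; when time moves forward, ProcessCurrentTimestamp flushes the collected lists, and punctuations are forwarded via OnPunctuation. /// </summary>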
public override unsafe void OnNext(StreamMessage <TKey, TPayload> batch) { var count = batch.Count; var srckey = batch.key.col; SavedEventList <TKey, TPayload> sevref = default; fixed(long *src_bv = batch.bitvector.col, src_vsync = batch.vsync.col) { fixed(int *src_hash = batch.hash.col) { for (int i = 0; i < count; i++) { if ((src_bv[i >> 6] & (1L << (i & 0x3f))) == 0 || batch.vother.col[i] < 0) { long synctime = src_vsync[i]; int index; if (synctime > this.lastSyncTime) // move time forward { ProcessCurrentTimestamp(); this.lastSyncTime = synctime; } if (batch.vother.col[i] < 0) { OnPunctuation(synctime); continue; } bool done = false; if (this.eventListTraverser.Find(src_hash[i])) { while (this.eventListTraverser.Next(out index)) { var state = this.currentTimestampEventList.Values[index]; if (this.keyEqualityComparer(state.key, srckey[i])) { state.payloads.Add(batch.payload.col[i]); done = true; break; } } } if (!done) { index = this.currentTimestampEventList.Insert(src_hash[i]); sevref.payloads = new List <TPayload>(10) { batch.payload.col[i] }; sevref.key = srckey[i]; this.currentTimestampEventList.Values[index] = sevref; } } } } } batch.Free(); }
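/// <summary> /// Sprays input batches across the downstream observers: in multicast mode every observer receives a clone; otherwise batches are distributed round-robin (optionally preserving sort order), and any punctuation in a batch is broadcast to the remaining observers via a punctuation-only clone. /// </summary>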
public void OnNext(StreamMessage <TKey, TSpray> batch) { if (this.multicast) { for (int i = 0; i < this.totalBranches; i++) { this.pool.Get(out StreamMessage <TKey, TSpray> _outbatch); _outbatch.CloneFrom(batch, false); this.Observers[i].OnNext(_outbatch); } batch.Free(); return; } // If this batch contains any punctuation, we need to broadcast them to all observers. StreamMessage <TKey, TSpray> broadcastMaster = null; for (int i = batch.Count - 1; i >= 0; i--) { if (batch.vother.col[i] < 0) { // Create a master broadcast batch that we can clone from. // TODO: maybe it's better to allocate a new batch without deleted data gaps? this.pool.Get(out broadcastMaster); broadcastMaster.CloneFrom(batch); broadcastMaster.bitvector = broadcastMaster.bitvector.MakeWritable(this.pool.bitvectorPool); // Since we only care about punctuations, delete everything for (int deletingIndex = 0; deletingIndex <= broadcastMaster.Count >> 6; deletingIndex++) { broadcastMaster.bitvector.col[deletingIndex] = ~(0L); } break; } } int originalBatchRecipient; if (this.spraySortOrderComparer == null) { originalBatchRecipient = this.l1_spray; this.Observers[this.l1_spray].OnNext(batch); this.l1_spray++; if (this.l1_spray == this.totalBranches) { this.l1_spray = 0; } } else { if (this.first || (this.spraySortOrderComparerFunc(this.lastElem, batch[0]) == 0)) { this.first = false; this.lastElem = batch[batch.Count - 1]; originalBatchRecipient = this.l1_spray; this.Observers[this.l1_spray].OnNext(batch); } else { this.lastElem = batch[batch.Count - 1]; this.l1_spray++; if (this.l1_spray == this.totalBranches) { this.l1_spray = 0; } originalBatchRecipient = this.l1_spray; this.Observers[this.l1_spray].OnNext(batch); } } if (broadcastMaster != null) { // Broadcast to all except the observer that received the current batch int lastBroadcastIndex = this.totalBranches - 1; if (lastBroadcastIndex == originalBatchRecipient) { lastBroadcastIndex--; } for (int i = 0; i < this.totalBranches; i++) { if (i == originalBatchRecipient) { continue; // skip observer that received the current batch } if (i == lastBroadcastIndex) { this.Observers[i].OnNext(broadcastMaster); } else { this.pool.Get(out StreamMessage <TKey, TSpray> broadcastClone); broadcastClone.CloneFrom(broadcastMaster); this.Observers[i].OnNext(broadcastClone); } } } }
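/// <summary> /// Forwards the right-input batch directly downstream; the batch is marked done but not freed, since ownership transfers to the observer. /// </summary>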
protected override void ProcessRightBatch(StreamMessage <TKey, TPayload> rightBatch, out bool rightBatchDone, out bool rightBatchFree) { rightBatchDone = true; rightBatchFree = false; this.Observer.OnNext(rightBatch); }
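/// <summary> /// Converts events to point events at their end time: start edges are dropped, end edges become single-tick intervals at the end timestamp, and intervals are held on the end-point heap so they can be emitted when time reaches their end point; punctuations are passed through. /// </summary>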
public override unsafe void OnNext(StreamMessage <TKey, TPayload> batch) { var count = batch.Count; fixed(long *bv = batch.bitvector.col) { for (int i = 0; i < count; i++) { if ((bv[i >> 6] & (1L << (i & 0x3f))) == 0) { if (batch.vsync.col[i] > this.lastSyncTime) { ReachTime(batch.vsync.col[i]); } if (batch.vother.col[i] == StreamEvent.InfinitySyncTime) { } else if (batch.vother.col[i] < batch.vsync.col[i]) { int ind = this.output.Count++; this.output.vsync.col[ind] = batch.vsync.col[i]; this.output.vother.col[ind] = batch.vsync.col[i] + 1; this.output.key.col[ind] = batch.key.col[i]; this.output[ind] = batch[i]; this.output.hash.col[ind] = batch.hash.col[i]; if (this.output.Count == Config.DataBatchSize) { FlushContents(); } } else { int index = this.intervalMap.Insert(batch.hash.col[i]); this.intervalMap.Values[index].Populate(batch.key.col[i], batch[i], batch.hash.col[i]); this.endPointHeap.Insert(batch.vother.col[i], index); } } else if (batch.vother.col[i] == StreamEvent.PunctuationOtherTime) { ReachTime(batch.vsync.col[i]); int ind = this.output.Count++; this.output.vsync.col[ind] = batch.vsync.col[i]; this.output.vother.col[ind] = batch.vother.col[i]; this.output.key.col[ind] = batch.key.col[i]; this.output.payload.col[ind] = default; this.output.hash.col[ind] = batch.hash.col[i]; this.output.bitvector.col[ind >> 6] |= (1L << (ind & 0x3f)); if (this.output.Count == Config.DataBatchSize) { FlushContents(); } } } } batch.Free(); }
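/// <summary> /// Extends event lifetimes by a fixed duration: start edges pass through unchanged, intervals have their end time increased by the duration, and end edges are held on the end-point heap to be re-issued once time reaches vsync + duration; punctuations are passed through. /// </summary>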
public override unsafe void OnNext(StreamMessage <TKey, TPayload> batch) { var count = batch.Count; fixed(long *bv = batch.bitvector.col) { for (int i = 0; i < count; i++) { if ((bv[i >> 6] & (1L << (i & 0x3f))) == 0) { if (batch.vsync.col[i] > this.lastSyncTime) { ReachTime(batch.vsync.col[i]); } if (batch.vother.col[i] == StreamEvent.InfinitySyncTime) // For start events, copy directly across { int ind = this.output.Count++; this.output.vsync.col[ind] = batch.vsync.col[i]; this.output.vother.col[ind] = StreamEvent.InfinitySyncTime; this.output.key.col[ind] = batch.key.col[i]; this.output[ind] = batch[i]; this.output.hash.col[ind] = batch.hash.col[i]; if (this.output.Count == Config.DataBatchSize) { FlushContents(); } } else if (batch.vother.col[i] > batch.vsync.col[i]) // For intervals, just extend the duration { int ind = this.output.Count++; this.output.vsync.col[ind] = batch.vsync.col[i]; this.output.vother.col[ind] = batch.vother.col[i] + this.duration; this.output.key.col[ind] = batch.key.col[i]; this.output[ind] = batch[i]; this.output.hash.col[ind] = batch.hash.col[i]; if (this.output.Count == Config.DataBatchSize) { FlushContents(); } } else { int index = this.endPointMap.Insert(batch.hash.col[i]); this.endPointMap.Values[index].Populate(batch.key.col[i], batch[i], batch.vother.col[i], batch.hash.col[i]); this.endPointHeap.Insert(batch.vsync.col[i] + this.duration, index); } } else if (batch.vother.col[i] == long.MinValue) { ReachTime(batch.vsync.col[i]); int ind = this.output.Count++; this.output.vsync.col[ind] = batch.vsync.col[i]; this.output.vother.col[ind] = long.MinValue; this.output.key.col[ind] = batch.key.col[i]; this.output[ind] = default; this.output.hash.col[ind] = batch.hash.col[i]; this.output.bitvector.col[ind >> 6] |= (1L << (ind & 0x3f)); if (this.output.Count == Config.DataBatchSize) { FlushContents(); } } } } batch.Free(); }
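/// <summary> /// Pattern-matching (AFA) pipe: when sync time moves forward any tentative output is flushed, duplicate events for a key at the same time invalidate that key's tentative output and active states, currently active states are advanced through the state machine (matches at final states are emitted), and new activations are started from the start states. /// </summary>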
public override unsafe void OnNext(StreamMessage <TKey, TPayload> batch) { var stack = new Stack <int>(); var activeFindTraverser = new FastMap <GroupedActiveState <TKey, TRegister> > .FindTraverser(this.activeStates); var tentativeFindTraverser = new FastMap <OutputEvent <TKey, TRegister> > .FindTraverser(this.tentativeOutput); var tentativeVisibleTraverser = new FastMap <OutputEvent <TKey, TRegister> > .VisibleTraverser(this.tentativeOutput); var count = batch.Count; var dest_vsync = this.batch.vsync.col; var dest_vother = this.batch.vother.col; var destkey = this.batch.key.col; var dest_hash = this.batch.hash.col; var srckey = batch.key.col; fixed(long *src_bv = batch.bitvector.col, src_vsync = batch.vsync.col) { fixed(int *src_hash = batch.hash.col) { for (int i = 0; i < count; i++) { if ((src_bv[i >> 6] & (1L << (i & 0x3f))) == 0) { long synctime = src_vsync[i]; if (!this.IsSyncTimeSimultaneityFree) { int index; if (synctime > this.lastSyncTime) // move time forward { this.seenEvent.Clear(); if (this.tentativeOutput.Count > 0) { tentativeVisibleTraverser.currIndex = 0; while (tentativeVisibleTraverser.Next(out index, out int hash)) { var elem = this.tentativeOutput.Values[index]; dest_vsync[this.iter] = this.lastSyncTime; dest_vother[this.iter] = elem.other; this.batch.payload.col[this.iter] = elem.payload; destkey[this.iter] = elem.key; dest_hash[this.iter] = hash; this.iter++; if (this.iter == Config.DataBatchSize) { FlushContents(); dest_vsync = this.batch.vsync.col; dest_vother = this.batch.vother.col; destkey = this.batch.key.col; dest_hash = this.batch.hash.col; } } this.tentativeOutput.Clear(); // Clear the tentative output list } this.lastSyncTime = synctime; } if (this.seenEvent.Lookup(srckey[i], out index)) // Incoming event is a simultaneous one { if (this.seenEvent.entries[index].value == 1) // Detecting first duplicate, need to adjust state { this.seenEvent.entries[index].value = 2; // Delete tentative output for that key if (tentativeFindTraverser.Find(src_hash[i])) { while (tentativeFindTraverser.Next(out index)) { if (this.keyEqualityComparer(this.tentativeOutput.Values[index].key, srckey[i])) { tentativeFindTraverser.Remove(); } } } // Delete active states for that key if (activeFindTraverser.Find(src_hash[i])) { while (activeFindTraverser.Next(out index)) { if (this.keyEqualityComparer(this.activeStates.Values[index].key, srckey[i])) { activeFindTraverser.Remove(); } } } } // Dont process this event continue; } else { this.seenEvent.Insert(ref index, srckey[i], 1); } } /* (1) Process currently active states */ bool ended = true; if (activeFindTraverser.Find(src_hash[i])) { int orig_index; while (activeFindTraverser.Next(out int index)) { orig_index = index; var state = this.activeStates.Values[index]; if (!this.keyEqualityComparer(state.key, srckey[i])) { continue; } if (state.PatternStartTimestamp + this.MaxDuration > synctime) { var currentStateMap = this.singleEventStateMap[state.state]; if (currentStateMap != null) { var m = currentStateMap.Length; for (int cnt = 0; cnt < m; cnt++) { var arcinfo = currentStateMap[cnt]; if (arcinfo.Fence(synctime, batch[i], state.register)) { var newReg = arcinfo.Transfer == null ? 
state.register : arcinfo.Transfer(synctime, batch[i], state.register); int ns = arcinfo.toState; while (true) { if (this.isFinal[ns]) { if (!this.IsSyncTimeSimultaneityFree) { int ind = this.tentativeOutput.Insert(src_hash[i]); this.tentativeOutput.Values[ind].other = state.PatternStartTimestamp + this.MaxDuration; this.tentativeOutput.Values[ind].key = srckey[i]; this.tentativeOutput.Values[ind].payload = newReg; } else { dest_vsync[this.iter] = synctime; dest_vother[this.iter] = state.PatternStartTimestamp + this.MaxDuration; this.batch[this.iter] = newReg; destkey[this.iter] = srckey[i]; dest_hash[this.iter] = src_hash[i]; this.iter++; if (this.iter == Config.DataBatchSize) { FlushContents(); dest_vsync = this.batch.vsync.col; dest_vother = this.batch.vother.col; destkey = this.batch.key.col; dest_hash = this.batch.hash.col; } } } if (this.hasOutgoingArcs[ns]) { if (index == -1) { index = this.activeStates.Insert(src_hash[i]); } this.activeStates.Values[index].key = srckey[i]; this.activeStates.Values[index].state = ns; this.activeStates.Values[index].register = newReg; this.activeStates.Values[index].PatternStartTimestamp = state.PatternStartTimestamp; index = -1; ended = false; // Add epsilon arc destinations to stack if (this.epsilonStateMap == null) { break; } if (this.epsilonStateMap[ns] != null) { for (int cnt2 = 0; cnt2 < this.epsilonStateMap[ns].Length; cnt2++) { stack.Push(this.epsilonStateMap[ns][cnt2]); } } } if (stack.Count == 0) { break; } ns = stack.Pop(); } if (this.IsDeterministic) { break; // We are guaranteed to have only one successful transition } } } } } if (index == orig_index) { activeFindTraverser.Remove(); } if (this.IsDeterministic) { break; // We are guaranteed to have only one active state } } } /* (2) Start new activations from the start state(s) */ if (!this.AllowOverlappingInstances && !ended) { continue; } for (int counter = 0; counter < this.numStartStates; counter++) { int startState = this.startStates[counter]; var startStateMap = this.singleEventStateMap[startState]; if (startStateMap != null) { var m = startStateMap.Length; for (int cnt = 0; cnt < m; cnt++) { var arcinfo = startStateMap[cnt]; if (arcinfo.Fence(synctime, batch[i], this.defaultRegister)) { var newReg = arcinfo.Transfer == null ? 
this.defaultRegister : arcinfo.Transfer(synctime, batch[i], this.defaultRegister); int ns = arcinfo.toState; while (true) { if (this.isFinal[ns]) { if (!this.IsSyncTimeSimultaneityFree) { int ind = this.tentativeOutput.Insert(src_hash[i]); this.tentativeOutput.Values[ind].other = synctime + this.MaxDuration; this.tentativeOutput.Values[ind].key = srckey[i]; this.tentativeOutput.Values[ind].payload = newReg; } else { dest_vsync[this.iter] = synctime; dest_vother[this.iter] = synctime + this.MaxDuration; this.batch[this.iter] = newReg; destkey[this.iter] = srckey[i]; dest_hash[this.iter] = src_hash[i]; this.iter++; if (this.iter == Config.DataBatchSize) { FlushContents(); dest_vsync = this.batch.vsync.col; dest_vother = this.batch.vother.col; destkey = this.batch.key.col; dest_hash = this.batch.hash.col; } } } if (this.hasOutgoingArcs[ns]) { int index = this.activeStates.Insert(src_hash[i]); this.activeStates.Values[index].key = srckey[i]; this.activeStates.Values[index].state = ns; this.activeStates.Values[index].register = newReg; this.activeStates.Values[index].PatternStartTimestamp = synctime; // Add epsilon arc destinations to stack if (this.epsilonStateMap == null) { break; } if (this.epsilonStateMap[ns] != null) { for (int cnt2 = 0; cnt2 < this.epsilonStateMap[ns].Length; cnt2++) { stack.Push(this.epsilonStateMap[ns][cnt2]); } } } if (stack.Count == 0) { break; } ns = stack.Pop(); } if (this.IsDeterministic) { break; // We are guaranteed to have only one successful transition } } } } if (this.IsDeterministic) { break; // We are guaranteed to have only one start state } } } else if (batch.vother.col[i] < 0 && !this.IsSyncTimeSimultaneityFree) { long synctime = src_vsync[i]; if (synctime > this.lastSyncTime) // move time forward { this.seenEvent.Clear(); if (this.tentativeOutput.Count > 0) { tentativeVisibleTraverser.currIndex = 0; while (tentativeVisibleTraverser.Next(out int index, out int hash)) { var elem = this.tentativeOutput.Values[index]; this.batch.vsync.col[this.iter] = this.lastSyncTime; this.batch.vother.col[this.iter] = elem.other; this.batch.payload.col[this.iter] = elem.payload; this.batch.key.col[this.iter] = elem.key; this.batch.hash.col[this.iter] = hash; this.iter++; if (this.iter == Config.DataBatchSize) { FlushContents(); } } this.tentativeOutput.Clear(); // Clear the tentative output list } this.lastSyncTime = synctime; } OnPunctuation(synctime); } } } } batch.Free(); }
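/// <summary> /// Session-window pipe: each visible data event is recorded per key with its arrival time, emitted immediately as a start edge, and keys are kept ordered by their session-timeout threshold so ReachTime can close expired sessions; punctuations advance time and are forwarded. /// </summary>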
public override unsafe void OnNext(StreamMessage <TKey, TPayload> batch) { var count = batch.Count; fixed(long *bv = batch.bitvector.col) fixed(long *vsync = batch.vsync.col) fixed(long *vother = batch.vother.col) fixed(int *hash = batch.hash.col) { for (int i = 0; i < count; i++) { if ((bv[i >> 6] & (1L << (i & 0x3f))) == 0) { if (vsync[i] > vother[i]) // We have an end edge { ReachTime(-1, vsync[i]); } else { // Check to see if the key is already being tracked if (!this.lastDataTimeDictionary.Lookup(batch.key.col[i], out int keyIndex)) { keyIndex = AllocatePartition(batch.key.col[i]); } ReachTime(keyIndex, vsync[i]); // Check to see if advancing time removed the key if (!this.lastDataTimeDictionary.Lookup(batch.key.col[i], out keyIndex)) { keyIndex = AllocatePartition(batch.key.col[i]); } if (!this.stateDictionary.entries[keyIndex].value.Any()) { this.orderedKeys.AddLast(new LinkedListNode <TKey>(batch.key.col[i])); } else { var oldThreshold = Math.Min(this.lastDataTimeDictionary.entries[keyIndex].value + this.sessionTimeout, this.windowEndTimeDictionary.entries[keyIndex].value); var newThreshold = Math.Min(vsync[i] + this.sessionTimeout, this.windowEndTimeDictionary.entries[keyIndex].value); if (newThreshold > oldThreshold) { var node = this.orderedKeys.Find(batch.key.col[i]); this.orderedKeys.Remove(node); this.orderedKeys.AddLast(node); } } this.lastDataTimeDictionary.entries[keyIndex].value = vsync[i]; this.stateDictionary.entries[keyIndex].value.Enqueue(new ActiveEvent { Key = batch.key.col[i], Sync = vsync[i], Hash = hash[i], Payload = batch.payload.col[i], }); int ind = this.output.Count++; this.output.vsync.col[ind] = vsync[i]; this.output.vother.col[ind] = StreamEvent.InfinitySyncTime; this.output.key.col[ind] = batch.key.col[i]; this.output[ind] = batch.payload.col[i]; this.output.hash.col[ind] = hash[i]; if (this.output.Count == Config.DataBatchSize) { FlushContents(); } } } else if (vother[i] == long.MinValue) { ReachTime(-1, vsync[i]); int ind = this.output.Count++; this.output.vsync.col[ind] = vsync[i]; this.output.vother.col[ind] = long.MinValue; this.output.key.col[ind] = batch.key.col[i]; this.output[ind] = batch.payload.col[i]; this.output.hash.col[ind] = hash[i]; this.output.bitvector.col[ind >> 6] |= (1L << (ind & 0x3f)); if (this.output.Count == Config.DataBatchSize) { FlushContents(); } } } } batch.Free(); }
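/// <summary> /// Union of two inputs in sync-time order, with a fast path that forwards an entire batch when it wholly precedes the other side's next timestamp; otherwise rows are emitted one at a time from whichever side is earlier. /// </summary>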
protected override void ProcessBothBatches(StreamMessage <TKey, TPayload> leftBatch, StreamMessage <TKey, TPayload> rightBatch, out bool leftBatchDone, out bool rightBatchDone, out bool leftBatchFree, out bool rightBatchFree) { leftBatchFree = rightBatchFree = true; long lastLeftTime = -1; long lastRightTime = -1; bool first = (leftBatch.iter == 0); if (!GoToVisibleRow(leftBatch)) { leftBatchDone = true; rightBatchDone = false; return; } this.nextLeftTime = leftBatch.vsync.col[leftBatch.iter]; if (first) { lastLeftTime = leftBatch.vsync.col[leftBatch.Count - 1]; } first = (rightBatch.iter == 0); if (!GoToVisibleRow(rightBatch)) { leftBatchDone = false; rightBatchDone = true; return; } this.nextRightTime = rightBatch.vsync.col[rightBatch.iter]; if (first) { lastRightTime = rightBatch.vsync.col[rightBatch.Count - 1]; } if ((lastLeftTime != -1) && (lastRightTime != -1)) { leftBatchDone = rightBatchDone = false; if (lastLeftTime <= this.nextRightTime) { OutputBatch(leftBatch); leftBatchDone = true; leftBatchFree = false; } if (Config.DeterministicWithinTimestamp ? (lastRightTime < this.nextLeftTime) : (lastRightTime <= this.nextLeftTime)) { OutputBatch(rightBatch); rightBatchDone = true; rightBatchFree = false; } if (leftBatchDone || rightBatchDone) { return; } } while (true) { if (this.nextLeftTime <= this.nextRightTime) { OutputCurrentTuple(leftBatch); leftBatch.iter++; if (!GoToVisibleRow(leftBatch)) { leftBatchDone = true; rightBatchDone = false; return; } this.nextLeftTime = leftBatch.vsync.col[leftBatch.iter]; } else { OutputCurrentTuple(rightBatch); rightBatch.iter++; if (!GoToVisibleRow(rightBatch)) { leftBatchDone = false; rightBatchDone = true; return; } this.nextRightTime = rightBatch.vsync.col[rightBatch.iter]; } } }
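/// <summary> /// Beat (chop) pipe: start edges are emitted and tracked as open edges, end edges emit a closing edge clipped to the last beat when the edge started before it and remove the matching open edge, and intervals are truncated at the next beat and tracked so they can be repeated at later beats. /// </summary>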
public override unsafe void OnNext(StreamMessage <TKey, TPayload> batch) { this.batchIter = batch.iter; TPayload[] sourcePayload = batch.payload.col; TKey[] sourceKey = batch.key.col; fixed(int *sourceHash = batch.hash.col) fixed(long *sourceBitVector = batch.bitvector.col) fixed(long *sourceVSync = batch.vsync.col) fixed(long *sourceVOther = batch.vother.col) { int count = batch.Count; int * sourceHashPtr = sourceHash; long *sourceVSyncPtr = sourceVSync; long *sourceVOtherPtr = sourceVOther; for (int row = 0; row < count; row++) { if ((sourceBitVector[row >> 6] & (1L << (row & 0x3f))) == 0 || *sourceVOtherPtr == long.MinValue) { long startTime = *sourceVSyncPtr; long endTime = *sourceVOtherPtr; int hash = *sourceHashPtr; AdvanceTime(startTime); bool isPunctuation = endTime == long.MinValue; bool isInsert = startTime < endTime; bool isStartEdge = isInsert && endTime == StreamEvent.InfinitySyncTime; bool isEndEdge = !isInsert; if (isPunctuation) { AddToBatch(startTime, long.MinValue, ref sourceKey[row], ref sourcePayload[row], hash); } else if (isStartEdge) { // Add starting edge { vSync = startTime, vOther = StreamEvent.InfinitySyncTime }. AddToBatch( startTime, StreamEvent.InfinitySyncTime, ref sourceKey[row], ref sourcePayload[row], hash); // Add to active edges list to handle repeat at beats (and waiting for closing edge). int index = this.edges.Insert(hash); this.edges.Values[index].Populate( startTime, ref sourceKey[row], ref sourcePayload[row]); } else if (isEndEdge) { bool notCurrentlyOnBeat = startTime != this.currBeatTime; long edgeStartTime = endTime; long edgeEndTime = startTime; if (notCurrentlyOnBeat) { // Edges are only open if not on a beat. long lastBeatTime = this.currBeatTime - this.period; bool edgeStartedBeforeLastBeat = edgeStartTime < lastBeatTime; if (edgeStartedBeforeLastBeat) { // Add closing edge { vSync = edgeEndTime, vOther = lastBeatTime }. AddToBatch( edgeEndTime, lastBeatTime, ref sourceKey[row], ref sourcePayload[row], hash); } else { // Add closing edge { vSync = edgeEndTime, vOther = edgeStartTime }. AddToBatch( edgeEndTime, edgeStartTime, ref sourceKey[row], ref sourcePayload[row], hash); } } // Remove from active edges list. var edgesTraversal = this.edges.Find(hash); while (edgesTraversal.Next(out int index)) { var temp = this.edges.Values[index]; if (AreSame(edgeStartTime, ref sourceKey[row], ref sourcePayload[row], ref temp)) { edgesTraversal.Remove(); break; } } } else { long nextBeatTime = startTime == this.currBeatTime ? this.currBeatTime + this.period : this.currBeatTime; bool isLastBeatForInterval = endTime <= nextBeatTime; if (isLastBeatForInterval) { // Add interval { vSync = startTime, vOther = endTime }. AddToBatch(startTime, endTime, ref sourceKey[row], ref sourcePayload[row], hash); // No need to add to active list as interval ends <= nextBeatTime. } else { // Add interval { vSync = startTime, vOther = nextBeatTime }. AddToBatch(startTime, nextBeatTime, ref sourceKey[row], ref sourcePayload[row], hash); // Add to active list to handle repeat at beats. int index = this.intervals.Insert(hash); this.intervals.Values[index].Populate(endTime, ref sourceKey[row], ref sourcePayload[row]); } } } // Advance pointers. sourceHashPtr++; sourceVSyncPtr++; sourceVOtherPtr++; } } batch.Free(); }
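/// <summary> /// AlterLifetime over a partitioned stream: applies the start-time and duration selectors to each event, filters out events whose adjusted lifetime becomes empty, tracks the last sync time per partition, and throws if an event would be produced out of sync-time order. /// </summary>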
public unsafe override void OnNext(StreamMessage <TKey, TPayload> batch) { batch.vsync = batch.vsync.MakeWritable(this.pool.longPool); batch.vother = batch.vother.MakeWritable(this.pool.longPool); batch.bitvector = batch.bitvector.MakeWritable(this.pool.bitvectorPool); var count = batch.Count; fixed(long *vsync = batch.vsync.col) fixed(long *vother = batch.vother.col) fixed(long *bv = batch.bitvector.col) for (int i = 0; i < count; i++) { int index; TPartitionKey partition; if ((bv[i >> 6] & (1L << (i & 0x3f))) != 0) { if (vother[i] == PartitionedStreamEvent.LowWatermarkOtherTime) { if ((this.startTimeSelector != null) && (vsync[i] < StreamEvent.InfinitySyncTime)) { vsync[i] = this.startTimeSelectorCompiled(vsync[i]); if (vsync[i] > StreamEvent.MaxSyncTime) { throw new ArgumentOutOfRangeException(); } } var iter = FastDictionary <TPartitionKey, long> .IteratorStart; while (this.lastSync.Iterate(ref iter)) { if (this.lastSync.entries[iter].value < vsync[i]) { this.lastSync.entries[iter].value = vsync[i]; } } } else if (vother[i] == PartitionedStreamEvent.PunctuationOtherTime) { partition = this.getPartitionKey(batch.key.col[i]); if (this.startTimeSelector != null) { vsync[i] = this.startTimeSelectorCompiled(vsync[i]); } if (this.lastSync.Lookup(partition, out index)) { this.lastSync.entries[index].value = Math.Max(vsync[i], this.lastSync.entries[index].value); } else { this.lastSync.Insert(ref index, partition, vsync[i]); } } continue; } partition = this.getPartitionKey(batch.key.col[i]); if (vsync[i] < vother[i]) { // insert event long old_vsync = vsync[i]; if (this.startTimeSelector != null) { vsync[i] = this.startTimeSelectorCompiled(vsync[i]); } if (vother[i] < StreamEvent.InfinitySyncTime) // not a start-edge { vother[i] = vsync[i] + this.durationSelectorCompiled(old_vsync, vother[i]); if (vother[i] > StreamEvent.MaxSyncTime) { throw new ArgumentOutOfRangeException(); } } if (vother[i] <= vsync[i]) { bv[i >> 6] |= (1L << (i & 0x3f)); } } else { if (vother[i] != long.MinValue) // not a CTI { // update the start time of the retract long old_vother = vother[i]; if (this.startTimeSelector != null) { vother[i] = this.startTimeSelectorCompiled(vother[i]); } // issue the correct end edge vsync[i] = vother[i] + this.durationSelectorCompiled(old_vother, vsync[i]); if (vother[i] >= vsync[i]) { bv[i >> 6] |= (1L << (i & 0x3f)); } } else if (this.startTimeSelector != null) { vsync[i] = this.startTimeSelectorCompiled(vsync[i]); } } if (!this.lastSync.Lookup(partition, out index)) { this.lastSync.Insert(ref index, partition, vsync[i]); } else if (vsync[i] < this.lastSync.entries[index].value) { throw new InvalidOperationException( "The operator AlterLifetime produced output out of sync-time order on an input event. The current internal sync time is " + this.lastSync.entries[index].value + ". The event's sync time is " + vsync[i].ToString(CultureInfo.InvariantCulture) + ". The event's partition key is " + partition.ToString() + ". The event's value is " + batch[i].ToString() + "."); } else { this.lastSync.entries[index].value = vsync[i]; } } this.Observer.OnNext(batch); }
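/// <summary> /// Consumes visible right rows while the right sync time does not exceed the left's: punctuations are added to the output batch, each row is matched against stored left edges with the same hash and an equal key to emit start edges, and the row is recorded in the right edge map unless the left side is already complete. /// </summary>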
protected override void ProcessRightBatch(StreamMessage <TKey, TRight> batch, out bool isBatchDone, out bool isBatchFree) { isBatchFree = true; while (true) { if (!GoToVisibleRow(batch)) { isBatchDone = true; return; } UpdateNextRightTime(batch.vsync.col[batch.iter]); if (this.nextRightTime > this.nextLeftTime) { isBatchDone = false; return; } if (batch.vother.col[batch.iter] == StreamEvent.PunctuationOtherTime) { AddPunctuationToBatch(batch.vsync.col[batch.iter]); batch.iter++; continue; } FastMap <ActiveEvent <TLeft> > .FindTraverser leftEdges = default; bool first = true; TKey key = default; TRight payload; var hash = batch.hash.col[batch.iter]; if (this.leftEdgeMap.Find(hash, ref leftEdges)) { while (leftEdges.Next(out int leftIndex)) { if (first) { key = batch.key.col[batch.iter]; first = false; } if (this.keyComparer(key, this.leftEdgeMap.Values[leftIndex].Key)) { payload = batch[batch.iter]; OutputStartEdge(this.nextRightTime, ref key, ref this.leftEdgeMap.Values[leftIndex].Payload, ref payload, hash); } } } if (!this.isLeftComplete) { if (first) { key = batch.key.col[batch.iter]; } payload = batch[batch.iter]; // potential rare recomputation int newIndex = this.rightEdgeMap.Insert(hash); this.rightEdgeMap.Values[newIndex].Populate(ref key, ref payload); } batch.iter++; } }
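/// <summary> /// Grouped snapshot aggregate over an ungrouped input: the grouping key is computed from the payload, held per-key states emit start edges when time advances (empty groups are removed), an existing group becoming active at a later time emits an end edge for its previous snapshot, and inserts/retractions accumulate or deaccumulate into the current state. /// </summary>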
public override unsafe void OnNext(StreamMessage <Empty, TInput> batch) { this.batch.iter = batch.iter; var count = batch.Count; fixed(long *col_vsync = batch.vsync.col) fixed(long *col_vother = batch.vother.col) fixed(long *col_bv = batch.bitvector.col) { var colpayload = batch.payload.col; for (int i = 0; i < count; i++) { if ((col_bv[i >> 6] & (1L << (i & 0x3f))) != 0) { if (col_vother[i] == long.MinValue) { // We have found a row that corresponds to punctuation OnPunctuation(col_vsync[i]); int c = this.batch.Count; this.batch.vsync.col[c] = col_vsync[i]; this.batch.vother.col[c] = long.MinValue; this.batch.key.col[c] = Empty.Default; this.batch.hash.col[c] = 0; this.batch.bitvector.col[c >> 6] |= 1L << (c & 0x3f); this.batch.Count++; if (this.batch.Count == Config.DataBatchSize) { FlushContents(); } } continue; } var colkey_i = this.keySelector(colpayload[i]); var col_hash_i = this.keyComparerGetHashCode(colkey_i); var syncTime = col_vsync[i]; bool cachedState = false; // Handle time moving forward if (syncTime > this.lastSyncTime) { /* Issue start edges for held aggregates */ if (this.currentState != null && this.heldAggregates.Count == 1) { // there is just one held aggregate, and currentState is set // so currentState has to be the held aggregate cachedState = true; if (this.currentState.active > 0) { int c = this.batch.Count; this.batch.vsync.col[c] = this.currentState.timestamp; this.batch.vother.col[c] = StreamEvent.InfinitySyncTime; this.batch.payload.col[c] = this.finalResultSelector(this.currentKey, this.computeResult(this.currentState.state)); this.batch.hash.col[c] = 0; this.batch.Count++; if (this.batch.Count == Config.DataBatchSize) { FlushContents(); } } else { this.aggregateByKey.Remove(this.currentKey, this.currentHash); this.currentState = null; } } else { int iter1 = FastDictionary <TKey, HeldState <TState> > .IteratorStart; while (this.heldAggregates.Iterate(ref iter1)) { var iter1entry = this.heldAggregates.entries[iter1]; if (iter1entry.value.active > 0) { int c = this.batch.Count; this.batch.vsync.col[c] = iter1entry.value.timestamp; this.batch.vother.col[c] = StreamEvent.InfinitySyncTime; this.batch.payload.col[c] = this.finalResultSelector(iter1entry.key, this.computeResult(iter1entry.value.state)); this.batch.hash.col[c] = 0; this.batch.Count++; if (this.batch.Count == Config.DataBatchSize) { FlushContents(); } } else { this.aggregateByKey.Remove(iter1entry.key); // , (currentKey, currentHash); this.currentState = null; } } // Time has moved forward, clear the held aggregates this.heldAggregates.Clear(); this.currentState = null; } // Since sync time changed, set lastSyncTime this.lastSyncTime = syncTime; } if (this.currentState == null || ((!this.isUngrouped) && (this.currentHash != col_hash_i || !this.keyComparerEquals(this.currentKey, colkey_i)))) { if (cachedState) { cachedState = false; this.heldAggregates.Clear(); } // Need to retrieve the key from the dictionary this.currentKey = colkey_i; this.currentHash = col_hash_i; if (!this.heldAggregates.Lookup(this.currentKey, this.currentHash, out int index)) { // First time group is active for this time if (!this.aggregateByKey.Lookup(this.currentKey, this.currentHash, out int aggindex)) { // New group. 
Create new state this.currentState = new HeldState <TState> { state = this.initialState(), timestamp = syncTime }; this.aggregateByKey.Insert(this.currentKey, this.currentState, this.currentHash); // No output because initial state is empty } else { this.currentState = this.aggregateByKey.entries[aggindex].value; if (syncTime > this.currentState.timestamp) { if (this.currentState.active > 0) { // Output end edge int c = this.batch.Count; this.batch.vsync.col[c] = syncTime; this.batch.vother.col[c] = this.currentState.timestamp; this.batch.payload.col[c] = this.finalResultSelector(this.currentKey, this.computeResult(this.currentState.state)); this.batch.hash.col[c] = 0; this.batch.Count++; if (this.batch.Count == Config.DataBatchSize) { FlushContents(); } } this.currentState.timestamp = syncTime; } } this.heldAggregates.Insert(ref index, this.currentKey, this.currentState); } else { // read new currentState from _heldAgg index this.currentState = this.heldAggregates.entries[index].value; } } else { if (syncTime > this.currentState.timestamp) { if (this.currentState.active > 0) { // Output end edge int c = this.batch.Count; this.batch.vsync.col[c] = syncTime; this.batch.vother.col[c] = this.currentState.timestamp; this.batch.payload.col[c] = this.finalResultSelector(this.currentKey, this.computeResult(this.currentState.state)); this.batch.hash.col[c] = 0; this.batch.Count++; if (this.batch.Count == Config.DataBatchSize) { FlushContents(); } } this.currentState.timestamp = syncTime; } } if (col_vsync[i] < col_vother[i]) // insert event { this.currentState.state = this.accumulate(this.currentState.state, col_vsync[i], colpayload[i]); this.currentState.active++; } else // is a retraction { this.currentState.state = this.deaccumulate(this.currentState.state, col_vsync[i], colpayload[i]); this.currentState.active--; } } } batch.Release(); batch.Return(); }
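/// <summary> /// Processes both join inputs in sync-time order: each visible row is matched against the opposite side's edge map to emit start edges, punctuations are forwarded, and rows are recorded in their own edge map unless the opposite side is already complete. /// </summary>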
protected override void ProcessBothBatches(StreamMessage <TKey, TLeft> leftBatch, StreamMessage <TKey, TRight> rightBatch, out bool leftBatchDone, out bool rightBatchDone, out bool leftBatchFree, out bool rightBatchFree) { leftBatchFree = rightBatchFree = true; if (!GoToVisibleRow(leftBatch)) { leftBatchDone = true; rightBatchDone = false; return; } UpdateNextLeftTime(leftBatch.vsync.col[leftBatch.iter]); if (!GoToVisibleRow(rightBatch)) { leftBatchDone = false; rightBatchDone = true; return; } UpdateNextRightTime(rightBatch.vsync.col[rightBatch.iter]); FastMap <ActiveEvent <TRight> > .FindTraverser rightEdges = default; FastMap <ActiveEvent <TLeft> > .FindTraverser leftEdges = default; while (true) { bool leftPunctuation = leftBatch.vother.col[leftBatch.iter] == StreamEvent.PunctuationOtherTime; bool rightPunctuation = rightBatch.vother.col[rightBatch.iter] == StreamEvent.PunctuationOtherTime; if (this.nextLeftTime <= this.nextRightTime) { if (leftPunctuation) { AddPunctuationToBatch(this.nextLeftTime); } else { bool first = true; TKey key = default; TLeft payload; var hash = leftBatch.hash.col[leftBatch.iter]; if (this.rightEdgeMap.Find(hash, ref rightEdges)) { while (rightEdges.Next(out int rightIndex)) { if (first) { key = leftBatch.key.col[leftBatch.iter]; first = false; } if (this.keyComparer(key, this.rightEdgeMap.Values[rightIndex].Key)) { payload = leftBatch[leftBatch.iter]; OutputStartEdge(this.nextLeftTime, ref key, ref payload, ref this.rightEdgeMap.Values[rightIndex].Payload, hash); } } } if (!this.isRightComplete) { if (first) { key = leftBatch.key.col[leftBatch.iter]; } payload = leftBatch[leftBatch.iter]; // potential rare recomputation int newIndex = this.leftEdgeMap.Insert(hash); this.leftEdgeMap.Values[newIndex].Populate(ref key, ref payload); } } leftBatch.iter++; if (!GoToVisibleRow(leftBatch)) { leftBatchDone = true; rightBatchDone = false; return; } UpdateNextLeftTime(leftBatch.vsync.col[leftBatch.iter]); } else { if (rightPunctuation) { AddPunctuationToBatch(this.nextRightTime); } else { bool first = true; TKey key = default; TRight payload; var hash = rightBatch.hash.col[rightBatch.iter]; if (this.leftEdgeMap.Find(hash, ref leftEdges)) { while (leftEdges.Next(out int leftIndex)) { if (first) { key = rightBatch.key.col[rightBatch.iter]; first = false; } if (this.keyComparer(key, this.leftEdgeMap.Values[leftIndex].Key)) { payload = rightBatch[rightBatch.iter]; OutputStartEdge(this.nextRightTime, ref key, ref this.leftEdgeMap.Values[leftIndex].Payload, ref payload, hash); } } } if (!this.isLeftComplete) { if (first) { key = rightBatch.key.col[rightBatch.iter]; } payload = rightBatch[rightBatch.iter]; // potential rare recomputation int newIndex = this.rightEdgeMap.Insert(hash); this.rightEdgeMap.Values[newIndex].Populate(ref key, ref payload); } } rightBatch.iter++; if (!GoToVisibleRow(rightBatch)) { leftBatchDone = false; rightBatchDone = true; return; } UpdateNextRightTime(rightBatch.vsync.col[rightBatch.iter]); } } }
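/// <summary> /// Partitioned pipe that tracks open and closed events per partition: start edges and interval starts go through ActOnStart, ends through ActOnEnd, advancing a partition's time triggers Purge, punctuations purge their partition, and low watermarks purge all partitions and are forwarded downstream. /// </summary>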
public override unsafe void OnNext(StreamMessage <TKey, TPayload> input) { var count = input.Count; fixed(long *src_bv = input.bitvector.col, src_vsync = input.vsync.col, src_vother = input.vother.col) fixed(int *src_hash = input.hash.col) { long *vsync = src_vsync; long *vother = src_vother; int * hash = src_hash; for (int i = 0; i < count; i++) { if ((src_bv[i >> 6] & (1L << (i & 0x3f))) == 0 || *vother < 0) { var partitionKey = this.getPartitionKey(input.key.col[i]); // Initialize per-partition state the first time a partition is seen if (!this.ClosedEvents.Lookup(partitionKey, out this.ClosedEventsIndex)) { this.ClosedEvents.Insert(ref this.ClosedEventsIndex, partitionKey, new SortedDictionary <long, FastDictionary2 <KHP, List <ActiveEvent> > >()); } if (!this.OpenEvents.Lookup(partitionKey, out this.OpenEventsIndex)) { this.OpenEvents.Insert(ref this.OpenEventsIndex, partitionKey, this.OpenEventsGenerator()); } if (!this.now.Lookup(partitionKey, out this.nowIndex)) { this.now.Insert(ref this.nowIndex, partitionKey, StreamEvent.MinSyncTime); } if (!this.CurrentTimeOpenEventBufferTime.Lookup(partitionKey, out this.CurrentTimeOpenEventBufferTimeIndex)) { this.CurrentTimeOpenEventBufferTime.Insert(ref this.CurrentTimeOpenEventBufferTimeIndex, partitionKey, StreamEvent.MinSyncTime); } if (!this.CurrentTimeOpenEventBuffer.Lookup(partitionKey, out this.CurrentTimeOpenEventBufferIndex)) { this.CurrentTimeOpenEventBuffer.Insert(ref this.CurrentTimeOpenEventBufferIndex, partitionKey, this.CurrentTimeOpenEventBufferGenerator()); } var sync = input.vsync.col[i]; if (this.now.entries[this.nowIndex].value < sync) { this.now.entries[this.nowIndex].value = sync; Purge(this.now.entries[this.nowIndex].value); } if (*vother == StreamEvent.InfinitySyncTime) { ActOnStart(input.payload.col[i], input.key.col[i], *hash, *vsync); } else if (*vother == PartitionedStreamEvent.LowWatermarkOtherTime) { PurgeGlobal(*vsync); this.batch.vsync.col[this.outputCount] = *vsync; this.batch.vother.col[this.outputCount] = *vother; this.batch[this.outputCount] = default; this.batch.key.col[this.outputCount] = default; this.batch.hash.col[this.outputCount] = 0; this.outputCount++; if (this.outputCount == Config.DataBatchSize) { FlushContents(); } } else if (*vother == PartitionedStreamEvent.PunctuationOtherTime) { Purge(*vsync); } else if (*vsync < *vother) { ActOnStart(input.payload.col[i], input.key.col[i], *hash, *vsync); ActOnEnd(input.payload.col[i], input.key.col[i], *hash, *vsync, *vother); } else { ActOnEnd(input.payload.col[i], input.key.col[i], *hash, *vother, *vsync); } } vsync++; vother++; hash++; } } input.Free(); }
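/// <summary> /// Forwards the left-input batch directly downstream; the batch is marked done but not freed, since ownership transfers to the observer. /// </summary>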
protected override void ProcessLeftBatch(StreamMessage <TKey, TPayload> leftBatch, out bool leftBatchDone, out bool leftBatchFree) { leftBatchDone = true; leftBatchFree = false; this.Observer.OnNext(leftBatch); }
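/// <summary> /// Hopping-window aggregate over an ungrouped stream: a single held state accumulates events for the current window, and when sync time advances the computed result is emitted as an interval from the window start to start + hop before the state is reset. /// </summary>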
public override unsafe void OnNext(StreamMessage <Empty, TInput> batch) { var count = batch.Count; var colkey = batch.key.col; var colpayload = batch.payload.col; fixed(long *col_vsync = batch.vsync.col) fixed(long *col_vother = batch.vother.col) fixed(long *col_bv = batch.bitvector.col) for (int i = 0; i < count; i++) { if ((col_bv[i >> 6] & (1L << (i & 0x3f))) != 0) { if (col_vother[i] == StreamEvent.PunctuationOtherTime) { // We have found a row that corresponds to punctuation OnPunctuation(col_vsync[i]); int c = this.batch.Count; this.batch.vsync.col[c] = col_vsync[i]; this.batch.vother.col[c] = StreamEvent.PunctuationOtherTime; this.batch.key.col[c] = Empty.Default; this.batch.hash.col[c] = 0; this.batch.bitvector.col[c >> 6] |= (1L << (c & 0x3f)); this.batch.Count++; if (this.batch.Count == Config.DataBatchSize) { FlushContents(); } } continue; } var syncTime = col_vsync[i]; // Handle time moving forward if (syncTime > this.lastSyncTime) { if (this.currentState != null) { int c = this.batch.Count; this.batch.vsync.col[c] = this.currentState.timestamp; this.batch.vother.col[c] = this.currentState.timestamp + this.hop; this.batch.payload.col[c] = this.computeResult(this.currentState.state); this.batch.key.col[c] = Empty.Default; this.batch.hash.col[c] = 0; this.batch.Count++; if (this.batch.Count == Config.DataBatchSize) { FlushContents(); } if (hasDisposableState) { DisposeStateLocal(); } this.currentState = null; } // Since sync time changed, set lastSyncTime this.lastSyncTime = syncTime; } if (this.currentState == null) { this.currentState = new HeldState <TState> { state = this.initialState(), timestamp = syncTime }; } else { if (syncTime > this.currentState.timestamp) { // Reset currentState if (hasDisposableState) { DisposeStateLocal(); } this.currentState.state = this.initialState(); this.currentState.timestamp = syncTime; } } this.currentState.state = this.accumulate(this.currentState.state, col_vsync[i], colpayload[i]); } batch.Release(); batch.Return(); }
/// <summary> /// Receives the next batch from the stream and adds it to the internal queue for later processing. /// </summary> /// <param name="batch">The incoming batch of stream events.</param> public void OnNext(StreamMessage <Empty, TPayload> batch) { this._queue.Add(batch); }
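/// <summary> /// Partitioned pattern-matching (AFA) pipe: tentative output and last sync time are tracked per partition to handle simultaneous events, active states are advanced and new activations started for each visible event, low watermarks clean up stale partitions and flush their tentative output, and punctuations flush their partition's tentative output before being forwarded. /// </summary>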
public override unsafe void OnNext(StreamMessage <TKey, TPayload> batch) { var stack = new Stack <int>(); var count = batch.Count; var dest_vsync = this.batch.vsync.col; var dest_vother = this.batch.vother.col; var destkey = this.batch.key.col; var dest_hash = this.batch.hash.col; var srckey = batch.key.col; var activeFindTraverser = new FastMap <GroupedActiveState <TKey, TRegister> > .FindTraverser(this.activeStates); fixed(long *src_bv = batch.bitvector.col, src_vsync = batch.vsync.col, src_vother = batch.vother.col) { fixed(int *src_hash = batch.hash.col) { for (int i = 0; i < count; i++) { if ((src_bv[i >> 6] & (1L << (i & 0x3f))) == 0) { var key = srckey[i]; var partitionKey = this.getPartitionKey(key); int partitionIndex = EnsurePartition(partitionKey); long synctime = src_vsync[i]; if (!this.IsSyncTimeSimultaneityFree) { int index; var tentativeVisibleTraverser = new FastMap <OutputEvent <TKey, TRegister> > .VisibleTraverser(this.tentativeOutput.entries[partitionIndex].value); if (synctime > this.lastSyncTime.entries[partitionIndex].value) // move time forward { this.seenEvent.Remove(key); if (this.tentativeOutput.Count > 0) { tentativeVisibleTraverser.currIndex = 0; while (tentativeVisibleTraverser.Next(out index, out int hash)) { var elem = this.tentativeOutput.entries[partitionIndex].value.Values[index]; dest_vsync[this.iter] = this.lastSyncTime.entries[partitionIndex].value; dest_vother[this.iter] = elem.other; this.batch.payload.col[this.iter] = elem.payload; destkey[this.iter] = elem.key; dest_hash[this.iter] = hash; this.iter++; if (this.iter == Config.DataBatchSize) { FlushContents(); dest_vsync = this.batch.vsync.col; dest_vother = this.batch.vother.col; destkey = this.batch.key.col; dest_hash = this.batch.hash.col; } } this.tentativeOutput.entries[partitionIndex].value.Clear(); // Clear the tentative output list } this.lastSyncTime.entries[partitionIndex].value = synctime; } if (this.seenEvent.Lookup(srckey[i], out index)) // Incoming event is a simultaneous one { if (this.seenEvent.entries[index].value == 1) // Detecting first duplicate, need to adjust state { this.seenEvent.entries[index].value = 2; // Delete tentative output for that key if (!this.IsSyncTimeSimultaneityFree) { var tentativeFindTraverser = new FastMap <OutputEvent <TKey, TRegister> > .FindTraverser(this.tentativeOutput.entries[partitionIndex].value); if (tentativeFindTraverser.Find(src_hash[i])) { while (tentativeFindTraverser.Next(out index)) { if (this.keyEqualityComparer(this.tentativeOutput.entries[partitionIndex].value.Values[index].key, srckey[i])) { tentativeFindTraverser.Remove(); } } } } // Delete active states for that key if (activeFindTraverser.Find(src_hash[i])) { while (activeFindTraverser.Next(out index)) { if (this.keyEqualityComparer(this.activeStates.Values[index].key, srckey[i])) { activeFindTraverser.Remove(); } } } } // Dont process this event continue; } else { this.seenEvent.Insert(srckey[i], 1); } } /* (1) Process currently active states */ bool ended = true; if (activeFindTraverser.Find(src_hash[i])) { int orig_index; // Track which active states need to be inserted after the current traversal var newActiveStates = new List <GroupedActiveState <TKey, TRegister> >(); while (activeFindTraverser.Next(out int index)) { orig_index = index; var state = this.activeStates.Values[index]; if (!this.keyEqualityComparer(state.key, srckey[i])) { continue; } if (state.PatternStartTimestamp + this.MaxDuration > synctime) { var currentStateMap = this.singleEventStateMap[state.state]; if 
(currentStateMap != null) { var m = currentStateMap.Length; for (int cnt = 0; cnt < m; cnt++) { var arcinfo = currentStateMap[cnt]; if (arcinfo.Fence(synctime, batch[i], state.register)) { var newReg = arcinfo.Transfer == null ? state.register : arcinfo.Transfer(synctime, batch[i], state.register); int ns = arcinfo.toState; while (true) { if (this.isFinal[ns]) { var otherTime = Math.Min(state.PatternStartTimestamp + this.MaxDuration, StreamEvent.InfinitySyncTime); if (!this.IsSyncTimeSimultaneityFree) { var tentativeOutputEntry = this.tentativeOutput.entries[partitionIndex].value; int ind = tentativeOutputEntry.Insert(src_hash[i]); tentativeOutputEntry.Values[ind].other = otherTime; tentativeOutputEntry.Values[ind].key = srckey[i]; tentativeOutputEntry.Values[ind].payload = newReg; } else { dest_vsync[this.iter] = synctime; dest_vother[this.iter] = otherTime; this.batch[this.iter] = newReg; destkey[this.iter] = srckey[i]; dest_hash[this.iter] = src_hash[i]; this.iter++; if (this.iter == Config.DataBatchSize) { FlushContents(); dest_vsync = this.batch.vsync.col; dest_vother = this.batch.vother.col; destkey = this.batch.key.col; dest_hash = this.batch.hash.col; } } } if (this.hasOutgoingArcs[ns]) { // Since we will eventually remove this state/index from activeStates, attempt to reuse this index for the outgoing state instead of deleting/re-adding // If index is already -1, this means we've already reused the state and must allocate/insert a new index for the outgoing state. if (index != -1) { this.activeStates.Values[index].key = srckey[i]; this.activeStates.Values[index].state = ns; this.activeStates.Values[index].register = newReg; this.activeStates.Values[index].PatternStartTimestamp = state.PatternStartTimestamp; index = -1; } else { // Do not attempt to insert directly into activeStates, as that could corrupt the traversal state. newActiveStates.Add(new GroupedActiveState <TKey, TRegister> { key = srckey[i], state = ns, register = newReg, PatternStartTimestamp = state.PatternStartTimestamp, }); } ended = false; // Add epsilon arc destinations to stack if (this.epsilonStateMap == null) { break; } if (this.epsilonStateMap[ns] != null) { for (int cnt2 = 0; cnt2 < this.epsilonStateMap[ns].Length; cnt2++) { stack.Push(this.epsilonStateMap[ns][cnt2]); } } } if (stack.Count == 0) { break; } ns = stack.Pop(); } if (this.IsDeterministic) { break; // We are guaranteed to have only one successful transition } } } } } if (index == orig_index) { activeFindTraverser.Remove(); } if (this.IsDeterministic) { break; // We are guaranteed to have only one active state } } // Now that we are done traversing the current active states, add any new ones. foreach (var newActiveState in newActiveStates) { this.activeStates.Insert(src_hash[i], newActiveState); } } /* (2) Start new activations from the start state(s) */ if (!this.AllowOverlappingInstances && !ended) { continue; } for (int counter = 0; counter < this.numStartStates; counter++) { int startState = this.startStates[counter]; var startStateMap = this.singleEventStateMap[startState]; if (startStateMap != null) { var m = startStateMap.Length; for (int cnt = 0; cnt < m; cnt++) { var arcinfo = startStateMap[cnt]; if (arcinfo.Fence(synctime, batch[i], this.defaultRegister)) { var newReg = arcinfo.Transfer == null ? 
this.defaultRegister : arcinfo.Transfer(synctime, batch[i], this.defaultRegister); int ns = arcinfo.toState; while (true) { if (this.isFinal[ns]) { var otherTime = Math.Min(synctime + this.MaxDuration, StreamEvent.InfinitySyncTime); if (!this.IsSyncTimeSimultaneityFree) { var tentativeOutputEntry = this.tentativeOutput.entries[partitionIndex].value; int ind = tentativeOutputEntry.Insert(src_hash[i]); tentativeOutputEntry.Values[ind].other = otherTime; tentativeOutputEntry.Values[ind].key = srckey[i]; tentativeOutputEntry.Values[ind].payload = newReg; } else { dest_vsync[this.iter] = synctime; dest_vother[this.iter] = otherTime; this.batch[this.iter] = newReg; destkey[this.iter] = srckey[i]; dest_hash[this.iter] = src_hash[i]; this.iter++; if (this.iter == Config.DataBatchSize) { FlushContents(); dest_vsync = this.batch.vsync.col; dest_vother = this.batch.vother.col; destkey = this.batch.key.col; dest_hash = this.batch.hash.col; } } } if (this.hasOutgoingArcs[ns]) { int index = this.activeStates.Insert(src_hash[i]); this.activeStates.Values[index].key = srckey[i]; this.activeStates.Values[index].state = ns; this.activeStates.Values[index].register = newReg; this.activeStates.Values[index].PatternStartTimestamp = synctime; // Add epsilon arc destinations to stack if (this.epsilonStateMap == null) { break; } if (this.epsilonStateMap[ns] != null) { for (int cnt2 = 0; cnt2 < this.epsilonStateMap[ns].Length; cnt2++) { stack.Push(this.epsilonStateMap[ns][cnt2]); } } } if (stack.Count == 0) { break; } ns = stack.Pop(); } if (this.IsDeterministic) { break; // We are guaranteed to have only one successful transition } } } } if (this.IsDeterministic) { break; // We are guaranteed to have only one start state } } } else if (src_vother[i] == PartitionedStreamEvent.LowWatermarkOtherTime) { long synctime = src_vsync[i]; if (!this.IsSyncTimeSimultaneityFree) { // Clean active states for stale partitions int seenEventIndex; if (this.activeStates.Count > 0) { var activeVisibleTraverser = new FastMap <GroupedActiveState <TKey, TRegister> > .VisibleTraverser(this.activeStates); while (activeVisibleTraverser.Next(out int activeStateIndex, out _)) { var activeState = this.activeStates.Values[activeStateIndex]; if (synctime >= activeState.PatternStartTimestamp + this.MaxDuration) { // Since we know this partition is stale, remove it from seenEvent as well. this.seenEvent.Remove(activeState.key); this.activeStates.Remove(activeStateIndex); } } } // Clean seen events from stale partitions. This enumeration is necessary for stale partitions without active state. 
seenEventIndex = FastDictionary2 <TPartitionKey, long> .IteratorStart; int partitionIndex; while (this.seenEvent.Iterate(ref seenEventIndex)) { var partitionKey = this.getPartitionKey(this.seenEvent.entries[seenEventIndex].key); if (this.lastSyncTime.Lookup(partitionKey, out partitionIndex) && synctime > this.lastSyncTime.entries[partitionIndex].value) { this.seenEvent.Remove(this.seenEvent.entries[seenEventIndex].key); } } // Clean last synctime and tentative output from stale partitions (these two need to be kept in sync) partitionIndex = FastDictionary2 <TPartitionKey, long> .IteratorStart; while (this.lastSyncTime.Iterate(ref partitionIndex)) { // Check to see if partition is stale if (synctime > this.lastSyncTime.entries[partitionIndex].value) { // Emit tentative output from stale partitions var tentativeVisibleTraverser = new FastMap <OutputEvent <TKey, TRegister> > .VisibleTraverser(this.tentativeOutput.entries[partitionIndex].value); while (tentativeVisibleTraverser.Next(out int index, out int hash)) { var elem = this.tentativeOutput.entries[partitionIndex].value.Values[index]; this.batch.vsync.col[this.iter] = this.lastSyncTime.entries[partitionIndex].value; this.batch.vother.col[this.iter] = elem.other; this.batch.payload.col[this.iter] = elem.payload; this.batch.key.col[this.iter] = elem.key; this.batch.hash.col[this.iter] = hash; this.iter++; if (this.iter == Config.DataBatchSize) { FlushContents(); dest_vsync = this.batch.vsync.col; dest_vother = this.batch.vother.col; destkey = this.batch.key.col; dest_hash = this.batch.hash.col; } } // Remove the partition var partitionKey = this.lastSyncTime.entries[partitionIndex].key; this.tentativeOutput.Remove(partitionKey); this.lastSyncTime.Remove(partitionKey); } } } // Update dest_* on low watermark in case this event will hit the batch boundary and allocate a new batch OnLowWatermark(synctime); dest_vsync = this.batch.vsync.col; dest_vother = this.batch.vother.col; destkey = this.batch.key.col; dest_hash = this.batch.hash.col; } else if (src_vother[i] == PartitionedStreamEvent.PunctuationOtherTime) { var key = srckey[i]; long synctime = src_vsync[i]; if (!this.IsSyncTimeSimultaneityFree) { var partitionKey = this.getPartitionKey(key); int partitionIndex = EnsurePartition(partitionKey); if (synctime > this.lastSyncTime.entries[partitionIndex].value) // move time forward { this.seenEvent.Remove(srckey[i]); var tentativeVisibleTraverser = new FastMap <OutputEvent <TKey, TRegister> > .VisibleTraverser(this.tentativeOutput.entries[partitionIndex].value); if (this.tentativeOutput.Count > 0) { tentativeVisibleTraverser.currIndex = 0; while (tentativeVisibleTraverser.Next(out int index, out int hash)) { var elem = this.tentativeOutput.entries[partitionIndex].value.Values[index]; this.batch.vsync.col[this.iter] = this.lastSyncTime.entries[partitionIndex].value; this.batch.vother.col[this.iter] = elem.other; this.batch.payload.col[this.iter] = elem.payload; this.batch.key.col[this.iter] = elem.key; this.batch.hash.col[this.iter] = hash; this.iter++; if (this.iter == Config.DataBatchSize) { FlushContents(); dest_vsync = this.batch.vsync.col; dest_vother = this.batch.vother.col; destkey = this.batch.key.col; dest_hash = this.batch.hash.col; } } this.tentativeOutput.entries[partitionIndex].value.Clear(); // Clear the tentative output list } this.lastSyncTime.entries[partitionIndex].value = synctime; } } this.batch.vsync.col[this.iter] = synctime; this.batch.vother.col[this.iter] = long.MinValue; this.batch.payload.col[this.iter] = default; 
this.batch.key.col[this.iter] = key; this.batch.hash.col[this.iter] = src_hash[i]; this.batch.bitvector.col[this.iter >> 6] |= (1L << (this.iter & 0x3f)); this.iter++; if (this.iter == Config.DataBatchSize) { FlushContents(); dest_vsync = this.batch.vsync.col; dest_vother = this.batch.vother.col; destkey = this.batch.key.col; dest_hash = this.batch.hash.col; } } } } } batch.Free(); }
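A note on the row-validity bitvector tested throughout the operators above: bit i of the vector marks row i as non-data (filtered or punctuation), so data paths check for a zero bit before processing. The following is a minimal standalone sketch of that convention, with illustrative names (not the library's own types):

static class RowBitVector
{
    // One 64-bit word holds 64 flags; row i lives in word i / 64 at bit position i % 64.
    public static bool IsSet(long[] bv, int i) => (bv[i >> 6] & (1L << (i & 0x3f))) != 0;

    // Mark row i as non-data (e.g. a punctuation row or a filtered-out event).
    public static void Set(long[] bv, int i) => bv[i >> 6] |= 1L << (i & 0x3f);

    // Allocate enough words for rowCount rows (ceiling of rowCount / 64).
    public static long[] Allocate(int rowCount) => new long[(rowCount + 63) >> 6];
}

With this convention a batch of N rows needs only N/8 bytes of validity metadata, and the `i >> 6` / `i & 0x3f` pair avoids a division and modulo per row.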
protected override void ProcessBothBatches(StreamMessage <TKey, TLeft> leftBatch, StreamMessage <TKey, TRight> rightBatch, out bool leftBatchDone, out bool rightBatchDone, out bool leftBatchFree, out bool rightBatchFree) { ProcessLeftBatch(leftBatch, out leftBatchDone, out leftBatchFree); ProcessRightBatch(rightBatch, out rightBatchDone, out rightBatchFree); }
protected override void ProcessBothBatches(StreamMessage <CompoundGroupKey <PartitionKey <TPartitionKey>, TGroupKey>, TLeft> leftBatch, StreamMessage <CompoundGroupKey <PartitionKey <TPartitionKey>, TGroupKey>, TRight> rightBatch, out bool leftBatchDone, out bool rightBatchDone, out bool leftBatchFree, out bool rightBatchFree) { ProcessLeftBatch(leftBatch, out leftBatchDone, out leftBatchFree); ProcessRightBatch(rightBatch, out rightBatchDone, out rightBatchFree); }
public override unsafe void OnNext(StreamMessage <TKey, TPayload> batch) { var count = batch.Count; var srckey = batch.key.col; fixed(long *src_bv = batch.bitvector.col, src_vsync = batch.vsync.col, src_vother = batch.vother.col) { fixed(int *src_hash = batch.hash.col) { for (int i = 0; i < count; i++) { if ((src_bv[i >> 6] & (1L << (i & 0x3f))) == 0) { var partitionKey = this.getPartitionKey(srckey[i]); int partitionIndex = EnsurePartition(partitionKey); long synctime = src_vsync[i]; if (synctime > this.lastSyncTime.entries[partitionIndex].value) // move time forward { ProcessCurrentTimestamp(partitionIndex); this.lastSyncTime.entries[partitionIndex].value = synctime; } int keyHeads_index; bool keyHeadExists = false; var keyHeadsFindTraverser = new FastMap <TKey> .FindTraverser(this.keyHeads.entries[partitionIndex].value); if (keyHeadsFindTraverser.Find(src_hash[i])) { while (keyHeadsFindTraverser.Next(out keyHeads_index)) { if (!this.keyEqualityComparer(this.keyHeads.entries[partitionIndex].value.Values[keyHeads_index], srckey[i])) { continue; } // Found entry, this key has been processed before keyHeadExists = true; break; } } if (!keyHeadExists) { // Apply new transitions, update existing transitions bool found = this.activeFindTraverser.Find(src_hash[i]); if (found) { while (this.activeFindTraverser.Next(out int activeFind_index)) { var state = this.activeStates.Values[activeFind_index]; if (!this.keyEqualityComparer(state.key, srckey[i])) { continue; } // TODO: Found entry, create and accumulate new tentative transitions from current state if (state.PatternStartTimestamp + this.MaxDuration > synctime) { var currentStateMap = this.multiEventStateMap[state.toState]; if (currentStateMap != null) { var m = currentStateMap.Length; for (int cnt = 0; cnt < m; cnt++) { var arcinfo = currentStateMap[cnt]; if (activeFind_index == -1) { activeFind_index = this.activeStates.Insert(src_hash[i]); } this.activeStates.Values[activeFind_index].arcinfo = arcinfo; this.activeStates.Values[activeFind_index].key = state.key; this.activeStates.Values[activeFind_index].fromState = state.toState; this.activeStates.Values[activeFind_index].toState = arcinfo.toState; this.activeStates.Values[activeFind_index].PatternStartTimestamp = state.PatternStartTimestamp; this.activeStates.Values[activeFind_index].register = state.register; this.activeStates.Values[activeFind_index].accumulator = arcinfo.Initialize(synctime, state.register); this.activeStates.Values[activeFind_index].accumulator = arcinfo.Accumulate(synctime, batch.payload.col[i], state.register, this.activeStates.Values[activeFind_index].accumulator); activeFind_index = -1; } } } // Remove current state if (activeFind_index != -1) { this.activeFindTraverser.Remove(); } } } // Insert & accumulate new tentative transitions from start state for (int counter = 0; counter < this.numStartStates; counter++) { int startState = this.startStates[counter]; var startStateMap = this.multiEventStateMap[startState]; var m = startStateMap.Length; for (int cnt = 0; cnt < m; cnt++) { var arcinfo = startStateMap[cnt]; int index = this.activeFindTraverser.InsertAt(); // have to ensure the new states go to the end of the list this.activeStates.Values[index].arcinfo = arcinfo; this.activeStates.Values[index].key = srckey[i]; this.activeStates.Values[index].fromState = startState; this.activeStates.Values[index].toState = arcinfo.toState; this.activeStates.Values[index].PatternStartTimestamp = synctime; this.activeStates.Values[index].register = this.defaultRegister; 
this.activeStates.Values[index].accumulator = arcinfo.Initialize(synctime, this.defaultRegister); this.activeStates.Values[index].accumulator = arcinfo.Accumulate(synctime, batch.payload.col[i], this.defaultRegister, this.activeStates.Values[index].accumulator); } } // Update keyHeads to indicate that this key has been inserted keyHeads_index = this.keyHeads.entries[partitionIndex].value.Insert(src_hash[i]); this.keyHeads.entries[partitionIndex].value.Values[keyHeads_index] = srckey[i]; // Done processing this event continue; } // Not the first insert of this key for this timestamp, perform accumulate for all tentative states if (this.activeFindTraverser.Find(src_hash[i])) { while (this.activeFindTraverser.Next(out int activeFind_index)) { var state2 = this.activeStates.Values[activeFind_index]; if (!this.keyEqualityComparer(state2.key, srckey[i])) { continue; } // Found tentative entry, accumulate this.activeStates.Values[activeFind_index].accumulator = state2.arcinfo.Accumulate(synctime, batch.payload.col[i], state2.register, state2.accumulator); } } } else if (src_vother[i] == PartitionedStreamEvent.LowWatermarkOtherTime) { int partitionIndex = FastDictionary2 <TPartitionKey, List <TKey> > .IteratorStart; long synctime = src_vsync[i]; while (this.lastSyncTime.Iterate(ref partitionIndex)) { if (synctime > this.lastSyncTime.entries[partitionIndex].value) // move time forward { ProcessCurrentTimestamp(partitionIndex); this.lastSyncTime.entries[partitionIndex].value = synctime; } } OnLowWatermark(synctime); } else if (src_vother[i] == PartitionedStreamEvent.PunctuationOtherTime) { var partitionKey = this.getPartitionKey(srckey[i]); int partitionIndex = EnsurePartition(partitionKey); long synctime = src_vsync[i]; if (synctime > this.lastSyncTime.entries[partitionIndex].value) // move time forward { ProcessCurrentTimestamp(partitionIndex); this.lastSyncTime.entries[partitionIndex].value = synctime; } } } } } batch.Free(); }
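The traversal above folds every simultaneous event for a key into an arc's accumulator (Initialize on the first event of a timestamp, Accumulate on each subsequent one) and only lets the arc fire once time advances. The sketch below illustrates that contract with a hypothetical counting arc; the class, method order, and signatures are assumptions for illustration, not the actual arc types:

// Hypothetical multi-event arc: fires only if at least `register` events arrived at one timestamp.
sealed class CountAtTimestampArc
{
    public long Initialize(long timestamp, int register) => 0;

    public long Accumulate(long timestamp, string payload, int register, long acc) => acc + 1;

    public bool Fence(long timestamp, long acc, int register) => acc >= register;
}

static class MultiEventArcDemo
{
    public static void Main()
    {
        var arc = new CountAtTimestampArc();
        long acc = arc.Initialize(100, register: 2);        // first event of the timestamp
        acc = arc.Accumulate(100, "a", 2, acc);
        acc = arc.Accumulate(100, "b", 2, acc);             // a simultaneous event for the same key
        System.Console.WriteLine(arc.Fence(100, acc, 2));   // True: the arc may fire when time moves on
    }
}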
public override unsafe void OnNext(StreamMessage <Empty, TInput> batch) { this.batch.iter = batch.iter; var count = batch.Count; var colpayload = batch.payload.col; fixed(long *col_vsync = batch.vsync.col) fixed(long *col_vother = batch.vother.col) fixed(int *col_hash = batch.hash.col) fixed(long *col_bv = batch.bitvector.col) { for (int i = 0; i < count; i++) { if ((col_bv[i >> 6] & (1L << (i & 0x3f))) != 0) { if (col_vother[i] == StreamEvent.PunctuationOtherTime) { // We have found a row that corresponds to punctuation OnPunctuation(col_vsync[i]); int c = this.batch.Count; this.batch.vsync.col[c] = col_vsync[i]; this.batch.vother.col[c] = StreamEvent.PunctuationOtherTime; this.batch.key.col[c] = Empty.Default; this.batch.hash.col[c] = 0; this.batch.bitvector.col[c >> 6] |= 1L << (c & 0x3f); this.batch.Count++; if (this.batch.Count == Config.DataBatchSize) { FlushContents(); } } continue; } var syncTime = col_vsync[i]; // Handle time moving forward if (syncTime > this.lastSyncTime) { AdvanceTime(syncTime); } if (this.currentState == null) { this.currentEcqHeldState = null; this.currentState = new HeldState <TState> { state = this.initialState(), timestamp = syncTime }; this.held = true; // No output because initial state is empty } else { if (syncTime > this.currentState.timestamp) { if (this.currentState.active > 0) { // Output end edge int c = this.batch.Count; this.batch.vsync.col[c] = syncTime; this.batch.vother.col[c] = this.currentState.timestamp; this.batch.payload.col[c] = this.computeResult(this.currentState.state); this.batch.key.col[c] = Empty.Default; this.batch.hash.col[c] = 0; this.batch.Count++; if (this.batch.Count == Config.DataBatchSize) { FlushContents(); } } this.currentState.timestamp = syncTime; this.held = true; } } if (col_vsync[i] < col_vother[i]) // insert event { this.currentState.state = this.accumulate(this.currentState.state, col_vsync[i], colpayload[i]); this.currentState.active++; // Update ECQ if (col_vother[i] < StreamEvent.InfinitySyncTime) { if ((this.currentEcqHeldState == null) || (this.currentEcqHeldState.timestamp != col_vother[i])) { if (this.ecq.Count > 0) { this.currentEcqHeldState = this.ecq.PeekLast(); if (this.currentEcqHeldState.timestamp != col_vother[i]) { this.currentEcqHeldState = new HeldState <TState> { state = this.initialState(), timestamp = col_vother[i] }; this.ecq.Enqueue(ref this.currentEcqHeldState); } } else { this.currentEcqHeldState = new HeldState <TState> { state = this.initialState(), timestamp = col_vother[i] }; this.ecq.Enqueue(ref this.currentEcqHeldState); } } this.currentEcqHeldState.state = this.accumulate(this.currentEcqHeldState.state, col_vsync[i], colpayload[i]); this.currentEcqHeldState.active++; } } else // is a retraction { this.currentState.state = this.deaccumulate(this.currentState.state, col_vsync[i], colpayload[i]); this.currentState.active--; } } } batch.Release(); batch.Return(); }
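The operator above maintains a running snapshot state by accumulating each interval at its start time and compensating (deaccumulating) it at its end time via the held ECQ entries. A minimal sketch of that idea for a sum aggregate, independent of the operator's pooled data structures:

static class SnapshotSumDemo
{
    static long Accumulate(long state, long payload) => state + payload;
    static long Deaccumulate(long state, long payload) => state - payload;

    public static void Main()
    {
        long state = 0;                   // initial (empty) state
        state = Accumulate(state, 5);     // interval A starts
        state = Accumulate(state, 3);     // interval B starts
        System.Console.WriteLine(state);  // 8: both intervals are live
        state = Deaccumulate(state, 5);   // interval A ends (its queued compensation fires)
        System.Console.WriteLine(state);  // 3: only interval B remains
    }
}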
public override unsafe void OnNext(StreamMessage <TKey, TPayload> batch) { var count = batch.Count; fixed(long *bv = batch.bitvector.col) fixed(long *vsync = batch.vsync.col) fixed(long *vother = batch.vother.col) { for (int i = 0; i < count; i++) { if ((bv[i >> 6] & (1L << (i & 0x3f))) == 0) { var partition = this.getPartitionKey(batch.key.col[i]); if (!this.partitionData.Lookup(partition, out int timeIndex)) { timeIndex = AllocatePartition(partition); } else if (batch.vsync.col[i] > this.partitionData.entries[timeIndex].value.lastSyncTime) { ReachTime(batch.vsync.col[i]); } if (batch.vother.col[i] == StreamEvent.InfinitySyncTime) // Start edge { int ind = this.output.Count++; this.output.vsync.col[ind] = vsync[i] - ((vsync[i] - this.offset) % this.progress + this.progress) % this.progress; this.output.vother.col[ind] = StreamEvent.InfinitySyncTime; this.output.key.col[ind] = batch.key.col[i]; this.output[ind] = batch[i]; this.output.hash.col[ind] = batch.hash.col[i]; if (this.output.Count == Config.DataBatchSize) { FlushContents(); } } else if (batch.vother.col[i] > batch.vsync.col[i]) // Interval { int ind = this.output.Count++; this.output.vsync.col[ind] = vsync[i] - ((vsync[i] - this.offset) % this.progress + this.progress) % this.progress; var temp = Math.Max(vother[i] + this.skip - 1, vsync[i] + this.width); this.output.vother.col[ind] = temp - ((temp - (this.offset + this.width)) % this.skip + this.skip) % this.skip; this.output.key.col[ind] = batch.key.col[i]; this.output[ind] = batch[i]; this.output.hash.col[ind] = batch.hash.col[i]; if (this.output.Count == Config.DataBatchSize) { FlushContents(); } } else // End edge { var endPointHeap = this.partitionData.entries[timeIndex].value.endPointHeap; var intervalMap = this.partitionData.entries[timeIndex].value.intervalMap; var temp = Math.Max(vsync[i] + this.skip - 1, vother[i] + this.width); int index = intervalMap.Insert(batch.hash.col[i]); intervalMap.Values[index].Populate(batch.key.col[i], batch[i], batch.hash.col[i], vother[i] - ((vother[i] - this.offset) % this.progress + this.progress) % this.progress); endPointHeap.Insert(temp - ((temp - (this.offset + this.width)) % this.skip + this.skip) % this.skip, index); } } else if (batch.vother.col[i] == PartitionedStreamEvent.LowWatermarkOtherTime) { ReachTime(batch.vsync.col[i]); int ind = this.output.Count++; this.output.vsync.col[ind] = vsync[i] - ((vsync[i] - this.offset) % this.progress + this.progress) % this.progress; this.output.vother.col[ind] = PartitionedStreamEvent.LowWatermarkOtherTime; this.output.key.col[ind] = default; this.output[ind] = default; this.output.hash.col[ind] = 0; this.output.bitvector.col[ind >> 6] |= 1L << (ind & 0x3f); if (this.output.Count == Config.DataBatchSize) { FlushContents(); } } else if (batch.vother.col[i] == PartitionedStreamEvent.PunctuationOtherTime) { var partition = this.getPartitionKey(batch.key.col[i]); if (!this.partitionData.Lookup(partition, out int timeIndex)) { timeIndex = AllocatePartition(partition); } ReachTime(timeIndex, batch.vsync.col[i]); int ind = this.output.Count++; this.output.vsync.col[ind] = vsync[i] - ((vsync[i] - this.offset) % this.progress + this.progress) % this.progress; this.output.vother.col[ind] = long.MinValue; this.output.key.col[ind] = batch.key.col[i]; this.output[ind] = default; this.output.hash.col[ind] = batch.hash.col[i]; this.output.bitvector.col[ind >> 6] |= (1L << (ind & 0x3f)); if (this.output.Count == Config.DataBatchSize) { FlushContents(); } } } } batch.Free(); }
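The expression `v - ((v - offset) % p + p) % p` used above snaps a timestamp down to the window grid { offset + k * p }. The inner `% p` can be negative in C# when (v - offset) is negative, so the `+ p) % p` step forces a non-negative remainder and the result is always the largest grid point not exceeding v. A small worked sketch (names are illustrative):

static class HopSnapDemo
{
    // Snap v down to the grid { offset + k * period : k integer }.
    static long SnapDown(long v, long offset, long period)
        => v - (((v - offset) % period + period) % period);

    public static void Main()
    {
        System.Console.WriteLine(SnapDown(17, offset: 2, period: 5));  // 17 (already on the grid)
        System.Console.WriteLine(SnapDown(18, offset: 2, period: 5));  // 17
        System.Console.WriteLine(SnapDown(-1, offset: 2, period: 5));  // -3 (still floors correctly below offset)
    }
}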
public unsafe override void OnNext(StreamMessage <TKey, TPayload> batch) { batch.vsync = batch.vsync.MakeWritable(this.pool.longPool); batch.vother = batch.vother.MakeWritable(this.pool.longPool); batch.bitvector = batch.bitvector.MakeWritable(this.pool.bitvectorPool); var count = batch.Count; fixed(long *vsync = batch.vsync.col) fixed(long *vother = batch.vother.col) fixed(long *bv = batch.bitvector.col) { for (int i = 0; i < count; i++) { if ((bv[i >> 6] & (1L << (i & 0x3f))) != 0) { if (vother[i] == long.MinValue) { if ((this.startTimeSelector != null) && (vsync[i] < StreamEvent.InfinitySyncTime)) { // create a new punctuation for the modified timestamp vsync[i] = this.startTimeSelectorCompiled(vsync[i]); if (vsync[i] > StreamEvent.MaxSyncTime) { throw new ArgumentOutOfRangeException(); } } this.lastSync = Math.Max(vsync[i], this.lastSync); } continue; } if (vsync[i] < vother[i]) { // insert event long old_vsync = vsync[i]; if (this.startTimeSelector != null) { vsync[i] = this.startTimeSelectorCompiled(vsync[i]); } if (vother[i] < StreamEvent.InfinitySyncTime) // not a start-edge { vother[i] = vsync[i] + this.durationSelectorCompiled(old_vsync, vother[i]); if (vother[i] > StreamEvent.MaxSyncTime) { throw new ArgumentOutOfRangeException(); } } if (vother[i] <= vsync[i]) { bv[i >> 6] |= (1L << (i & 0x3f)); } } else { if (vother[i] != long.MinValue) // not a CTI { // update the start time of the retract long old_vother = vother[i]; if (this.startTimeSelector != null) { vother[i] = this.startTimeSelectorCompiled(vother[i]); } // issue the correct end edge vsync[i] = vother[i] + this.durationSelectorCompiled(old_vother, vsync[i]); if (vother[i] >= vsync[i]) { bv[i >> 6] |= (1L << (i & 0x3f)); } } else { if (this.startTimeSelector != null) { vsync[i] = this.startTimeSelectorCompiled(vsync[i]); } } } if (vsync[i] < this.lastSync) { throw new InvalidOperationException( "The operator AlterLifetime produced output out of sync-time order on an input event. The current internal sync time is " + this.lastSync.ToString(CultureInfo.InvariantCulture) + ". The event's sync time is " + vsync[i].ToString(CultureInfo.InvariantCulture) + ". The event's value is " + batch[i].ToString() + "."); } this.lastSync = vsync[i]; } } this.Observer.OnNext(batch); }
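The lifetime-alteration logic above rewrites each event's start time through a selector, recomputes its end from a duration selector, and invalidates events whose new lifetime becomes empty. The following sketch shows that shape in isolation, under assumed selector signatures; it is illustrative only and omits the operator's punctuation and ordering checks:

static class AlterLifetimeDemo
{
    static (long start, long end, bool dropped) Alter(
        long start, long end,
        System.Func<long, long> newStart,
        System.Func<long, long, long> newDuration)
    {
        long oldStart = start;
        start = newStart(start);
        end = start + newDuration(oldStart, end);
        return (start, end, dropped: end <= start);   // an empty interval carries no snapshot
    }

    public static void Main()
    {
        // Shift starts forward by 10 and impose a fixed duration of 5.
        var r = Alter(100, 200, s => s + 10, (s, e) => 5);
        System.Console.WriteLine($"{r.start} .. {r.end} dropped={r.dropped}");  // 110 .. 115 dropped=False
    }
}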
public override unsafe void OnNext(StreamMessage <TKey, TInput> batch) { this.batch.iter = batch.iter; var count = batch.Count; var colkey = batch.key.col; var colpayload = batch.payload.col; fixed(long *col_vsync = batch.vsync.col) fixed(long *col_vother = batch.vother.col) fixed(int *col_hash = batch.hash.col) fixed(long *col_bv = batch.bitvector.col) for (int i = 0; i < count; i++) { if ((col_bv[i >> 6] & (1L << (i & 0x3f))) != 0) { if (col_vother[i] == StreamEvent.PunctuationOtherTime) { // We have found a row that corresponds to punctuation OnPunctuation(col_vsync[i]); int c = this.batch.Count; this.batch.vsync.col[c] = col_vsync[i]; this.batch.vother.col[c] = StreamEvent.PunctuationOtherTime; this.batch.key.col[c] = default; this.batch.hash.col[c] = 0; this.batch.bitvector.col[c >> 6] |= (1L << (c & 0x3f)); this.batch.Count++; if (this.batch.Count == Config.DataBatchSize) { FlushContents(); } } continue; } var syncTime = col_vsync[i]; // Handle time moving forward if (syncTime > this.lastSyncTime) { AdvanceTime(syncTime); } // Need to retrieve the key from the dictionary HeldState <TState> heldState; if (!this.aggregateByKey.Lookup(colkey[i], col_hash[i], out int aggindex)) { // New group. Create new state heldState = new HeldState <TState> { state = this.initialState(), timestamp = syncTime }; this.heldAggregates.Add(this.aggregateByKey.Insert(colkey[i], heldState, col_hash[i])); // No output because initial state is empty } else if (this.heldAggregates.Add(aggindex)) { // First time group is active for this time heldState = this.aggregateByKey.entries[aggindex].value; if (syncTime > heldState.timestamp) { if (heldState.active > 0) { // Output end edge int c = this.batch.Count; this.batch.vsync.col[c] = syncTime; this.batch.vother.col[c] = heldState.timestamp; this.batch.payload.col[c] = this.computeResult(heldState.state); this.batch.key.col[c] = colkey[i]; this.batch.hash.col[c] = colkey[i].GetHashCode(); this.batch.Count++; if (this.batch.Count == Config.DataBatchSize) { this.batch.iter = batch.iter; FlushContents(); this.batch.iter = batch.iter; } } heldState.timestamp = syncTime; } } else { // read new currentState from _heldAgg index heldState = this.aggregateByKey.entries[aggindex].value; } if (col_vsync[i] < col_vother[i]) // insert event { heldState.state = this.accumulate(heldState.state, col_vsync[i], colpayload[i]); heldState.active++; // Update ECQ if (col_vother[i] < StreamEvent.InfinitySyncTime) { FastDictionary <TKey, StateAndActive <TState> > state; int index; if (this.ecq.Count > 0) { if (!this.ecq.TryGetValue(col_vother[i], out state)) { this.ecqEntryPool.Get(out state); state.Lookup(colkey[i], col_hash[i], out index); state.Insert(ref index, colkey[i], new StateAndActive <TState> { state = this.initialState() }); this.ecq.Add(col_vother[i], state); } else { if (!state.Lookup(colkey[i], col_hash[i], out index)) { state.Insert(ref index, colkey[i], new StateAndActive <TState> { state = this.initialState() }); } } } else { this.ecqEntryPool.Get(out state); state.Lookup(colkey[i], col_hash[i], out index); state.Insert(ref index, colkey[i], new StateAndActive <TState> { state = this.initialState() }); this.ecq.Add(col_vother[i], state); } state.entries[index].value.state = this.accumulate(state.entries[index].value.state, col_vsync[i], colpayload[i]); state.entries[index].value.active++; } } else // is a retraction { heldState.state = this.deaccumulate(heldState.state, col_vsync[i], colpayload[i]); heldState.active--; } } batch.Release(); batch.Return(); }
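In the grouped variant above, the end-compensation queue maps each future end time to a per-key map of states to subtract once that time is reached. The sketch below restates that layout with ordinary collections purely for illustration; the operator itself uses pooled Fast* dictionaries rather than these types:

using System.Collections.Generic;

static class GroupedEcqDemo
{
    public static void Main()
    {
        // end time -> (key -> accumulated payload to subtract when that time is reached)
        var ecq = new SortedDictionary<long, Dictionary<string, long>>();

        // An interval for key "A" with payload 5 ends at time 300: record the compensation.
        if (!ecq.TryGetValue(300, out var perKey)) { perKey = new Dictionary<string, long>(); ecq.Add(300, perKey); }
        perKey["A"] = (perKey.TryGetValue("A", out var s) ? s : 0) + 5;

        // When sync time reaches an end time, its entries are deaccumulated from the live state.
        foreach (var entry in ecq)
            foreach (var state in entry.Value)
                System.Console.WriteLine($"at {entry.Key}: subtract {state.Value} from key {state.Key}");
    }
}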
public override unsafe void OnNext(StreamMessage <Empty, TPayload> batch) { var tentativeFindTraverser = new FastLinkedList <OutputEvent <Empty, TRegister> > .ListTraverser(this.tentativeOutput); var tentativeOutputIndex = 0; var count = batch.Count; var dest_vsync = this.batch.vsync.col; var dest_vother = this.batch.vother.col; var destkey = this.batch.key.col; var dest_hash = this.batch.hash.col; var srckey = batch.key.col; fixed(long *src_bv = batch.bitvector.col, src_vsync = batch.vsync.col) { fixed(int *src_hash = batch.hash.col) { for (int i = 0; i < count; i++) { if ((src_bv[i >> 6] & (1L << (i & 0x3f))) == 0) { long synctime = src_vsync[i]; if (!this.IsSyncTimeSimultaneityFree) { if (synctime > this.lastSyncTime) // move time forward { this.seenEvent = 0; if (this.tentativeOutput.Count > 0) { tentativeOutputIndex = 0; while (this.tentativeOutput.Iterate(ref tentativeOutputIndex)) { var elem = this.tentativeOutput.Values[tentativeOutputIndex]; dest_vsync[this.iter] = this.lastSyncTime; dest_vother[this.iter] = elem.other; this.batch[this.iter] = elem.payload; dest_hash[this.iter] = 0; this.iter++; if (this.iter == Config.DataBatchSize) { FlushContents(); dest_vsync = this.batch.vsync.col; dest_vother = this.batch.vother.col; destkey = this.batch.key.col; dest_hash = this.batch.hash.col; } } this.tentativeOutput.Clear(); // Clear the tentative output list } this.lastSyncTime = synctime; } if (this.seenEvent > 0) // Incoming event is a simultaneous one { if (this.seenEvent == 1) // Detecting first duplicate, need to adjust state { this.seenEvent = 2; // Delete tentative output for that key this.tentativeOutput.Clear(); // Delete active states for that key this.activeState_state = -1; } // Dont process this event continue; } else { this.seenEvent = 1; } } /* (1) Process currently active states */ if (this.activeState_state >= 0) { if (this.activeState_PatternStartTimestamp + this.MaxDuration > synctime) { var currentStateMap = this.singleEventStateMap[this.activeState_state]; this.activeState_state = -1; // assume the arc does not fire if (currentStateMap != null) { var m = currentStateMap.Length; for (int cnt = 0; cnt < m; cnt++) { var arcinfo = currentStateMap[cnt]; if (arcinfo.Fence(synctime, batch[i], this.activeState_register)) { if (arcinfo.Transfer != null) { this.activeState_register = arcinfo.Transfer(synctime, batch[i], this.activeState_register); } this.activeState_state = arcinfo.toState; if (this.isFinal[this.activeState_state]) { var otherTime = Math.Min(this.activeState_PatternStartTimestamp + this.MaxDuration, StreamEvent.InfinitySyncTime); if (!this.IsSyncTimeSimultaneityFree) { int ind = this.tentativeOutput.Insert(); this.tentativeOutput.Values[ind].other = otherTime; this.tentativeOutput.Values[ind].key = srckey[i]; this.tentativeOutput.Values[ind].payload = this.activeState_register; } else { dest_vsync[this.iter] = synctime; dest_vother[this.iter] = otherTime; this.batch[this.iter] = this.activeState_register; destkey[this.iter] = srckey[i]; dest_hash[this.iter] = src_hash[i]; this.iter++; if (this.iter == Config.DataBatchSize) { FlushContents(); dest_vsync = this.batch.vsync.col; dest_vother = this.batch.vother.col; destkey = this.batch.key.col; dest_hash = this.batch.hash.col; } } } if (this.hasOutgoingArcs[this.activeState_state]) { this.activeState_PatternStartTimestamp = synctime; } else { this.activeState_state = -1; } break; // DFA, so only one arc fires } } } } } /* (2) Start new activations from the start state(s) */ if (this.activeState_state >= 0) { 
continue; } var startStateMap = this.singleEventStateMap[this.startState]; if (startStateMap != null) { var m = startStateMap.Length; for (int cnt = 0; cnt < m; cnt++) { var arcinfo = startStateMap[cnt]; if (arcinfo.Fence(synctime, batch[i], this.defaultRegister)) { this.activeState_register = arcinfo.Transfer != null ? arcinfo.Transfer(synctime, batch[i], this.defaultRegister) : this.defaultRegister; this.activeState_state = arcinfo.toState; if (this.isFinal[this.activeState_state]) { var otherTime = Math.Min(synctime + this.MaxDuration, StreamEvent.InfinitySyncTime); if (!this.IsSyncTimeSimultaneityFree) { int ind = this.tentativeOutput.Insert(); this.tentativeOutput.Values[ind].other = otherTime; this.tentativeOutput.Values[ind].key = srckey[i]; this.tentativeOutput.Values[ind].payload = this.activeState_register; } else { dest_vsync[this.iter] = synctime; dest_vother[this.iter] = otherTime; this.batch[this.iter] = this.activeState_register; destkey[this.iter] = srckey[i]; dest_hash[this.iter] = src_hash[i]; this.iter++; if (this.iter == Config.DataBatchSize) { FlushContents(); dest_vsync = this.batch.vsync.col; dest_vother = this.batch.vother.col; destkey = this.batch.key.col; dest_hash = this.batch.hash.col; } } } if (this.hasOutgoingArcs[this.activeState_state]) { this.activeState_PatternStartTimestamp = synctime; } else { this.activeState_state = -1; } break; } } } } else if (batch.vother.col[i] < 0) { long synctime = src_vsync[i]; if (!this.IsSyncTimeSimultaneityFree) { if (synctime > this.lastSyncTime) // move time forward { this.seenEvent = 0; if (this.tentativeOutput.Count > 0) { tentativeOutputIndex = 0; while (this.tentativeOutput.Iterate(ref tentativeOutputIndex)) { var elem = this.tentativeOutput.Values[tentativeOutputIndex]; this.batch.vsync.col[this.iter] = this.lastSyncTime; this.batch.vother.col[this.iter] = elem.other; this.batch.payload.col[this.iter] = elem.payload; this.batch.hash.col[this.iter] = 0; this.iter++; if (this.iter == Config.DataBatchSize) { FlushContents(); } } this.tentativeOutput.Clear(); // Clear the tentative output list } this.lastSyncTime = synctime; } } OnPunctuation(synctime); } } } } batch.Free(); }
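When the input is not sync-time-simultaneity free, the operator above buffers matches as tentative output and releases them only after time strictly advances; a second event at the same sync time discards what was buffered. The sketch below captures that discipline in a small helper with hypothetical names; it is a simplification and does not reset the automaton state the way the operator does:

using System.Collections.Generic;

sealed class TentativeBuffer<TMatch>
{
    private readonly List<TMatch> pending = new List<TMatch>();
    private long lastSyncTime = long.MinValue;
    private int eventsAtCurrentTime;

    // Call once per incoming event, before processing it; returns matches that are now final.
    public List<TMatch> Advance(long syncTime)
    {
        var released = new List<TMatch>();
        if (syncTime > lastSyncTime)
        {
            released.AddRange(pending);   // the previous timestamp closed, so its matches are safe
            pending.Clear();
            eventsAtCurrentTime = 0;
            lastSyncTime = syncTime;
        }
        eventsAtCurrentTime++;
        if (eventsAtCurrentTime == 2) pending.Clear();   // a simultaneous event invalidates buffered matches
        return released;
    }

    // Only the first event at the current sync time should drive the state machine.
    public bool IsFirstAtCurrentTime => eventsAtCurrentTime == 1;

    public void AddTentative(TMatch match) => pending.Add(match);
}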