public void TestVersusDictionary()
{
    FastMap<int, String> actual = new FastMap<int, String>(1, 1000000);
    IDictionary<int, String> expected = new Dictionary<int, String>(1000000);
    Random r = RandomUtils.GetRandom();
    for (int i = 0; i < 1000000; i++)
    {
        double d = r.NextDouble();
        int key = r.Next(100);
        if (d < 0.4)
        {
            // Dictionary's indexer throws on a missing key, so compare presence first;
            // FastMap's indexer yields null for an absent key (see TestRemove).
            if (expected.ContainsKey(key))
            {
                Assert.AreEqual(expected[key], actual[key]);
            }
            else
            {
                Assert.IsNull(actual[key]);
            }
        }
        else
        {
            if (d < 0.7)
            {
                // IDictionary.Add returns void in .NET (unlike Java's Map.put), so the original
                // return-value assertion cannot compile; mutate both maps instead
                // (assumes FastMap supports the IDictionary set indexer as well as Add).
                expected[key] = "foo";
                actual[key] = "foo";
            }
            else
            {
                Assert.AreEqual(expected.Remove(key), actual.Remove(key));
            }
            Assert.AreEqual(expected.Count, actual.Count);
        }
    }
}
public void TestRemove()
{
    FastMap<String, String> map = new FastMap<String, String>();
    map.Add("foo", "bar");
    map.Remove("foo");
    Assert.AreEqual(0, map.Count);
    Assert.IsTrue(map.IsEmpty);
    Assert.IsNull(map["foo"]);
}
public void TestRehash()
{
    FastMap<String, String> map = BuildTestFastMap();
    map.Remove("foo");
    map.Rehash();
    Assert.IsNull(map["foo"]);
    Assert.AreEqual("bang", map["baz"]);
}
public void TestSizeEmpty()
{
    FastMap<String, String> map = new FastMap<String, String>();
    Assert.AreEqual(0, map.Count);
    Assert.IsTrue(map.IsEmpty);
    map.Add("foo", "bar");
    Assert.AreEqual(1, map.Count);
    Assert.IsFalse(map.IsEmpty);
    map.Remove("foo");
    Assert.AreEqual(0, map.Count);
    Assert.IsTrue(map.IsEmpty);
}
private static void ReuseTest()
{
    var map = new FastMap<string>(5);
    Assert.AreEqual(1, map.Insert(5, "0"));
    Assert.AreEqual(2, map.Insert(5, "1"));
    Assert.AreEqual(3, map.Insert(5, "2"));
    Assert.AreEqual(4, map.Insert(5, "3"));
    Assert.AreEqual(5, map.Insert(5, "4"));

    map.Remove(3);
    Assert.AreEqual(3, map.Insert(5, "5"));

    map.Remove(2);
    map.Remove(1);
    map.Remove(5);
    map.Remove(3);
    map.Remove(4);
    Assert.AreEqual(4, map.Insert(5, "6"));
    Assert.AreEqual(3, map.Insert(5, "7"));
    Assert.AreEqual(5, map.Insert(5, "8"));
    Assert.AreEqual(1, map.Insert(5, "9"));
    Assert.AreEqual(2, map.Insert(5, "10"));
    Assert.AreEqual(6, map.Insert(5, "11"));
}
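// ReuseTest above pins down FastMap's slot-reuse contract: Insert hands freed indices
// back out in last-freed-first order before growing past the current capacity.
// The following is a minimal sketch of that contract only, using a hypothetical
// SlotMap<T> built on standard collections; it is not the FastMap implementation,
// which is array-based. (Requires System.Collections.Generic.)
public sealed class SlotMap<T>
{
    private readonly Dictionary<int, List<int>> buckets = new Dictionary<int, List<int>>();
    private readonly Stack<int> freeSlots = new Stack<int>();   // LIFO reuse, as asserted above
    private readonly List<T> values = new List<T>();

    public int Insert(int hash, T value)
    {
        // Reuse the most recently freed slot if one exists; otherwise grow by one.
        int slot = freeSlots.Count > 0 ? freeSlots.Pop() : values.Count + 1;
        while (values.Count < slot)
        {
            values.Add(default(T));
        }
        values[slot - 1] = value;
        if (!buckets.TryGetValue(hash, out var bucket))
        {
            buckets[hash] = bucket = new List<int>();
        }
        bucket.Add(slot);
        return slot;
    }

    public void Remove(int slot)
    {
        foreach (var bucket in buckets.Values)
        {
            bucket.Remove(slot);
        }
        freeSlots.Push(slot);
    }
}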
public override unsafe void OnNext(StreamMessage <TKey, TPayload> batch) { var stack = new Stack <int>(); var count = batch.Count; var dest_vsync = this.batch.vsync.col; var dest_vother = this.batch.vother.col; var destkey = this.batch.key.col; var dest_hash = this.batch.hash.col; var srckey = batch.key.col; var activeFindTraverser = new FastMap <GroupedActiveState <TKey, TRegister> > .FindTraverser(this.activeStates); fixed(long *src_bv = batch.bitvector.col, src_vsync = batch.vsync.col, src_vother = batch.vother.col) { fixed(int *src_hash = batch.hash.col) { for (int i = 0; i < count; i++) { if ((src_bv[i >> 6] & (1L << (i & 0x3f))) == 0) { var key = srckey[i]; var partitionKey = this.getPartitionKey(key); int partitionIndex = EnsurePartition(partitionKey); var tentativeVisibleTraverser = new FastMap <OutputEvent <TKey, TRegister> > .VisibleTraverser(this.tentativeOutput.entries[partitionIndex].value); long synctime = src_vsync[i]; if (!this.IsSyncTimeSimultaneityFree) { int index; if (synctime > this.lastSyncTime.entries[partitionIndex].value) // move time forward { foreach (var mapIndex in this.seenPartitions.entries[partitionIndex].value) { this.seenEvent.Remove(mapIndex); } if (this.tentativeOutput.Count > 0) { tentativeVisibleTraverser.currIndex = 0; while (tentativeVisibleTraverser.Next(out index, out int hash)) { var elem = this.tentativeOutput.entries[partitionIndex].value.Values[index]; dest_vsync[this.iter] = this.lastSyncTime.entries[partitionIndex].value; dest_vother[this.iter] = elem.other; this.batch.payload.col[this.iter] = elem.payload; destkey[this.iter] = elem.key; dest_hash[this.iter] = hash; this.iter++; if (this.iter == Config.DataBatchSize) { FlushContents(); dest_vsync = this.batch.vsync.col; dest_vother = this.batch.vother.col; destkey = this.batch.key.col; dest_hash = this.batch.hash.col; } } this.tentativeOutput.entries[partitionIndex].value.Clear(); // Clear the tentative output list } this.lastSyncTime.entries[partitionIndex].value = synctime; } if (this.seenEvent.Lookup(srckey[i], out index)) // Incoming event is a simultaneous one { if (this.seenEvent.entries[index].value == 1) // Detecting first duplicate, need to adjust state { this.seenEvent.entries[index].value = 2; // Delete tentative output for that key var tentativeFindTraverser = new FastMap <OutputEvent <TKey, TRegister> > .FindTraverser(this.tentativeOutput.entries[partitionIndex].value); if (tentativeFindTraverser.Find(src_hash[i])) { while (tentativeFindTraverser.Next(out index)) { if (this.keyEqualityComparer(this.tentativeOutput.entries[partitionIndex].value.Values[index].key, srckey[i])) { tentativeFindTraverser.Remove(); } } } // Delete active states for that key if (activeFindTraverser.Find(src_hash[i])) { while (activeFindTraverser.Next(out index)) { if (this.keyEqualityComparer(this.activeStates.Values[index].key, srckey[i])) { activeFindTraverser.Remove(); } } } } // Dont process this event continue; } else { this.seenEvent.Insert(srckey[i], 1); } } /* (1) Process currently active states */ bool ended = true; if (activeFindTraverser.Find(src_hash[i])) { int orig_index; while (activeFindTraverser.Next(out int index)) { orig_index = index; var state = this.activeStates.Values[index]; if (!this.keyEqualityComparer(state.key, srckey[i])) { continue; } if (state.PatternStartTimestamp + this.MaxDuration > synctime) { var currentStateMap = this.singleEventStateMap[state.state]; if (currentStateMap != null) { var m = currentStateMap.Length; for (int cnt = 0; cnt < m; cnt++) { var arcinfo = 
currentStateMap[cnt]; if (arcinfo.Fence(synctime, batch[i], state.register)) { TRegister newReg; if (arcinfo.Transfer == null) { newReg = state.register; } else { newReg = arcinfo.Transfer(synctime, batch[i], state.register); } int ns = arcinfo.toState; while (true) { if (this.isFinal[ns]) { if (!this.IsSyncTimeSimultaneityFree) { var tentativeOutputEntry = this.tentativeOutput.entries[partitionIndex].value; int ind = tentativeOutputEntry.Insert(src_hash[i]); tentativeOutputEntry.Values[ind].other = state.PatternStartTimestamp + this.MaxDuration; tentativeOutputEntry.Values[ind].key = srckey[i]; tentativeOutputEntry.Values[ind].payload = newReg; } else { dest_vsync[this.iter] = synctime; dest_vother[this.iter] = state.PatternStartTimestamp + this.MaxDuration; this.batch[this.iter] = newReg; destkey[this.iter] = srckey[i]; dest_hash[this.iter] = src_hash[i]; this.iter++; if (this.iter == Config.DataBatchSize) { FlushContents(); dest_vsync = this.batch.vsync.col; dest_vother = this.batch.vother.col; destkey = this.batch.key.col; dest_hash = this.batch.hash.col; } } } if (this.hasOutgoingArcs[ns]) { if (index == -1) { index = this.activeStates.Insert(src_hash[i]); } this.activeStates.Values[index].key = srckey[i]; this.activeStates.Values[index].state = ns; this.activeStates.Values[index].register = newReg; this.activeStates.Values[index].PatternStartTimestamp = state.PatternStartTimestamp; index = -1; ended = false; // Add epsilon arc destinations to stack if (this.epsilonStateMap == null) { break; } if (this.epsilonStateMap[ns] != null) { for (int cnt2 = 0; cnt2 < this.epsilonStateMap[ns].Length; cnt2++) { stack.Push(this.epsilonStateMap[ns][cnt2]); } } } if (stack.Count == 0) { break; } ns = stack.Pop(); } if (this.IsDeterministic) { break; // We are guaranteed to have only one successful transition } } } } } if (index == orig_index) { activeFindTraverser.Remove(); } if (this.IsDeterministic) { break; // We are guaranteed to have only one active state } } } /* (2) Start new activations from the start state(s) */ if (!this.AllowOverlappingInstances && !ended) { continue; } for (int counter = 0; counter < this.numStartStates; counter++) { int startState = this.startStates[counter]; var startStateMap = this.singleEventStateMap[startState]; if (startStateMap != null) { var m = startStateMap.Length; for (int cnt = 0; cnt < m; cnt++) { var arcinfo = startStateMap[cnt]; if (arcinfo.Fence(synctime, batch[i], this.defaultRegister)) { TRegister newReg; if (arcinfo.Transfer == null) { newReg = this.defaultRegister; } else { newReg = arcinfo.Transfer(synctime, batch[i], this.defaultRegister); } int ns = arcinfo.toState; while (true) { if (this.isFinal[ns]) { if (!this.IsSyncTimeSimultaneityFree) { var tentativeOutputEntry = this.tentativeOutput.entries[partitionIndex].value; int ind = tentativeOutputEntry.Insert(src_hash[i]); tentativeOutputEntry.Values[ind].other = synctime + this.MaxDuration; tentativeOutputEntry.Values[ind].key = srckey[i]; tentativeOutputEntry.Values[ind].payload = newReg; } else { dest_vsync[this.iter] = synctime; dest_vother[this.iter] = synctime + this.MaxDuration; this.batch[this.iter] = newReg; destkey[this.iter] = srckey[i]; dest_hash[this.iter] = src_hash[i]; this.iter++; if (this.iter == Config.DataBatchSize) { FlushContents(); dest_vsync = this.batch.vsync.col; dest_vother = this.batch.vother.col; destkey = this.batch.key.col; dest_hash = this.batch.hash.col; } } } if (this.hasOutgoingArcs[ns]) { int index = this.activeStates.Insert(src_hash[i]); 
this.activeStates.Values[index].key = srckey[i]; this.activeStates.Values[index].state = ns; this.activeStates.Values[index].register = newReg; this.activeStates.Values[index].PatternStartTimestamp = synctime; // Add epsilon arc destinations to stack if (this.epsilonStateMap == null) { break; } if (this.epsilonStateMap[ns] != null) { for (int cnt2 = 0; cnt2 < this.epsilonStateMap[ns].Length; cnt2++) { stack.Push(this.epsilonStateMap[ns][cnt2]); } } } if (stack.Count == 0) { break; } ns = stack.Pop(); } if (this.IsDeterministic) { break; // We are guaranteed to have only one successful transition } } } } if (this.IsDeterministic) { break; // We are guaranteed to have only one start state } } } else if (src_vother[i] == PartitionedStreamEvent.LowWatermarkOtherTime) { long synctime = src_vsync[i]; if (!this.IsSyncTimeSimultaneityFree) { var partitionIndex = FastDictionary2 <TPartitionKey, long> .IteratorStart; while (this.tentativeOutput.Iterate(ref partitionIndex)) { if (synctime > this.lastSyncTime.entries[partitionIndex].value) // move time forward { var tentativeVisibleTraverser = new FastMap <OutputEvent <TKey, TRegister> > .VisibleTraverser(this.tentativeOutput.entries[partitionIndex].value); foreach (var mapIndex in this.seenPartitions.entries[partitionIndex].value) { this.seenEvent.Remove(mapIndex); } if (this.tentativeOutput.Count > 0) { tentativeVisibleTraverser.currIndex = 0; while (tentativeVisibleTraverser.Next(out int index, out int hash)) { var elem = this.tentativeOutput.entries[partitionIndex].value.Values[index]; this.batch.vsync.col[this.iter] = this.lastSyncTime.entries[partitionIndex].value; this.batch.vother.col[this.iter] = elem.other; this.batch.payload.col[this.iter] = elem.payload; this.batch.key.col[this.iter] = elem.key; this.batch.hash.col[this.iter] = hash; this.iter++; if (this.iter == Config.DataBatchSize) { FlushContents(); } } this.tentativeOutput.entries[partitionIndex].value.Clear(); // Clear the tentative output list } this.lastSyncTime.entries[partitionIndex].value = synctime; } } } OnLowWatermark(synctime); } else if (src_vother[i] == PartitionedStreamEvent.PunctuationOtherTime) { var key = srckey[i]; long synctime = src_vsync[i]; if (!this.IsSyncTimeSimultaneityFree) { var partitionKey = this.getPartitionKey(key); int partitionIndex = EnsurePartition(partitionKey); if (synctime > this.lastSyncTime.entries[partitionIndex].value) // move time forward { var tentativeVisibleTraverser = new FastMap <OutputEvent <TKey, TRegister> > .VisibleTraverser(this.tentativeOutput.entries[partitionIndex].value); foreach (var mapIndex in this.seenPartitions.entries[partitionIndex].value) { this.seenEvent.Remove(mapIndex); } if (this.tentativeOutput.Count > 0) { tentativeVisibleTraverser.currIndex = 0; while (tentativeVisibleTraverser.Next(out int index, out int hash)) { var elem = this.tentativeOutput.entries[partitionIndex].value.Values[index]; this.batch.vsync.col[this.iter] = this.lastSyncTime.entries[partitionIndex].value; this.batch.vother.col[this.iter] = elem.other; this.batch.payload.col[this.iter] = elem.payload; this.batch.key.col[this.iter] = elem.key; this.batch.hash.col[this.iter] = hash; this.iter++; if (this.iter == Config.DataBatchSize) { FlushContents(); } } this.tentativeOutput.entries[partitionIndex].value.Clear(); // Clear the tentative output list } this.lastSyncTime.entries[partitionIndex].value = synctime; } } this.batch.vsync.col[this.iter] = synctime; this.batch.vother.col[this.iter] = long.MinValue; this.batch.payload.col[this.iter] = default; 
this.batch.key.col[this.iter] = key; this.batch.hash.col[this.iter] = src_hash[i]; this.batch.bitvector.col[this.iter >> 6] |= (1L << (this.iter & 0x3f)); this.iter++; if (this.iter == Config.DataBatchSize) { FlushContents(); } } } } } batch.Free(); }
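// The hot loop above walks the batch's bitvector one 64-bit word at a time: row i lives
// in word i >> 6, at bit i & 0x3f, and a clear bit marks an active data event (set bits
// are filtered rows; punctuations and low watermarks are handled in separate branches).
// A hedged restatement of that predicate, with a hypothetical helper name:
static bool IsRowActive(long[] bitvector, int i)
    => (bitvector[i >> 6] & (1L << (i & 0x3f))) == 0;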
private static void SimpleMapTest()
{
    var map = new FastMap<string>();
    Assert.IsTrue(map.IsEmpty);
    Assert.AreEqual(0, map.Count);
    Assert.IsFalse(map.Find(5).Next(out _));
    Assert.IsFalse(map.Find(6).Next(out _));
    Assert.IsFalse(map.Find(7).Next(out _));

    int indexA5 = map.Insert(5, "a");
    Assert.IsFalse(map.IsEmpty);
    Assert.AreEqual(1, map.Count);
    Assert.IsTrue(map.Find(5).Next(out var index));
    Assert.AreEqual(indexA5, index);
    Assert.IsFalse(map.Find(6).Next(out _));
    Assert.IsFalse(map.Find(7).Next(out _));

    int indexB6 = map.Insert(6, "b");
    int indexA5Two = map.Insert(5, "a");
    int indexB5 = map.Insert(5, "b");
    Assert.IsTrue(indexA5 != indexA5Two);
    Assert.IsTrue(indexA5 != indexB5);
    Assert.IsTrue(indexA5Two != indexB5);
    Assert.IsFalse(map.IsEmpty);
    Assert.AreEqual(4, map.Count);

    var traverser = map.Find(5);
    Assert.IsTrue(traverser.Next(out index));
    Assert.AreEqual(indexB5, index);
    Assert.IsTrue(traverser.Next(out index));
    Assert.AreEqual(indexA5Two, index);
    Assert.IsTrue(traverser.Next(out index));
    Assert.AreEqual(indexA5, index);
    Assert.IsFalse(traverser.Next(out _));

    traverser = map.Find(6);
    Assert.IsTrue(traverser.Next(out index));
    Assert.AreEqual(indexB6, index);
    Assert.IsFalse(traverser.Next(out _));
    Assert.IsFalse(map.Find(7).Next(out _));

    map.Remove(indexA5);
    Assert.IsFalse(map.IsEmpty);
    Assert.AreEqual(3, map.Count);

    traverser = map.Find(5);
    Assert.IsTrue(traverser.Next(out index));
    Assert.AreEqual(indexB5, index);
    Assert.IsTrue(traverser.Next(out index));
    Assert.AreEqual(indexA5Two, index);
    Assert.IsFalse(traverser.Next(out _));

    traverser = map.Find(6);
    Assert.IsTrue(traverser.Next(out index));
    Assert.AreEqual(indexB6, index);
    Assert.IsFalse(traverser.Next(out _));
    Assert.IsFalse(map.Find(7).Next(out _));

    map.Remove(indexA5Two);
    map.Remove(indexB6);
    Assert.IsFalse(map.IsEmpty);
    Assert.AreEqual(1, map.Count);

    traverser = map.Find(5);
    Assert.IsTrue(traverser.Next(out index));
    Assert.AreEqual(indexB5, index);
    Assert.IsFalse(traverser.Next(out _));
    Assert.IsFalse(map.Find(6).Next(out _));
    Assert.IsFalse(map.Find(7).Next(out _));

    map.Remove(indexB5);
    Assert.IsTrue(map.IsEmpty);
    Assert.AreEqual(0, map.Count);
    Assert.IsFalse(map.Find(5).Next(out _));
    Assert.IsFalse(map.Find(6).Next(out _));
    Assert.IsFalse(map.Find(7).Next(out _));
}
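// SimpleMapTest documents the traversal order of Find: entries sharing a hash come back
// most-recently-inserted first. A small helper built only on the members exercised above
// (Find, Next, Values); the name ValuesForHash is ours, not part of the FastMap API, and
// List<T> comes from System.Collections.Generic.
static List<T> ValuesForHash<T>(FastMap<T> map, int hash)
{
    var result = new List<T>();
    var traverser = map.Find(hash);
    while (traverser.Next(out int index))
    {
        result.Add(map.Values[index]);   // the traverser yields slot indices into Values
    }
    return result;
}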
private static void LargerTest()
{
    var map = new FastMap<string>(1);
    const int Size = 100;

    for (int i = 0; i < Size; i++)
    {
        map.Insert(i, "one" + i);
        map.Insert(i, "two" + i);
    }

    for (int i = 0; i < Size; i++)
    {
        map.Insert(i, "three" + i);
    }

    Assert.AreEqual(Size * 3, map.Count);

    for (int i = 0; i < Size; i++)
    {
        var traverse = map.Find(i);
        bool hasOne = false;
        bool hasTwo = false;
        bool hasThree = false;
        for (int j = 0; j < 3; j++)
        {
            Assert.IsTrue(traverse.Next(out var index));
            if (map.Values[index] == "one" + i)
            {
                hasOne = true;
            }
            else if (map.Values[index] == "two" + i)
            {
                hasTwo = true;
            }
            else if (map.Values[index] == "three" + i)
            {
                hasThree = true;
            }
            else
            {
                Assert.Fail();
            }
        }

        Assert.IsFalse(traverse.Next(out _));
        Assert.IsTrue(hasOne);
        Assert.IsTrue(hasTwo);
        Assert.IsTrue(hasThree);
    }

    for (int i = 0; i < Size; i++)
    {
        var traverse = map.Find(i);
        while (traverse.Next(out int index))
        {
            if (map.Values[index] == "two" + i)
            {
                map.Remove(index);
                break;
            }
        }
    }

    Assert.AreEqual(Size * 2, map.Count);

    for (int i = 0; i < Size; i++)
    {
        var traverse = map.Find(i);
        bool hasOne = false;
        bool hasThree = false;
        for (int j = 0; j < 2; j++)
        {
            Assert.IsTrue(traverse.Next(out var index));
            if (map.Values[index] == "one" + i)
            {
                hasOne = true;
            }
            else if (map.Values[index] == "three" + i)
            {
                hasThree = true;
            }
            else
            {
                Assert.Fail();
            }
        }

        Assert.IsFalse(traverse.Next(out _));
        Assert.IsTrue(hasOne);
        Assert.IsTrue(hasThree);
    }

    for (int i = 0; i < Size; i++)
    {
        var traverse = map.Find(i);
        while (traverse.Next(out int index))
        {
            if (map.Values[index] == "one" + i)
            {
                map.Remove(index);
                break;
            }
        }
    }

    Assert.AreEqual(Size, map.Count);

    for (int i = 0; i < Size; i++)
    {
        var traverse = map.Find(i);
        Assert.IsTrue(traverse.Next(out int index));
        Assert.AreEqual("three" + i, map.Values[index]);
        Assert.IsFalse(traverse.Next(out _));
    }

    for (int i = 0; i < Size; i++)
    {
        var traverse = map.Find(i);
        while (traverse.Next(out int index))
        {
            if (map.Values[index] == "three" + i)
            {
                map.Remove(index);
                break;
            }
        }
    }

    Assert.IsTrue(map.IsEmpty);
}
public override unsafe void OnNext(StreamMessage <TKey, TPayload> batch) { var stack = new Stack <int>(); var activeFindTraverser = new FastMap <GroupedActiveState <TKey, TRegister> > .FindTraverser(this.activeStates); var tentativeFindTraverser = new FastMap <OutputEvent <TKey, TRegister> > .FindTraverser(this.tentativeOutput); var tentativeVisibleTraverser = new FastMap <OutputEvent <TKey, TRegister> > .VisibleTraverser(this.tentativeOutput); var count = batch.Count; var dest_vsync = this.batch.vsync.col; var dest_vother = this.batch.vother.col; var destkey = this.batch.key.col; var dest_hash = this.batch.hash.col; var srckey = batch.key.col; fixed(long *src_bv = batch.bitvector.col, src_vsync = batch.vsync.col) { fixed(int *src_hash = batch.hash.col) { for (int i = 0; i < count; i++) { if ((src_bv[i >> 6] & (1L << (i & 0x3f))) == 0) { long synctime = src_vsync[i]; if (!this.IsSyncTimeSimultaneityFree) { int index; if (synctime > this.lastSyncTime) // move time forward { this.seenEvent.Clear(); if (this.tentativeOutput.Count > 0) { tentativeVisibleTraverser.currIndex = 0; while (tentativeVisibleTraverser.Next(out index, out int hash)) { var elem = this.tentativeOutput.Values[index]; dest_vsync[this.iter] = this.lastSyncTime; dest_vother[this.iter] = elem.other; this.batch.payload.col[this.iter] = elem.payload; destkey[this.iter] = elem.key; dest_hash[this.iter] = hash; this.iter++; if (this.iter == Config.DataBatchSize) { FlushContents(); dest_vsync = this.batch.vsync.col; dest_vother = this.batch.vother.col; destkey = this.batch.key.col; dest_hash = this.batch.hash.col; } } this.tentativeOutput.Clear(); // Clear the tentative output list } this.lastSyncTime = synctime; } if (this.seenEvent.Lookup(srckey[i], out index)) // Incoming event is a simultaneous one { if (this.seenEvent.entries[index].value == 1) // Detecting first duplicate, need to adjust state { this.seenEvent.entries[index].value = 2; // Delete tentative output for that key if (tentativeFindTraverser.Find(src_hash[i])) { while (tentativeFindTraverser.Next(out index)) { if (this.keyEqualityComparer(this.tentativeOutput.Values[index].key, srckey[i])) { tentativeFindTraverser.Remove(); } } } // Delete active states for that key if (activeFindTraverser.Find(src_hash[i])) { while (activeFindTraverser.Next(out index)) { if (this.keyEqualityComparer(this.activeStates.Values[index].key, srckey[i])) { activeFindTraverser.Remove(); } } } } // Dont process this event continue; } else { this.seenEvent.Insert(ref index, srckey[i], 1); } } /* (1) Process currently active states */ bool ended = true; if (activeFindTraverser.Find(src_hash[i])) { int orig_index; // Track which active states need to be inserted after the current traversal var newActiveStates = new List <GroupedActiveState <TKey, TRegister> >(); while (activeFindTraverser.Next(out int index)) { orig_index = index; var state = this.activeStates.Values[index]; if (!this.keyEqualityComparer(state.key, srckey[i])) { continue; } if (state.PatternStartTimestamp + this.MaxDuration > synctime) { var currentStateMap = this.singleEventStateMap[state.state]; if (currentStateMap != null) { var m = currentStateMap.Length; for (int cnt = 0; cnt < m; cnt++) { var arcinfo = currentStateMap[cnt]; if (arcinfo.Fence(synctime, batch[i], state.register)) { var newReg = arcinfo.Transfer == null ? 
state.register : arcinfo.Transfer(synctime, batch[i], state.register); int ns = arcinfo.toState; while (true) { if (this.isFinal[ns]) { var otherTime = Math.Min(state.PatternStartTimestamp + this.MaxDuration, StreamEvent.InfinitySyncTime); if (!this.IsSyncTimeSimultaneityFree) { int ind = this.tentativeOutput.Insert(src_hash[i]); this.tentativeOutput.Values[ind].other = otherTime; this.tentativeOutput.Values[ind].key = srckey[i]; this.tentativeOutput.Values[ind].payload = newReg; } else { dest_vsync[this.iter] = synctime; dest_vother[this.iter] = otherTime; this.batch[this.iter] = newReg; destkey[this.iter] = srckey[i]; dest_hash[this.iter] = src_hash[i]; this.iter++; if (this.iter == Config.DataBatchSize) { FlushContents(); dest_vsync = this.batch.vsync.col; dest_vother = this.batch.vother.col; destkey = this.batch.key.col; dest_hash = this.batch.hash.col; } } } if (this.hasOutgoingArcs[ns]) { // Since we will eventually remove this state/index from activeStates, attempt to reuse this index for the outgoing state instead of deleting/re-adding // If index is already -1, this means we've already reused the state and must allocate/insert a new index for the outgoing state. if (index != -1) { this.activeStates.Values[index].key = srckey[i]; this.activeStates.Values[index].state = ns; this.activeStates.Values[index].register = newReg; this.activeStates.Values[index].PatternStartTimestamp = state.PatternStartTimestamp; index = -1; } else { // Do not attempt to insert directly into activeStates, as that could corrupt the traversal state. newActiveStates.Add(new GroupedActiveState <TKey, TRegister> { key = srckey[i], state = ns, register = newReg, PatternStartTimestamp = state.PatternStartTimestamp, }); } ended = false; // Add epsilon arc destinations to stack if (this.epsilonStateMap == null) { break; } if (this.epsilonStateMap[ns] != null) { for (int cnt2 = 0; cnt2 < this.epsilonStateMap[ns].Length; cnt2++) { stack.Push(this.epsilonStateMap[ns][cnt2]); } } } if (stack.Count == 0) { break; } ns = stack.Pop(); } if (this.IsDeterministic) { break; // We are guaranteed to have only one successful transition } } } } } if (index == orig_index) { activeFindTraverser.Remove(); } if (this.IsDeterministic) { break; // We are guaranteed to have only one active state } } // Now that we are done traversing the current active states, add any new ones. foreach (var newActiveState in newActiveStates) { this.activeStates.Insert(src_hash[i], newActiveState); } } /* (2) Start new activations from the start state(s) */ if (!this.AllowOverlappingInstances && !ended) { continue; } for (int counter = 0; counter < this.numStartStates; counter++) { int startState = this.startStates[counter]; var startStateMap = this.singleEventStateMap[startState]; if (startStateMap != null) { var m = startStateMap.Length; for (int cnt = 0; cnt < m; cnt++) { var arcinfo = startStateMap[cnt]; if (arcinfo.Fence(synctime, batch[i], this.defaultRegister)) { var newReg = arcinfo.Transfer == null ? 
this.defaultRegister : arcinfo.Transfer(synctime, batch[i], this.defaultRegister); int ns = arcinfo.toState; while (true) { if (this.isFinal[ns]) { var otherTime = Math.Min(synctime + this.MaxDuration, StreamEvent.InfinitySyncTime); if (!this.IsSyncTimeSimultaneityFree) { int ind = this.tentativeOutput.Insert(src_hash[i]); this.tentativeOutput.Values[ind].other = otherTime; this.tentativeOutput.Values[ind].key = srckey[i]; this.tentativeOutput.Values[ind].payload = newReg; } else { dest_vsync[this.iter] = synctime; dest_vother[this.iter] = otherTime; this.batch[this.iter] = newReg; destkey[this.iter] = srckey[i]; dest_hash[this.iter] = src_hash[i]; this.iter++; if (this.iter == Config.DataBatchSize) { FlushContents(); dest_vsync = this.batch.vsync.col; dest_vother = this.batch.vother.col; destkey = this.batch.key.col; dest_hash = this.batch.hash.col; } } } if (this.hasOutgoingArcs[ns]) { int index = this.activeStates.Insert(src_hash[i]); this.activeStates.Values[index].key = srckey[i]; this.activeStates.Values[index].state = ns; this.activeStates.Values[index].register = newReg; this.activeStates.Values[index].PatternStartTimestamp = synctime; // Add epsilon arc destinations to stack if (this.epsilonStateMap == null) { break; } if (this.epsilonStateMap[ns] != null) { for (int cnt2 = 0; cnt2 < this.epsilonStateMap[ns].Length; cnt2++) { stack.Push(this.epsilonStateMap[ns][cnt2]); } } } if (stack.Count == 0) { break; } ns = stack.Pop(); } if (this.IsDeterministic) { break; // We are guaranteed to have only one successful transition } } } } if (this.IsDeterministic) { break; // We are guaranteed to have only one start state } } } else if (batch.vother.col[i] < 0) { long synctime = src_vsync[i]; if (!this.IsSyncTimeSimultaneityFree && synctime > this.lastSyncTime) // move time forward { this.seenEvent.Clear(); if (this.tentativeOutput.Count > 0) { tentativeVisibleTraverser.currIndex = 0; while (tentativeVisibleTraverser.Next(out int index, out int hash)) { var elem = this.tentativeOutput.Values[index]; this.batch.vsync.col[this.iter] = this.lastSyncTime; this.batch.vother.col[this.iter] = elem.other; this.batch.payload.col[this.iter] = elem.payload; this.batch.key.col[this.iter] = elem.key; this.batch.hash.col[this.iter] = hash; this.iter++; if (this.iter == Config.DataBatchSize) { FlushContents(); dest_vsync = this.batch.vsync.col; dest_vother = this.batch.vother.col; destkey = this.batch.key.col; dest_hash = this.batch.hash.col; } } this.tentativeOutput.Clear(); // Clear the tentative output list } this.lastSyncTime = synctime; } // Update dest_* on punctuation in case this event will hit the batch boundary and allocate a new batch OnPunctuation(synctime); dest_vsync = this.batch.vsync.col; dest_vother = this.batch.vother.col; destkey = this.batch.key.col; dest_hash = this.batch.hash.col; } } } } batch.Free(); }
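// Both OnNext variants above share one pattern on the !IsSyncTimeSimultaneityFree path:
// output produced at the current sync time is parked in tentativeOutput, and only when
// sync time moves forward is it emitted (stamped with the previous sync time) and the
// seenEvent bookkeeping reset, so that a later duplicate simultaneous event can still
// retract it. A conceptual sketch of that buffering discipline with hypothetical types
// (TentativeBuffer, Hold, OnTimeAdvance are ours, not the operator's API; requires
// System and System.Collections.Generic):
sealed class TentativeBuffer<TOutput>
{
    private readonly List<TOutput> pending = new List<TOutput>();
    private long lastSyncTime = long.MinValue;

    // Called with every incoming sync time before the event itself is processed.
    public void OnTimeAdvance(long syncTime, Action<long, TOutput> emit)
    {
        if (syncTime > lastSyncTime)
        {
            foreach (var item in pending)
            {
                emit(lastSyncTime, item);   // flush at the time the output was produced
            }
            pending.Clear();                // nothing tentative remains; seen-event state would also reset here
            lastSyncTime = syncTime;
        }
    }

    // Park an output that may still be retracted by a later simultaneous event.
    public void Hold(TOutput item) => pending.Add(item);
}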