/// <summary>
/// Interns a <c>PredictionContext</c> graph: recursively replaces every node with the
/// canonical instance held in <paramref name="contextCache"/> so that structurally
/// equal graphs share a single object. <paramref name="visited"/> is an identity-keyed
/// memo of nodes already processed during this traversal.
/// </summary>
public static Antlr4.Runtime.Atn.PredictionContext GetCachedContext(Antlr4.Runtime.Atn.PredictionContext context, ConcurrentDictionary<Antlr4.Runtime.Atn.PredictionContext, Antlr4.Runtime.Atn.PredictionContext> contextCache, PredictionContext.IdentityHashMap visited)
{
    // Empty contexts are already canonical; nothing to intern.
    if (context.IsEmpty)
    {
        return(context);
    }
    Antlr4.Runtime.Atn.PredictionContext existing;
    // Seen during this traversal already?
    if (visited.TryGetValue(context, out existing))
    {
        return(existing);
    }
    // Interned by an earlier traversal?
    if (contextCache.TryGetValue(context, out existing))
    {
        visited[context] = existing;
        return(existing);
    }
    bool changed = false;
    Antlr4.Runtime.Atn.PredictionContext[] parents = new Antlr4.Runtime.Atn.PredictionContext[context.Size];
    for (int i = 0; i < parents.Length; i++)
    {
        Antlr4.Runtime.Atn.PredictionContext parent = GetCachedContext(context.GetParent(i), contextCache, visited);
        if (changed || parent != context.GetParent(i))
        {
            if (!changed)
            {
                // First divergence: lazily copy the original parent array before patching
                // (copy-on-write — unchanged contexts never allocate a second array here).
                parents = new Antlr4.Runtime.Atn.PredictionContext[context.Size];
                for (int j = 0; j < context.Size; j++)
                {
                    parents[j] = context.GetParent(j);
                }
                changed = true;
            }
            parents[i] = parent;
        }
    }
    if (!changed)
    {
        // Every parent was already canonical: intern this node itself as-is.
        existing = contextCache.GetOrAdd(context, context);
        visited[context] = existing;
        return(context);
    }
    // We know parents.length>0 because context.isEmpty() is checked at the beginning of the method.
    Antlr4.Runtime.Atn.PredictionContext updated;
    if (parents.Length == 1)
    {
        updated = new SingletonPredictionContext(parents[0], context.GetReturnState(0));
    }
    else
    {
        // Multi-parent node: rebuild with the interned parents, keeping the original
        // return states and cached hash (parents were replaced by equal instances).
        ArrayPredictionContext arrayPredictionContext = (ArrayPredictionContext)context;
        updated = new ArrayPredictionContext(parents, arrayPredictionContext.returnStates, context.cachedHashCode);
    }
    existing = contextCache.GetOrAdd(updated, updated);
    visited[updated] = existing;
    visited[context] = existing;
    return(updated);
}
/// <summary>
/// Iterative structural equality between <c>this</c> and <paramref name="other"/>,
/// using explicit work stacks instead of recursion so deep context graphs cannot
/// overflow the call stack. <paramref name="visited"/> records operand pairs that
/// were already compared (identity-based, commutative) so shared structure is not
/// re-walked and the loop terminates.
/// </summary>
private bool Equals(Antlr4.Runtime.Atn.ArrayPredictionContext other, HashSet<PredictionContextCache.IdentityCommutativePredictionContextOperands> visited)
{
    // The two stacks are pushed/popped in lock-step: position k on each stack
    // holds the pair of sub-contexts to compare next.
    Stack<PredictionContext> selfWorkList = new Stack<PredictionContext>();
    Stack<PredictionContext> otherWorkList = new Stack<PredictionContext>();
    selfWorkList.Push(this);
    otherWorkList.Push(other);
    while (selfWorkList.Count > 0)
    {
        PredictionContextCache.IdentityCommutativePredictionContextOperands operands = new PredictionContextCache.IdentityCommutativePredictionContextOperands(selfWorkList.Pop(), otherWorkList.Pop());
        if (!visited.Add(operands))
        {
            // This pair was compared before; skip it.
            continue;
        }
        int selfSize = operands.X.Size;
        if (selfSize == 0)
        {
            // Empty context: defer to its own Equals (distinguishes the empty variants).
            if (!operands.X.Equals(operands.Y))
            {
                return(false);
            }
            continue;
        }
        int otherSize = operands.Y.Size;
        if (selfSize != otherSize)
        {
            return(false);
        }
        for (int i = 0; i < selfSize; i++)
        {
            if (operands.X.GetReturnState(i) != operands.Y.GetReturnState(i))
            {
                return(false);
            }
            PredictionContext selfParent = operands.X.GetParent(i);
            PredictionContext otherParent = operands.Y.GetParent(i);
            // Cheap rejection: different hash codes can never be equal.
            if (selfParent.GetHashCode() != otherParent.GetHashCode())
            {
                return(false);
            }
            if (selfParent != otherParent)
            {
                // Not reference-identical; queue the pair for structural comparison.
                selfWorkList.Push(selfParent);
                otherWorkList.Push(otherParent);
            }
        }
    }
    return(true);
}
/// <summary>
/// Object equality: reference identity first, then a cheap hash-code pre-check,
/// and finally the full structural comparison via the worklist-based overload.
/// </summary>
public override bool Equals(object o)
{
    if (ReferenceEquals(this, o))
    {
        return(true);
    }
    Antlr4.Runtime.Atn.ArrayPredictionContext other = o as Antlr4.Runtime.Atn.ArrayPredictionContext;
    if (other == null)
    {
        return(false);
    }
    // can't be same if hash is different
    if (GetHashCode() != other.GetHashCode())
    {
        return(false);
    }
    return(Equals(other, new HashSet<PredictionContextCache.IdentityCommutativePredictionContextOperands>()));
}
// @Override
// public int findReturnState(int returnState) {
//     return Arrays.binarySearch(returnStates, returnState);
// }

/// <summary>
/// Equality for flat array contexts: reference identity, then a hash-code
/// pre-check, then element-wise comparison of return states and parents.
/// </summary>
public override bool Equals(Object o)
{
    if (ReferenceEquals(this, o))
    {
        return(true);
    }
    ArrayPredictionContext a = o as ArrayPredictionContext;
    if (a == null)
    {
        return(false);
    }
    if (GetHashCode() != a.GetHashCode())
    {
        return(false); // can't be same if hash is different
    }
    return(Arrays.Equals(returnStates, a.returnStates) && Arrays.Equals(parents, a.parents));
}
/// <summary>
/// Handles the special cases of merging when at least one operand is the root
/// (EMPTY) context. Returns <c>null</c> when neither operand is EMPTY, signalling
/// that the caller must perform a regular merge.
/// </summary>
public static PredictionContext MergeRoot(SingletonPredictionContext a, SingletonPredictionContext b, bool rootIsWildcard)
{
    if (rootIsWildcard)
    {
        // Local-context mode: EMPTY acts as a wildcard and absorbs the other operand.
        if (a == PredictionContext.EMPTY || b == PredictionContext.EMPTY)
        {
            return(PredictionContext.EMPTY); // * + b = * ; a + * = *
        }
        return(null);
    }
    // Full-context mode: EMPTY is the real root symbol '$'.
    if (a == EMPTY && b == EMPTY)
    {
        return(EMPTY); // $ + $ = $
    }
    if (a == EMPTY)
    {
        // $ + x = [x,$]  (null parent slot encodes the $ entry)
        int[] statePair = { b.returnState, EMPTY_RETURN_STATE };
        PredictionContext[] parentPair = { b.parent, null };
        return(new ArrayPredictionContext(parentPair, statePair));
    }
    if (b == EMPTY)
    {
        // x + $ = [x,$] ($ is always last; its parent slot is null)
        int[] statePair = { a.returnState, EMPTY_RETURN_STATE };
        PredictionContext[] parentPair = { a.parent, null };
        return(new ArrayPredictionContext(parentPair, statePair));
    }
    // Neither side is the root; caller handles the general case.
    return(null);
}
/// <summary>
/// Central merge dispatcher: chooses singleton-singleton, root-wildcard, or
/// array-array merging based on the operand shapes.
/// </summary>
internal static PredictionContext Merge(PredictionContext a, PredictionContext b, bool rootIsWildcard, MergeCache mergeCache)
{
    // Trivial case: identical or structurally equal operands merge to themselves.
    if (a == b || a.Equals(b))
    {
        return(a);
    }
    SingletonPredictionContext singleA = a as SingletonPredictionContext;
    SingletonPredictionContext singleB = b as SingletonPredictionContext;
    if (singleA != null && singleB != null)
    {
        return(MergeSingletons(singleA, singleB, rootIsWildcard, mergeCache));
    }
    // At least one of a or b is array
    // If one is $ and rootIsWildcard, return $ as * wildcard
    if (rootIsWildcard)
    {
        if (a is EmptyPredictionContext)
        {
            return(a);
        }
        if (b is EmptyPredictionContext)
        {
            return(b);
        }
    }
    // Promote any remaining singleton so both operands are arrays before merging.
    if (singleA != null)
    {
        a = new ArrayPredictionContext(singleA);
    }
    if (singleB != null)
    {
        b = new ArrayPredictionContext(singleB);
    }
    return(MergeArrays((ArrayPredictionContext)a, (ArrayPredictionContext)b, rootIsWildcard, mergeCache));
}
/// <summary>
/// Appends <paramref name="suffix"/> (which must be empty or single-parent) to
/// every path of <paramref name="context"/>. <paramref name="visited"/> memoizes
/// already-rewritten sub-contexts (identity-keyed) so shared structure is rewritten once.
/// </summary>
private static PredictionContext AppendContext(PredictionContext context, PredictionContext suffix, PredictionContext.IdentityHashMap visited)
{
    if (suffix.IsEmpty)
    {
        if (IsEmptyLocal(suffix))
        {
            // Appending the local wildcard is only meaningful when the target
            // already contains an empty path.
            if (context.HasEmpty)
            {
                return EmptyLocal;
            }
            throw new NotSupportedException("what to do here?");
        }
        // Appending full-context EMPTY ($) changes nothing.
        return context;
    }
    if (suffix.Size != 1)
    {
        throw new NotSupportedException("Appending a tree suffix is not yet supported.");
    }
    PredictionContext result;
    if (!visited.TryGetValue(context, out result))
    {
        if (context.IsEmpty)
        {
            // Reached the end of a path; the suffix becomes the continuation.
            result = suffix;
        }
        else
        {
            int parentCount = context.Size;
            if (context.HasEmpty)
            {
                // The empty alternative is re-attached separately via Join below.
                parentCount--;
            }
            PredictionContext[] updatedParents = new PredictionContext[parentCount];
            int[] updatedReturnStates = new int[parentCount];
            for (int i = 0; i < parentCount; i++)
            {
                updatedReturnStates[i] = context.GetReturnState(i);
            }
            for (int i_1 = 0; i_1 < parentCount; i_1++)
            {
                // Push the suffix down recursively through each parent.
                updatedParents[i_1] = AppendContext(context.GetParent(i_1), suffix, visited);
            }
            if (updatedParents.Length == 1)
            {
                result = new SingletonPredictionContext(updatedParents[0], updatedReturnStates[0]);
            }
            else
            {
                System.Diagnostics.Debug.Assert(updatedParents.Length > 1);
                result = new Antlr4.Runtime.Atn.ArrayPredictionContext(updatedParents, updatedReturnStates);
            }
            if (context.HasEmpty)
            {
                // A path that ended here now continues directly into the suffix.
                result = PredictionContext.Join(result, suffix);
            }
        }
        visited[context] = result;
    }
    return result;
}
/// <summary>
/// Appends <paramref name="suffix"/> (which must be empty or single-parent) to
/// every path of <paramref name="context"/>. <paramref name="visited"/> memoizes
/// already-rewritten sub-contexts (identity-keyed) so shared structure is rewritten once.
/// </summary>
private static PredictionContext AppendContext(PredictionContext context, PredictionContext suffix, PredictionContext.IdentityHashMap visited)
{
    if (suffix.IsEmpty)
    {
        if (IsEmptyLocal(suffix))
        {
            // Appending the local wildcard is only meaningful when the target
            // already contains an empty path.
            if (context.HasEmpty)
            {
                return(EmptyLocal);
            }
            throw new NotSupportedException("what to do here?");
        }
        // Appending full-context EMPTY ($) changes nothing.
        return(context);
    }
    if (suffix.Size != 1)
    {
        throw new NotSupportedException("Appending a tree suffix is not yet supported.");
    }
    PredictionContext result;
    if (!visited.TryGetValue(context, out result))
    {
        if (context.IsEmpty)
        {
            // Reached the end of a path; the suffix becomes the continuation.
            result = suffix;
        }
        else
        {
            int parentCount = context.Size;
            if (context.HasEmpty)
            {
                // The empty alternative is re-attached separately via Join below.
                parentCount--;
            }
            PredictionContext[] updatedParents = new PredictionContext[parentCount];
            int[] updatedReturnStates = new int[parentCount];
            for (int i = 0; i < parentCount; i++)
            {
                updatedReturnStates[i] = context.GetReturnState(i);
            }
            for (int i_1 = 0; i_1 < parentCount; i_1++)
            {
                // Push the suffix down recursively through each parent.
                updatedParents[i_1] = AppendContext(context.GetParent(i_1), suffix, visited);
            }
            if (updatedParents.Length == 1)
            {
                result = new SingletonPredictionContext(updatedParents[0], updatedReturnStates[0]);
            }
            else
            {
                System.Diagnostics.Debug.Assert(updatedParents.Length > 1);
                result = new Antlr4.Runtime.Atn.ArrayPredictionContext(updatedParents, updatedReturnStates);
            }
            if (context.HasEmpty)
            {
                // A path that ended here now continues directly into the suffix.
                result = PredictionContext.Join(result, suffix);
            }
        }
        visited[context] = result;
    }
    return(result);
}
/// <summary>
/// Interns a <c>PredictionContext</c> graph against <paramref name="contextCache"/>
/// so structurally equal graphs share one instance. <paramref name="visited"/> is an
/// identity-keyed memo of nodes already processed during this traversal.
/// </summary>
public static PredictionContext GetCachedContext(PredictionContext context, PredictionContextCache contextCache, PredictionContext.IdentityHashMap visited)
{
    // Empty contexts are already canonical.
    if (context.IsEmpty)
    {
        return(context);
    }
    // Seen during this traversal already?
    PredictionContext existing = visited.Get(context);
    if (existing != null)
    {
        return(existing);
    }
    // Interned by an earlier traversal?
    existing = contextCache.Get(context);
    if (existing != null)
    {
        visited.Put(context, existing);
        return(existing);
    }
    bool changed = false;
    PredictionContext[] parents = new PredictionContext[context.Size];
    for (int i = 0; i < parents.Length; i++)
    {
        PredictionContext parent = GetCachedContext(context.GetParent(i), contextCache, visited);
        if (changed || parent != context.GetParent(i))
        {
            if (!changed)
            {
                // First divergence: lazily copy the original parents before patching
                // (copy-on-write — no allocation when nothing changed).
                parents = new PredictionContext[context.Size];
                for (int j = 0; j < context.Size; j++)
                {
                    parents[j] = context.GetParent(j);
                }
                changed = true;
            }
            parents[i] = parent;
        }
    }
    if (!changed)
    {
        // Every parent was already canonical: intern this node itself as-is.
        contextCache.Add(context);
        visited.Put(context, context);
        return(context);
    }
    PredictionContext updated;
    if (parents.Length == 0)
    {
        updated = EMPTY;
    }
    else if (parents.Length == 1)
    {
        updated = SingletonPredictionContext.Create(parents[0], context.GetReturnState(0));
    }
    else
    {
        // Multi-parent node: rebuild with the interned parents and original return states.
        ArrayPredictionContext arrayPredictionContext = (ArrayPredictionContext)context;
        updated = new ArrayPredictionContext(parents, arrayPredictionContext.returnStates);
    }
    contextCache.Add(updated);
    visited.Put(updated, updated);
    visited.Put(context, updated);
    return(updated);
}
/// <summary>
/// Merges two array contexts by walking their (sorted) return-state arrays in
/// parallel, merging parents where return states coincide. Consults and populates
/// <paramref name="mergeCache"/> (commutatively) when provided.
/// </summary>
public static PredictionContext MergeArrays(
    ArrayPredictionContext a,
    ArrayPredictionContext b,
    bool rootIsWildcard,
    MergeCache mergeCache)
{
    // Check the cache in both argument orders — merging is commutative.
    if (mergeCache != null)
    {
        PredictionContext previous = mergeCache.Get(a, b);
        if (previous != null)
        {
            return(previous);
        }
        previous = mergeCache.Get(b, a);
        if (previous != null)
        {
            return(previous);
        }
    }
    // merge sorted payloads a + b => M
    int i = 0; // walks a
    int j = 0; // walks b
    int k = 0; // walks target M array
    // Worst case: no return states coincide, so M holds every entry from both.
    int[] mergedReturnStates = new int[a.returnStates.Length + b.returnStates.Length];
    PredictionContext[] mergedParents = new PredictionContext[a.returnStates.Length + b.returnStates.Length];
    // walk and merge to yield mergedParents, mergedReturnStates
    while (i < a.returnStates.Length && j < b.returnStates.Length)
    {
        PredictionContext a_parent = a.parents[i];
        PredictionContext b_parent = b.parents[j];
        if (a.returnStates[i] == b.returnStates[j])
        {
            // same payload (stack tops are equal), must yield merged singleton
            int payload = a.returnStates[i];
            // $+$ = $
            bool both_dollar = payload == EMPTY_RETURN_STATE && a_parent == null && b_parent == null;
            bool ax_ax = (a_parent != null && b_parent != null) && a_parent.Equals(b_parent); // ax+ax -> ax
            if (both_dollar || ax_ax)
            {
                mergedParents[k] = a_parent; // choose left
                mergedReturnStates[k] = payload;
            }
            else // ax+ay -> a'[x,y]
            {
                PredictionContext mergedParent = Merge(a_parent, b_parent, rootIsWildcard, mergeCache);
                mergedParents[k] = mergedParent;
                mergedReturnStates[k] = payload;
            }
            i++; // hop over left one as usual
            j++; // but also skip one in right side since we merge
        }
        else if (a.returnStates[i] < b.returnStates[j])
        {
            // copy a[i] to M
            mergedParents[k] = a_parent;
            mergedReturnStates[k] = a.returnStates[i];
            i++;
        }
        else // b > a, copy b[j] to M
        {
            mergedParents[k] = b_parent;
            mergedReturnStates[k] = b.returnStates[j];
            j++;
        }
        k++;
    }
    // copy over any payloads remaining in either array
    if (i < a.returnStates.Length)
    {
        for (int p = i; p < a.returnStates.Length; p++)
        {
            mergedParents[k] = a.parents[p];
            mergedReturnStates[k] = a.returnStates[p];
            k++;
        }
    }
    else
    {
        for (int p = j; p < b.returnStates.Length; p++)
        {
            mergedParents[k] = b.parents[p];
            mergedReturnStates[k] = b.returnStates[p];
            k++;
        }
    }
    // trim merged if we combined a few that had same stack tops
    if (k < mergedParents.Length)
    {
        // write index < last position; trim
        if (k == 1)
        {
            // for just one merged element, return singleton top
            PredictionContext a_ = SingletonPredictionContext.Create(mergedParents[0], mergedReturnStates[0]);
            if (mergeCache != null)
            {
                mergeCache.Put(a, b, a_);
            }
            return(a_);
        }
        mergedParents = Arrays.CopyOf(mergedParents, k);
        mergedReturnStates = Arrays.CopyOf(mergedReturnStates, k);
    }
    PredictionContext M = new ArrayPredictionContext(mergedParents, mergedReturnStates);
    // if we created same array as a or b, return that instead
    // TODO: track whether this is possible above during merge sort for speed
    if (M.Equals(a))
    {
        if (mergeCache != null)
        {
            mergeCache.Put(a, b, a);
        }
        return(a);
    }
    if (M.Equals(b))
    {
        if (mergeCache != null)
        {
            mergeCache.Put(a, b, b);
        }
        return(b);
    }
    // Collapse duplicate (equal) parents in-place to shrink the graph further.
    CombineCommonParents(mergedParents);
    if (mergeCache != null)
    {
        mergeCache.Put(a, b, M);
    }
    return(M);
}
/// <summary>
/// Merges two singleton contexts, handling root (EMPTY) special cases via
/// <c>MergeRoot</c> first. Consults and populates <paramref name="mergeCache"/>
/// (commutatively) when provided.
/// </summary>
public static PredictionContext MergeSingletons(
    SingletonPredictionContext a,
    SingletonPredictionContext b,
    bool rootIsWildcard,
    MergeCache mergeCache)
{
    // Check the cache in both argument orders — merging is commutative.
    if (mergeCache != null)
    {
        PredictionContext previous = mergeCache.Get(a, b);
        if (previous != null)
        {
            return(previous);
        }
        previous = mergeCache.Get(b, a);
        if (previous != null)
        {
            return(previous);
        }
    }
    // Root special cases ($ / wildcard); non-null means one applied.
    PredictionContext rootMerge = MergeRoot(a, b, rootIsWildcard);
    if (rootMerge != null)
    {
        if (mergeCache != null)
        {
            mergeCache.Put(a, b, rootMerge);
        }
        return(rootMerge);
    }
    if (a.returnState == b.returnState)
    {
        // a == b
        PredictionContext parent = Merge(a.parent, b.parent, rootIsWildcard, mergeCache);
        // if parent is same as existing a or b parent or reduced to a parent, return it
        if (parent == a.parent)
        {
            return(a); // ax + bx = ax, if a=b
        }
        if (parent == b.parent)
        {
            return(b); // ax + bx = bx, if a=b
        }
        // else: ax + ay = a'[x,y]
        // merge parents x and y, giving array node with x,y then remainders
        // of those graphs. dup a, a' points at merged array
        // new joined parent so create new singleton pointing to it, a'
        PredictionContext a_ = SingletonPredictionContext.Create(parent, a.returnState);
        if (mergeCache != null)
        {
            mergeCache.Put(a, b, a_);
        }
        return(a_);
    }
    else // a != b payloads differ
    // see if we can collapse parents due to $+x parents if local ctx
    {
        int[] payloads = new int[2];
        PredictionContext[] parents = new PredictionContext[2];
        PredictionContext pc;
        PredictionContext singleParent = null;
        if (a == b || (a.parent != null && a.parent.Equals(b.parent)))
        {
            // ax + bx = [a,b]x
            singleParent = a.parent;
        }
        if (singleParent != null)
        {
            // parents are same
            // sort payloads and use same parent
            if (a.returnState > b.returnState)
            {
                payloads[0] = b.returnState;
                payloads[1] = a.returnState;
            }
            else
            {
                payloads[0] = a.returnState;
                payloads[1] = b.returnState;
            }
            parents[0] = singleParent;
            parents[1] = singleParent;
            pc = new ArrayPredictionContext(parents, payloads);
            if (mergeCache != null)
            {
                mergeCache.Put(a, b, pc);
            }
            return(pc);
        }
        // parents differ and can't merge them. Just pack together
        // into array; can't merge.
        // ax + by = [ax,by]
        // sort by payload
        if (a.returnState > b.returnState)
        {
            payloads[0] = b.returnState;
            payloads[1] = a.returnState;
            parents[0] = b.parent;
            parents[1] = a.parent;
        }
        else
        {
            payloads[0] = a.returnState;
            payloads[1] = b.returnState;
            parents[0] = a.parent;
            parents[1] = b.parent;
        }
        pc = new ArrayPredictionContext(parents, payloads);
        if (mergeCache != null)
        {
            mergeCache.Put(a, b, pc);
        }
        return(pc);
    }
}