public override IToken NextToken()
{
    if (buffer.Count > 0)
    {
        return buffer.DequeueBottom();
    }
    else
    {
        // The matched rule adds at least one token to the buffer via emit(t).
        base.NextToken(); // Ignore the return value; we use the buffer.
        return buffer.DequeueBottom();
    }
}
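// A minimal sketch (not the original source) of how the buffer referenced above is
// assumed to be filled: a base lexer overrides the ANTLR C# runtime's virtual
// Emit(IToken) hook so every token produced while matching a rule is queued, and
// NextToken() above drains that queue in order. The class name BufferingLexerBase
// is hypothetical; StackQueue<T> is the project's own deque type.
using Antlr4.Runtime;

public abstract class BufferingLexerBase : Lexer
{
    // Tokens are pushed on top and dequeued from the bottom, preserving emit order.
    protected readonly StackQueue<IToken> buffer = new StackQueue<IToken>();

    protected BufferingLexerBase(ICharStream input)
        : base(input)
    {
    }

    public override void Emit(IToken token)
    {
        base.Emit(token);   // Keep the runtime's own bookkeeping.
        buffer.Push(token); // Queue the token for NextToken() to return.
    }
}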
// Copy work from another thread's stack to the stack of thread "index".
void GetWork(int index)
{
    lock (Stack[index])
    {
        // Clean up: drop entries whose "todo" work lists are already empty.
        while (Stack[index].Count > 1 && Stack[index].PeekTop().Item2.Count == 0)
        {
            Stack[index].Pop();
        }
        // If there is still local work, there is nothing to steal.
        if (!(Stack[index].Count == 1 && Stack[index].PeekTop().Item2.Count == 0))
        {
            return;
        }
    }

    bool done = false;
    int from = 0;
    for (int j = 0; j < NUMRETRY - 1; ++j)
    {
        from = (from + 1) % NumberOfWorkers;
        lock (Stack[from])
        {
            if (Stack[from].Count > CutOff)
            {
                // Check whether there actually is work in Stack[from].
                // The stack may be full of empty "todo" work lists if the thread
                // that owns it has not yet cleaned up.
                int count = 0;
                for (int i = 0; i < Stack[from].Count - CutOff; ++i)
                {
                    count += Stack[from].PeekBottom(i).Item2.Count;
                }
                if (count <= 1)
                {
                    continue;
                }
                count = count / 2;
                if (count < 1)
                {
                    continue;
                }
                //System.Console.WriteLine("Stealing " + count + " work items from " + from + " to " + index);

                // Work is available on that thread's stack.
                // Grab "count" nodes to work on: copy stack "from" to "index",
                // then divide the two stacks into disjoint sets of vertices.
                StackQueue<Tuple<T, StackQueue<T>>> new_stack = new StackQueue<Tuple<T, StackQueue<T>>>();
                for (int i = 0; i < Stack[from].Count - CutOff && count > 0; ++i)
                {
                    Tuple<T, StackQueue<T>> tf = Stack[from].PeekBottom(i);
                    T tb = tf.Item1;
                    StackQueue<T> s = tf.Item2;

                    // Make partitions.
                    StackQueue<T> work = new StackQueue<T>();
                    for ( ; count > 0 && s.Count != 0; --count)
                    {
                        T v = s.DequeueBottom(); // Side effect: removes work from "from".
                        work.Push(v);
                    }
                    Tuple<T, StackQueue<T>> tt = new Tuple<T, StackQueue<T>>(tb, work);
                    new_stack.Push(tt);
                }

                // Assign the new stack.
                Stack[index] = new_stack;

                // Dump stacks.
                //Dump();
                done = true;
            }
        }
        if (done)
        {
            return;
        }
    }
}
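// Both GetWork above and the NextToken() override before it lean on a StackQueue<T>
// deque (Push/Pop/PeekTop at the top, DequeueBottom/PeekBottom at the bottom).
// The real type lives elsewhere in this project; the sketch below is an assumption
// reconstructed from the call sites, not the original implementation.
using System.Collections.Generic;

public class StackQueue<T>
{
    // Index 0 is the bottom; the last element is the top.
    private readonly List<T> items = new List<T>();

    public int Count => items.Count;

    // Push onto the top of the stack.
    public void Push(T item) => items.Add(item);

    // Pop from the top of the stack.
    public T Pop()
    {
        T top = items[items.Count - 1];
        items.RemoveAt(items.Count - 1);
        return top;
    }

    // Look at the top element without removing it.
    public T PeekTop() => items[items.Count - 1];

    // Look at the i-th element counted from the bottom (i = 0 is the bottom).
    public T PeekBottom(int i) => items[i];

    // Remove and return the bottom element (queue-style dequeue).
    public T DequeueBottom()
    {
        T bottom = items[0];
        items.RemoveAt(0);
        return bottom;
    }
}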
// The layout (indentation) algorithm is described here:
//   https://www.haskell.org/onlinereport/haskell2010/haskellch10.html
//   https://en.wikibooks.org/wiki/Haskell/Indentation
public override IToken NextToken()
{
    if (tokenQueue.Any())
    {
        return tokenQueue.DequeueBottom();
    }

    IToken next = base.NextToken();
    var type = next.Type;

    if (type == HaskellLexer.OpenPragmaBracket)
    {
        inPragmas = true;
    }

    if (startIndent == -1
        && type != HaskellLexer.NEWLINE
        && type != HaskellLexer.WS
        && type != HaskellLexer.TAB
        && type != HaskellLexer.OCURLY)
    {
        if (type == HaskellLexer.MODULE)
        {
            moduleStartIndent = true;
            wasModuleExport = true;
        }

        if (type != HaskellLexer.MODULE && !moduleStartIndent && !inPragmas)
        {
            startIndent = next.Column;
        }
        else if (lastKeyWord.Equals("where") && moduleStartIndent)
        {
            lastKeyWord = "";
            prevWasKeyWord = false;
            nestedLevel = 0;
            moduleStartIndent = false;
            prevWasEndl = false;
            startIndent = next.Column;
            tokenQueue.Push(createToken(HaskellLexer.VOCURLY, "VOCURLY", next));
            tokenQueue.Push(createToken(type, next.Text, next));
            return tokenQueue.DequeueBottom();
        }
    }

    if (type == HaskellLexer.ClosePragmaBracket)
    {
        inPragmas = false;
    }

    if (type == HaskellLexer.OCURLY)
    {
        if (prevWasKeyWord)
        {
            nestedLevel--;
            prevWasKeyWord = false;
        }

        if (moduleStartIndent)
        {
            moduleStartIndent = false;
            // Because there will be a HaskellLexer.CCURLY at the end of the file.
            wasModuleExport = false;
        }

        ignoreIndent = true;
        prevWasEndl = false;
    }

    if (prevWasKeyWord && !prevWasEndl && !moduleStartIndent
        && type != HaskellLexer.WS
        && type != HaskellLexer.NEWLINE
        && type != HaskellLexer.TAB
        && type != HaskellLexer.OCURLY)
    {
        prevWasKeyWord = false;
        indentStack.Push(new Pair<string, int>(lastKeyWord, next.Column));
        tokenQueue.Push(createToken(HaskellLexer.VOCURLY, "VOCURLY", next));
    }

    if (ignoreIndent
        && (type == HaskellLexer.WHERE
            || type == HaskellLexer.DO
            || type == HaskellLexer.MDO
            || type == HaskellLexer.LET
            || type == HaskellLexer.OF
            || type == HaskellLexer.LCASE
            || type == HaskellLexer.REC
            || type == HaskellLexer.CCURLY))
    {
        ignoreIndent = false;
    }

    if (pendingDent && prevWasKeyWord && !ignoreIndent
        && indentCount <= getSavedIndent()
        && type != HaskellLexer.NEWLINE
        && type != HaskellLexer.WS)
    {
        tokenQueue.Push(createToken(HaskellLexer.VOCURLY, "VOCURLY", next));
        prevWasKeyWord = false;
        prevWasEndl = true;
    }

    if (pendingDent && prevWasEndl && !ignoreIndent
        && indentCount <= getSavedIndent()
        && type != HaskellLexer.NEWLINE
        && type != HaskellLexer.WS
        && type != HaskellLexer.WHERE
        && type != HaskellLexer.IN
        && type != HaskellLexer.DO
        && type != HaskellLexer.MDO
        && type != HaskellLexer.OF
        && type != HaskellLexer.LCASE
        && type != HaskellLexer.REC
        && type != HaskellLexer.CCURLY
        && type != Antlr4.Runtime.TokenConstants.EOF)
    {
        while (nestedLevel > indentStack.Count())
        {
            if (nestedLevel > 0)
            {
                nestedLevel--;
            }
            tokenQueue.Push(createToken(HaskellLexer.SEMI, "SEMI", next));
            tokenQueue.Push(createToken(HaskellLexer.VCCURLY, "VCCURLY", next));
        }

        while (indentCount < getSavedIndent())
        {
            if (indentStack.Any() && nestedLevel > 0)
            {
                indentStack.Pop();
                nestedLevel--;
            }
            tokenQueue.Push(createToken(HaskellLexer.SEMI, "SEMI", next));
            tokenQueue.Push(createToken(HaskellLexer.VCCURLY, "VCCURLY", next));
        }

        if (indentCount == getSavedIndent())
        {
            tokenQueue.Push(createToken(HaskellLexer.SEMI, "SEMI", next));
        }

        prevWasEndl = false;

        if (indentCount == startIndent)
        {
            pendingDent = false;
        }
    }

    if (pendingDent && prevWasKeyWord && !moduleStartIndent && !ignoreIndent
        && indentCount > getSavedIndent()
        && type != HaskellLexer.NEWLINE
        && type != HaskellLexer.WS
        && type != Antlr4.Runtime.TokenConstants.EOF)
    {
        prevWasKeyWord = false;

        if (prevWasEndl)
        {
            indentStack.Push(new Pair<string, int>(lastKeyWord, indentCount));
            prevWasEndl = false;
        }

        tokenQueue.Push(createToken(HaskellLexer.VOCURLY, "VOCURLY", next));
    }

    if (pendingDent && initialIndentToken == null && HaskellLexer.NEWLINE != type)
    {
        initialIndentToken = next;
    }

    if (next != null && type == HaskellLexer.NEWLINE)
    {
        prevWasEndl = true;
    }

    if (type == HaskellLexer.WHERE
        || type == HaskellLexer.LET
        || type == HaskellLexer.DO
        || type == HaskellLexer.MDO
        || type == HaskellLexer.OF
        || type == HaskellLexer.LCASE
        || type == HaskellLexer.REC)
    {
        // If the next token is HaskellLexer.OCURLY, nestedLevel must be decremented again.
        nestedLevel++;
        prevWasKeyWord = true;
        prevWasEndl = false;
        lastKeyWord = next.Text;

        if (type == HaskellLexer.WHERE)
        {
            if (indentStack.Any()
                && (indentStack.Peek().first().Equals("do")
                    || indentStack.Peek().first().Equals("mdo")))
            {
                tokenQueue.Push(createToken(HaskellLexer.SEMI, "SEMI", next));
                tokenQueue.Push(createToken(HaskellLexer.VCCURLY, "VCCURLY", next));
                indentStack.Pop();
                nestedLevel--;
            }
        }
    }

    if (next != null && type == HaskellLexer.OCURLY)
    {
        prevWasKeyWord = false;
    }

    if (next == null
        || Antlr4.Runtime.TokenConstants.HiddenChannel == next.Channel
        || HaskellLexer.NEWLINE == type)
    {
        return next;
    }

    if (type == HaskellLexer.IN)
    {
        processINToken(next);
    }

    if (type == Antlr4.Runtime.TokenConstants.EOF)
    {
        processEOFToken(next);
    }

    pendingDent = true;
    tokenQueue.Push(next);
    return tokenQueue.DequeueBottom();
}
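// A small, hypothetical driver (not part of the grammar) that runs the generated
// HaskellLexer over a layout-sensitive snippet and prints the token stream, making
// the VOCURLY / SEMI / VCCURLY tokens synthesized by the override above visible.
// It assumes only the standard ANTLR C# runtime types (AntlrInputStream, CommonTokenStream).
using System;
using Antlr4.Runtime;

public static class LayoutDemo
{
    public static void Main()
    {
        var input = new AntlrInputStream("main = do\n  putStrLn \"a\"\n  putStrLn \"b\"\n");
        var lexer = new HaskellLexer(input);
        var tokens = new CommonTokenStream(lexer);
        tokens.Fill();

        foreach (IToken t in tokens.GetTokens())
        {
            // Print the symbolic type name next to the matched text.
            Console.WriteLine($"{lexer.Vocabulary.GetSymbolicName(t.Type)} -> '{t.Text}'");
        }
    }
}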