private static TokenSequence LayoutTokens(TokenSequence tokens)
{
    // Converts significant indentation (Indent tokens) into explicit
    // BeginBlock/EndBlock tokens. Open indentation widths are tracked on a
    // stack whose bottom element is always 0 (the outermost level).
    TokenSequence result = new TokenSequence();
    Stack<int> levels = new Stack<int>();
    levels.Push(0);

    while(!tokens.Empty)
    {
        Token current = tokens.RemoveFirst();

        // Non-indent tokens pass through unchanged.
        if(current.Type != TokenType.Indent)
        {
            result.Add(current);
            continue;
        }

        int width = current.Lexeme.Length;

        // Same indentation as the current level: nothing to emit.
        if(width == levels.Peek())
            continue;

        if(width > levels.Peek())
        {
            // Deeper indentation opens a new block.
            levels.Push(width);
            result.Add(new Token(TokenType.BeginBlock, current.Location));
        }
        else
        {
            // Shallower indentation closes blocks until the width matches
            // an open level (or the outermost level is reached).
            while(levels.Peek() != 0 && levels.Peek() != width)
            {
                levels.Pop();
                result.Add(new Token(TokenType.EndBlock, current.Location));
            }
        }
    }

    // Close any blocks still open at end of input; there is no source
    // location to attach at this point.
    while(levels.Peek() != 0)
    {
        levels.Pop();
        result.Add(new Token(TokenType.EndBlock, null));
    }

    return result;
}
private static TokenSequence OptimizeTokens(TokenSequence tokens)
{
    // Performs two token-stream simplifications:
    //   1. An Indent token immediately followed by one or more EndStatement
    //      tokens marks a blank/whitespace-only line; the Indent and the
    //      EndStatements are all dropped.
    //   2. A run of consecutive EndStatement tokens is collapsed into a
    //      single EndStatement.
    TokenSequence optimizedTokens = new TokenSequence();

    while(!tokens.Empty)
    {
        Token token = tokens.RemoveFirst();

        if(token.Type == TokenType.Indent && !tokens.Empty && tokens.First.Type == TokenType.EndStatement)
        {
            // Blank line: discard the Indent and every trailing EndStatement.
            while(!tokens.Empty && tokens.First.Type == TokenType.EndStatement)
                tokens.RemoveFirst();
        }
        else if(token.Type == TokenType.EndStatement && !tokens.Empty && tokens.First.Type == TokenType.EndStatement)
        {
            while(!tokens.Empty && tokens.First.Type == TokenType.EndStatement)
                tokens.RemoveFirst();

            // Fix: keep the location of the first EndStatement in the run.
            // The previous code emitted a token with a null location, which
            // discarded position information needed for diagnostics.
            optimizedTokens.Add(new Token(TokenType.EndStatement, token.Location));
        }
        else
        {
            optimizedTokens.Add(token);
        }
    }

    return optimizedTokens;
}
public void RemoveFirst()
{
    // Arrange: a sequence of three distinct tokens.
    TokenSequence sequence = new TokenSequence();
    sequence.Add(new Token(TokenType.BeginBlock, null));
    sequence.Add(new Token(TokenType.Colon, null));
    sequence.Add(new Token(TokenType.Comma, null));

    // Act: remove the token at the front of the sequence.
    Token removed = sequence.RemoveFirst();

    // Assert: the front token is returned and the sequence shrinks by one.
    Assert.AreEqual(new Token(TokenType.BeginBlock, null), removed);
    Assert.AreEqual(2, sequence.Count);
}