public void ShouldAllowStringToEndWithAnEscapedBackslash()
{
	Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("\"string\\\\\"not string")));
	Token token = lexer.Next();
	Assert.AreEqual(TokenType.String, token.Type);
	Assert.AreEqual("\"string\\\\\"", token.Text);
}
public void ShouldReadBackslashUFollowedByFourHexDigitsAsCharacter()
{
	Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("\\uF04A")));
	Token token = lexer.Next();
	Assert.AreEqual(TokenType.Character, token.Type);
	Assert.AreEqual("\\uF04A", token.Text);
}
public void ShouldReadBackslashUAsChar()
{
	Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("\\u")));
	Token token = lexer.Next();
	Assert.AreEqual(TokenType.Character, token.Type);
	Assert.AreEqual("\\u", token.Text);
}
public void ShouldReadBackslashABackSlashFAsTwoCharacters()
{
	Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("\\a\\f")));
	Token token = lexer.Next();
	Assert.AreEqual(TokenType.Character, token.Type);
	Assert.AreEqual("\\a", token.Text);
	token = lexer.Next();
	Assert.AreEqual(TokenType.Character, token.Type);
	Assert.AreEqual("\\f", token.Text);
}
public LinkedList<Token> Tokenize(string input, int length)
{
	Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader(input)));
	Token currentToken = lexer.Next();
	LinkedList<Token> tokenList = new LinkedList<Token>();
	int currentIndex = 0;

	// Collect tokens until the lexer is exhausted or the requested length has been covered.
	while (currentToken != null && currentIndex < length)
	{
		tokenList.AddLast(currentToken);
		currentIndex += currentToken.Length;
		currentToken = lexer.Next();
	}

	return tokenList;
}
public List<string> Execute(List<string> lines)
{
	var uncommentedLines = new List<string>();

	foreach (var line in lines)
	{
		var lexer = new Lexer(new PushBackCharacterStream(new StringReader(line)));

		// Skip leading whitespace to find the first significant token on the line.
		var currentToken = lexer.Next();
		while (currentToken != null && currentToken.Type == TokenType.Whitespace) currentToken = lexer.Next();

		// Keep blank and non-commented lines unchanged; otherwise remove one
		// character (the leading semicolon) at the start of the comment token.
		if (currentToken == null) uncommentedLines.Add(line);
		else if (currentToken.Type != TokenType.Comment) uncommentedLines.Add(line);
		else uncommentedLines.Add(line.Remove(currentToken.StartIndex, 1));
	}

	return uncommentedLines;
}
private void AddNewTokensToBuffer()
{
	// Lex the entire text buffer from the start.
	var lexer = new Lexer(new PushBackCharacterStream(new StringReader(_textBuffer.GetText(0))));
	var currentToken = lexer.Next();
	var newTokens = new LinkedList<IndexToken>();
	var currentIndex = 0;

	// Append every token to the tokenized buffer, remembering its start index.
	while (currentToken != null)
	{
		_tokenizedBuffer.CurrentState.AddLast(currentToken);
		newTokens.AddLast(new IndexToken(currentIndex, currentToken));
		currentIndex += currentToken.Length;
		currentToken = lexer.Next();
	}

	// Notify listeners about each token that was added.
	foreach (var t in newTokens) TokenChanged(this, new TokenChangedEventArgs(t));
}
private void ModifyTokensInBuffer(TextChangeData change)
{
	// Re-lex starting from the token immediately before the change.
	var firstToken = _tokenizedBuffer.CurrentState.FindTokenBeforeIndex(change.Position);
	var lexer = new Lexer(new PushBackCharacterStream(new StringReader(_textBuffer.GetText(firstToken.IndexToken.StartIndex))));

	var oldBufferStartIndex = firstToken.IndexToken.StartIndex + change.Delta;
	var newBufferStartIndex = firstToken.IndexToken.StartIndex;
	var endPosition = change.Position + change.LengthOfChangedText;
	var oldTokens = new LinkedList<LinkedListNode<Token>>();
	var newTokens = new LinkedList<IndexToken>();
	var newToken = lexer.Next();
	var oldToken = firstToken;

	// Walk the new token stream and the old token list in parallel until both
	// end at the same buffer position (old positions shifted by the change delta)
	// and the changed region has been passed.
	while (newBufferStartIndex + newToken.Length != oldBufferStartIndex + oldToken.IndexToken.Token.Length
		|| (change.Delta < 0 && oldToken.IndexToken.StartIndex + oldToken.IndexToken.Token.Length < endPosition)
		|| (change.Delta > 0 && newBufferStartIndex + newToken.Length < endPosition))
	{
		if (newBufferStartIndex + newToken.Length < oldBufferStartIndex + oldToken.IndexToken.Token.Length)
		{
			// The new token ends first: record it and advance the lexer.
			newTokens.AddLast(new IndexToken(newBufferStartIndex, newToken));
			newBufferStartIndex += newToken.Length;
			newToken = lexer.Next();
		}
		else
		{
			// The old token ends first: mark it for removal and advance.
			oldTokens.AddLast(oldToken.Node);
			oldBufferStartIndex += oldToken.IndexToken.Token.Length;
			oldToken = oldToken.Next();
		}
	}

	oldTokens.AddLast(oldToken.Node);
	newTokens.AddLast(new IndexToken(newBufferStartIndex, newToken));

	// Splice the re-lexed tokens in place of the stale ones and raise change events.
	foreach (var t in newTokens) _tokenizedBuffer.CurrentState.AddBefore(firstToken.Node, t.Token);
	foreach (var t in oldTokens) _tokenizedBuffer.CurrentState.Remove(t);
	foreach (var t in newTokens) TokenChanged(this, new TokenChangedEventArgs(t));
}
public void ShouldReturnSymbolWhenItHasADot()
{
	var stream = new PushBackCharacterStream(new StringReader("namespace.test"));
	Lexer lexer = new Lexer(stream);
	Token token = lexer.Next();
	Assert.AreEqual(TokenType.Symbol, token.Type);
	Assert.AreEqual("namespace.test", token.Text);
}
public void ShouldReturnSymbolImmediatelyFollowedByComment()
{
	var stream = new PushBackCharacterStream(new StringReader("test;comment"));
	Lexer lexer = new Lexer(stream);
	Token token = lexer.Next();
	Assert.AreEqual(TokenType.Symbol, token.Type);
	Assert.AreEqual("test", token.Text);
	token = lexer.Next();
	Assert.AreEqual(TokenType.Comment, token.Type);
	Assert.AreEqual(";comment", token.Text);
}
public void ShouldReturnSymbolFollowedByCharacter()
{
	Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("asdf\\s")));
	Token token = lexer.Next();
	Assert.AreEqual(TokenType.Symbol, token.Type);
	Assert.AreEqual("asdf", token.Text);
	token = lexer.Next();
	Assert.AreEqual(TokenType.Character, token.Type);
	Assert.AreEqual("\\s", token.Text);
}
public void ShouldReturnStringTokenWhenInputIsOnlyADoubleQuote()
{
	Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("\"")));
	Token token = lexer.Next();
	Assert.AreEqual(TokenType.String, token.Type);
	Assert.AreEqual("\"", token.Text);
}
public void ShouldReturnKeywordWithNoName()
{
	var stream = new PushBackCharacterStream(new StringReader(":"));
	Lexer lexer = new Lexer(stream);
	Token token = lexer.Next();
	Assert.AreEqual(TokenType.Keyword, token.Type);
	Assert.AreEqual(":", token.Text);
}
public void ShouldStopParsingSymbolWhenDoubleQuoteFound()
{
	var stream = new PushBackCharacterStream(new StringReader("asdf\"str\""));
	Lexer lexer = new Lexer(stream);
	Token token = lexer.Next();
	Assert.AreEqual(TokenType.Symbol, token.Type);
	Assert.AreEqual("asdf", token.Text);
	token = lexer.Next();
	Assert.AreEqual(TokenType.String, token.Type);
	Assert.AreEqual("\"str\"", token.Text);
}
public void ShouldReturnVectorStartTokenTypeWhenInputIsAnOpenBracket()
{
	Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("[")));
	Token token = lexer.Next();
	Assert.AreEqual(TokenType.VectorStart, token.Type);
	Assert.AreEqual("[", token.Text);
}
public void ShouldReturnNumberTokenTypeWhenInputIsNumber()
{
	var stream = new PushBackCharacterStream(new StringReader("123"));
	Lexer lexer = new Lexer(stream);
	Token token = lexer.Next();
	Assert.AreEqual(TokenType.Number, token.Type);
	Assert.AreEqual("123", token.Text);
	Assert.IsFalse(stream.HasMore);
}
public void ShouldReturnNumberFollowedByWhitespaceAndAString()
{
	var stream = new PushBackCharacterStream(new StringReader("123 \"asdf\""));
	Lexer lexer = new Lexer(stream);
	Token token = lexer.Next();
	Assert.AreEqual(TokenType.Number, token.Type);
	Assert.AreEqual("123", token.Text);
	Assert.AreEqual(0, token.StartIndex);
	token = lexer.Next();
	Assert.AreEqual(TokenType.Whitespace, token.Type);
	Assert.AreEqual(" ", token.Text);
	Assert.AreEqual(3, token.StartIndex);
	token = lexer.Next();
	Assert.AreEqual(TokenType.String, token.Type);
	Assert.AreEqual("\"asdf\"", token.Text);
	Assert.AreEqual(4, token.StartIndex);
}
public void ShouldReturnNullWhenAtEndOfStream()
{
	Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("")));
	Assert.IsNull(lexer.Next());
}
public void ShouldReturnNil()
{
	var stream = new PushBackCharacterStream(new StringReader("nil"));
	Lexer lexer = new Lexer(stream);
	Token token = lexer.Next();
	Assert.AreEqual(TokenType.Nil, token.Type);
	Assert.AreEqual("nil", token.Text);
	Assert.IsFalse(stream.HasMore);
}
public void ShouldReturnMapEndTokenTypeWhenInputIsAClosedCurlyBrace()
{
	Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("}")));
	Token token = lexer.Next();
	Assert.AreEqual(TokenType.MapEnd, token.Type);
	Assert.AreEqual("}", token.Text);
}
public void ShouldReturnListEndTokenTypeWhenInputIsAClosedParen()
{
	Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader(")")));
	Token token = lexer.Next();
	Assert.AreEqual(TokenType.ListEnd, token.Type);
	Assert.AreEqual(")", token.Text);
}
public void ShouldReturnSymbolWithOnlyASingleAmpersand()
{
	Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("&")));
	Token token = lexer.Next();
	Assert.AreEqual(TokenType.Symbol, token.Type);
	Assert.AreEqual("&", token.Text);
}
public void ShouldReturnTwoSymbolsSeparatedByWhitespace()
{
	var stream = new PushBackCharacterStream(new StringReader("symbol1 symbol2"));
	Lexer lexer = new Lexer(stream);
	Token token = lexer.Next();
	Assert.AreEqual(TokenType.Symbol, token.Type);
	Assert.AreEqual("symbol1", token.Text);
	token = lexer.Next();
	Assert.AreEqual(TokenType.Whitespace, token.Type);
	token = lexer.Next();
	Assert.AreEqual(TokenType.Symbol, token.Type);
	Assert.AreEqual("symbol2", token.Text);
}
public void ShouldReturnIgnoreReaderMacro()
{
	Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("#_(defn")));
	Token token = lexer.Next();
	Assert.AreEqual(TokenType.IgnoreReaderMacro, token.Type);
	Assert.AreEqual("#_", token.Text);
}
public void ShouldReturnWhitespaceForTabsSpacesCommasAndReturnCharacters()
{
	string input = " \t \r\n , ";
	Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader(input)));
	Token token = lexer.Next();
	Assert.AreEqual(TokenType.Whitespace, token.Type);
	Assert.AreEqual(input, token.Text);
}
public void ShouldReturnStringThatDoesNotTerminateOnBackslashQuote()
{
	Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("\"asdf\\\"asdf\"")));
	Token token = lexer.Next();
	Assert.AreEqual(TokenType.String, token.Type);
	Assert.AreEqual("\"asdf\\\"asdf\"", token.Text);
}
public void ShouldReturnStringForRunOnString()
{
	Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("\"asdfasdf")));
	Token token = lexer.Next();
	Assert.AreEqual(TokenType.String, token.Type);
	Assert.AreEqual("\"asdfasdf", token.Text);
}
public void ShouldReturnKeywordFollowedByListStart()
{
	var stream = new PushBackCharacterStream(new StringReader(":asdf("));
	Lexer lexer = new Lexer(stream);
	Token token = lexer.Next();
	Assert.AreEqual(TokenType.Keyword, token.Type);
	Assert.AreEqual(":asdf", token.Text);
	Assert.AreEqual(TokenType.ListStart, lexer.Next().Type);
}
public void ShouldReadBackslashUFollowedByThreeHexDigitsAndAZAsUCharacterFollowedByASymbol()
{
	Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("\\uAF9Z")));
	Token token = lexer.Next();
	Assert.AreEqual(TokenType.Character, token.Type);
	Assert.AreEqual("\\u", token.Text);
	token = lexer.Next();
	Assert.AreEqual(TokenType.Symbol, token.Type);
	Assert.AreEqual("AF9Z", token.Text);
}
public void ShouldReturnRealNumber()
{
	var stream = new PushBackCharacterStream(new StringReader("123.321"));
	Lexer lexer = new Lexer(stream);
	Token token = lexer.Next();
	Assert.AreEqual(TokenType.Number, token.Type);
	Assert.AreEqual("123.321", token.Text);
}