public void ShouldAllowStringToEndWithAnEscapedBackslash()
{
    // A string whose final character is an escaped backslash must still close
    // at the quote; trailing text belongs to the next token.
    var reader = new StringReader("\"string\\\\\"not string");
    var lexer = new Lexer(new PushBackCharacterStream(reader));

    var result = lexer.Next();

    Assert.AreEqual(TokenType.String, result.Type);
    Assert.AreEqual("\"string\\\\\"", result.Text);
}
public void ShouldReturnListStartTokenTypeWhenInputIsAnOpenParen()
{
    // A lone open paren lexes as a ListStart token.
    var lexer = new Lexer(new PushBackCharacterStream(new StringReader("(")));

    var tok = lexer.Next();

    Assert.AreEqual(TokenType.ListStart, tok.Type);
    Assert.AreEqual("(", tok.Text);
}
public void ShouldReadBackslashUFollowedByFourHexDigitsAsCharacter()
{
    // \u plus exactly four hex digits is a single unicode character literal.
    var source = new StringReader("\\uF04A");
    var lexer = new Lexer(new PushBackCharacterStream(source));

    var tok = lexer.Next();

    Assert.AreEqual(TokenType.Character, tok.Type);
    Assert.AreEqual("\\uF04A", tok.Text);
}
public void ShouldReadBackslashUAsChar()
{
    // A bare \u with no hex digits is still a character token on its own.
    var lexer = new Lexer(new PushBackCharacterStream(new StringReader("\\u")));

    var tok = lexer.Next();

    Assert.AreEqual(TokenType.Character, tok.Type);
    Assert.AreEqual("\\u", tok.Text);
}
public void ShouldReturnMapStartTokenTypeWhenInputIsAnOpenCurlyBrace()
{
    // An open curly brace lexes as a MapStart token.
    var reader = new StringReader("{");
    var lexer = new Lexer(new PushBackCharacterStream(reader));

    var result = lexer.Next();

    Assert.AreEqual(TokenType.MapStart, result.Type);
    Assert.AreEqual("{", result.Text);
}
public void ShouldReturnVectorEndTokenTypeWhenInputIsAClosedBracket()
{
    // A closing square bracket lexes as a VectorEnd token.
    var lexer = new Lexer(new PushBackCharacterStream(new StringReader("]")));

    var result = lexer.Next();

    Assert.AreEqual(TokenType.VectorEnd, result.Type);
    Assert.AreEqual("]", result.Text);
}
public void ShouldReturnNumberTokenTypeWhenInputIsInvalidNumber()
{
    // Digits followed by letters still come back as one Number token —
    // the lexer does not validate numeric syntax.
    var lexer = new Lexer(new PushBackCharacterStream(new StringReader("123asdf")));

    var tok = lexer.Next();

    Assert.AreEqual(TokenType.Number, tok.Type);
    Assert.AreEqual("123asdf", tok.Text);
}
public void ShouldReadBackslashABackSlashFAsTwoCharacters()
{
    // Two adjacent character literals yield two separate Character tokens.
    var reader = new StringReader("\\a\\f");
    var lexer = new Lexer(new PushBackCharacterStream(reader));

    var first = lexer.Next();
    Assert.AreEqual(TokenType.Character, first.Type);
    Assert.AreEqual("\\a", first.Text);

    var second = lexer.Next();
    Assert.AreEqual(TokenType.Character, second.Type);
    Assert.AreEqual("\\f", second.Text);
}
public LinkedList<Token> Tokenize(string input, int length)
{
    // Lex tokens from the start of input, stopping once the accumulated
    // character count reaches length or the lexer runs out of tokens.
    var lexer = new Lexer(new PushBackCharacterStream(new StringReader(input)));
    var tokens = new LinkedList<Token>();
    int consumed = 0;

    for (var tok = lexer.Next(); tok != null && consumed < length; tok = lexer.Next())
    {
        tokens.AddLast(tok);
        consumed += tok.Length;
    }

    return tokens;
}
public void Execute()
{
    // Uncomment the selected lines: for each line whose first non-whitespace
    // token is a comment, strip one character at the comment's start index;
    // every other line is passed through unchanged.
    var selected = _textBuffer.GetSelectedLines();
    var result = new List<string>();

    foreach (var line in selected)
    {
        var lexer = new Lexer(new PushBackCharacterStream(new StringReader(line)));
        var tok = lexer.Next();

        // Skip leading whitespace tokens to find the first significant token.
        while (tok != null && tok.Type == TokenType.Whitespace)
        {
            tok = lexer.Next();
        }

        if (tok != null && tok.Type == TokenType.Comment)
        {
            result.Add(line.Remove(tok.StartIndex, 1));
        }
        else
        {
            result.Add(line);
        }
    }

    _textBuffer.ReplaceSelectedLines(result);
}
/// <summary>
/// Lexes the entire text buffer from index 0, appends every token to the
/// tokenized buffer's current state, and then raises <c>TokenChanged</c> once
/// per token with its starting character index.
/// </summary>
/// <remarks>
/// Events are collected first and fired only after the full pass, so handlers
/// observe a completely populated token buffer.
/// NOTE(review): <c>TokenChanged(this, ...)</c> is invoked unconditionally —
/// presumably a subscriber is always wired up; a null delegate would throw.
/// Verify against callers.
/// </remarks>
private void AddNewTokensToBuffer() { Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader(_textBuffer.GetText(0)))); Token currentToken = lexer.Next(); LinkedList<IndexToken> newTokens = new LinkedList<IndexToken>(); int currentIndex = 0; while (currentToken != null) { _tokenizedBuffer.CurrentState.AddLast(currentToken); newTokens.AddLast(new IndexToken(currentIndex, currentToken)); currentIndex += currentToken.Length; currentToken = lexer.Next(); } foreach (var t in newTokens) TokenChanged(this, new TokenChangedEventArgs(t)); }
/// <summary>
/// Re-lexes the region of the buffer affected by <paramref name="change"/> and
/// reconciles the token list: stale tokens overlapping the edit are removed,
/// freshly lexed tokens are inserted in their place, and <c>TokenChanged</c>
/// fires once per new token.
/// </summary>
/// <remarks>
/// Starts from the token preceding the change position, then walks the old and
/// new token streams in lock-step — advancing whichever side ends at the
/// earlier character index — until both streams re-align past the changed
/// region (the loop condition also forces progress past <c>endPosition</c> for
/// insertions and deletions via <c>change.Delta</c>). New tokens are inserted
/// before the old ones are removed so the list never loses its anchor node.
/// NOTE(review): assumes <c>lexer.Next()</c> does not return null before the
/// streams re-align — an edit at end-of-buffer may dereference a null
/// <c>newToken</c>; confirm against how callers constrain the change data.
/// NOTE(review): as in AddNewTokensToBuffer, <c>TokenChanged</c> is invoked
/// unconditionally — verify a subscriber is always attached.
/// </remarks>
private void ModifyTokensInBuffer(TextChangeData change) { IndexTokenNode firstToken = _tokenizedBuffer.CurrentState.FindTokenBeforeIndex(change.Position); Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader(_textBuffer.GetText(firstToken.IndexToken.StartIndex)))); int oldBufferStartIndex = firstToken.IndexToken.StartIndex + change.Delta; int newBufferStartIndex = firstToken.IndexToken.StartIndex; int endPosition = change.Position + change.LengthOfChangedText; LinkedList<LinkedListNode<Token>> oldTokens = new LinkedList<LinkedListNode<Token>>(); LinkedList<IndexToken> newTokens = new LinkedList<IndexToken>(); Token newToken = lexer.Next(); IndexTokenNode oldToken = firstToken; while (newBufferStartIndex + newToken.Length != oldBufferStartIndex + oldToken.IndexToken.Token.Length || (change.Delta < 0 && oldToken.IndexToken.StartIndex + oldToken.IndexToken.Token.Length < endPosition) || (change.Delta > 0 && newBufferStartIndex + newToken.Length < endPosition)) { if (newBufferStartIndex + newToken.Length < oldBufferStartIndex + oldToken.IndexToken.Token.Length) { newTokens.AddLast(new IndexToken(newBufferStartIndex, newToken)); newBufferStartIndex += newToken.Length; newToken = lexer.Next(); } else { oldTokens.AddLast(oldToken.Node); oldBufferStartIndex += oldToken.IndexToken.Token.Length; oldToken = oldToken.Next(); } } oldTokens.AddLast(oldToken.Node); newTokens.AddLast(new IndexToken(newBufferStartIndex, newToken)); foreach (var t in newTokens) _tokenizedBuffer.CurrentState.AddBefore(firstToken.Node, t.Token); foreach (var t in oldTokens) _tokenizedBuffer.CurrentState.Remove(t); foreach (var t in newTokens) TokenChanged(this, new TokenChangedEventArgs(t)); }
public void ShouldReturnNumberFollowedByCharacter()
{
    // A number abutting a character literal splits into two tokens.
    var lexer = new Lexer(new PushBackCharacterStream(new StringReader("123\\s")));

    var number = lexer.Next();
    Assert.AreEqual(TokenType.Number, number.Type);
    Assert.AreEqual("123", number.Text);

    var character = lexer.Next();
    Assert.AreEqual(TokenType.Character, character.Type);
    Assert.AreEqual("\\s", character.Text);
}
public void ShouldReadBackslashUFollowedByThreeHexDigitsAndAZAsSingleUCharacterFollowedByASymbol()
{
    // Only four full hex digits extend \u; a non-hex char anywhere in the
    // quartet makes the lexer fall back to a bare \u plus a symbol.
    var reader = new StringReader("\\uAF9Z");
    var lexer = new Lexer(new PushBackCharacterStream(reader));

    var character = lexer.Next();
    Assert.AreEqual(TokenType.Character, character.Type);
    Assert.AreEqual("\\u", character.Text);

    var symbol = lexer.Next();
    Assert.AreEqual(TokenType.Symbol, symbol.Type);
    Assert.AreEqual("AF9Z", symbol.Text);
}
public void ShouldReadBackslashUFollowedByTwoHexDigitsAsSingleUCharacterFollowedByANumber()
{
    // Fewer than four hex digits after \u: the lexer emits a bare \u
    // character and re-lexes the digits as a number.
    var lexer = new Lexer(new PushBackCharacterStream(new StringReader("\\u19")));

    var character = lexer.Next();
    Assert.AreEqual(TokenType.Character, character.Type);
    Assert.AreEqual("\\u", character.Text);

    var number = lexer.Next();
    Assert.AreEqual(TokenType.Number, number.Type);
    Assert.AreEqual("19", number.Text);
}
public void ShouldStopParsingSymbolWhenDoubleQuoteFound()
{
    // A double quote terminates a symbol and starts a string token.
    var lexer = new Lexer(new PushBackCharacterStream(new StringReader("asdf\"str\"")));

    var symbol = lexer.Next();
    Assert.AreEqual(TokenType.Symbol, symbol.Type);
    Assert.AreEqual("asdf", symbol.Text);

    var str = lexer.Next();
    Assert.AreEqual(TokenType.String, str.Type);
    Assert.AreEqual("\"str\"", str.Text);
}
public void ShouldReturnCommentToEndOfLineOnly()
{
    // A comment token stops before the line terminator; following content
    // is left for subsequent tokens.
    var reader = new StringReader("; test\r\n123");
    var lexer = new Lexer(new PushBackCharacterStream(reader));

    var tok = lexer.Next();

    Assert.AreEqual(TokenType.Comment, tok.Type);
    Assert.AreEqual("; test", tok.Text);
}
public void ShouldReturnStringTokenWhenInputIsOnlyADoubleQuote()
{
    // An unterminated string (a lone quote) still lexes as a String token.
    var lexer = new Lexer(new PushBackCharacterStream(new StringReader("\"")));

    var result = lexer.Next();

    Assert.AreEqual(TokenType.String, result.Type);
    Assert.AreEqual("\"", result.Text);
}
public void ShouldReturnBooleanWhenFalseIsInput()
{
    // "false" lexes as a Boolean and the stream is fully consumed.
    var stream = new PushBackCharacterStream(new StringReader("false"));
    var lexer = new Lexer(stream);

    var tok = lexer.Next();

    Assert.AreEqual(TokenType.Boolean, tok.Type);
    Assert.AreEqual("false", tok.Text);
    Assert.IsFalse(stream.HasMore);
}
public void ShouldReturnRealNumber()
{
    // A decimal point is part of a single Number token.
    var lexer = new Lexer(new PushBackCharacterStream(new StringReader("123.321")));

    var tok = lexer.Next();

    Assert.AreEqual(TokenType.Number, tok.Type);
    Assert.AreEqual("123.321", tok.Text);
}
public void ShouldReturnTwoSymbolsSeparatedByWhitespace()
{
    // Whitespace between symbols is a token in its own right.
    var reader = new StringReader("symbol1 symbol2");
    var lexer = new Lexer(new PushBackCharacterStream(reader));

    var first = lexer.Next();
    Assert.AreEqual(TokenType.Symbol, first.Type);
    Assert.AreEqual("symbol1", first.Text);

    var gap = lexer.Next();
    Assert.AreEqual(TokenType.Whitespace, gap.Type);

    var second = lexer.Next();
    Assert.AreEqual(TokenType.Symbol, second.Type);
    Assert.AreEqual("symbol2", second.Text);
}
public void ShouldReturnSymbolImmediatelyFollowedByComment()
{
    // A semicolon ends a symbol and begins a comment with no gap needed.
    var lexer = new Lexer(new PushBackCharacterStream(new StringReader("test;comment")));

    var symbol = lexer.Next();
    Assert.AreEqual(TokenType.Symbol, symbol.Type);
    Assert.AreEqual("test", symbol.Text);

    var comment = lexer.Next();
    Assert.AreEqual(TokenType.Comment, comment.Type);
    Assert.AreEqual(";comment", comment.Text);
}
public void ShouldReturnSymbolWhenItHasADot()
{
    // Dots are legal inside symbols (e.g. namespaced names).
    var lexer = new Lexer(new PushBackCharacterStream(new StringReader("namespace.test")));

    var tok = lexer.Next();

    Assert.AreEqual(TokenType.Symbol, tok.Type);
    Assert.AreEqual("namespace.test", tok.Text);
}
public void ShouldReturnSymbolWithOnlyASingleAmpersand()
{
    // A lone ampersand is a valid one-character symbol.
    var reader = new StringReader("&");
    var lexer = new Lexer(new PushBackCharacterStream(reader));

    var result = lexer.Next();

    Assert.AreEqual(TokenType.Symbol, result.Type);
    Assert.AreEqual("&", result.Text);
}
public void ShouldReturnIgnoreReaderMacro()
{
    // #_ is the ignore reader macro; only those two characters form the token.
    var lexer = new Lexer(new PushBackCharacterStream(new StringReader("#_(defn")));

    var tok = lexer.Next();

    Assert.AreEqual(TokenType.IgnoreReaderMacro, tok.Type);
    Assert.AreEqual("#_", tok.Text);
}
public void ShouldReturnNil()
{
    // "nil" lexes as the Nil token and exhausts the stream.
    var stream = new PushBackCharacterStream(new StringReader("nil"));
    var lexer = new Lexer(stream);

    var tok = lexer.Next();

    Assert.AreEqual(TokenType.Nil, tok.Type);
    Assert.AreEqual("nil", tok.Text);
    Assert.IsFalse(stream.HasMore);
}
public void ShouldReturnKeywordWithNoName()
{
    // A bare colon is a Keyword token with an empty name.
    var lexer = new Lexer(new PushBackCharacterStream(new StringReader(":")));

    var tok = lexer.Next();

    Assert.AreEqual(TokenType.Keyword, tok.Type);
    Assert.AreEqual(":", tok.Text);
}
public void ShouldReturnCommentWithTrailingWhitespace()
{
    // Trailing spaces belong to the comment; the CRLF is a separate
    // whitespace token.
    var reader = new StringReader("; test text \r\n");
    var lexer = new Lexer(new PushBackCharacterStream(reader));

    var comment = lexer.Next();
    Assert.AreEqual(TokenType.Comment, comment.Type);
    Assert.AreEqual("; test text ", comment.Text);

    var newline = lexer.Next();
    Assert.AreEqual(TokenType.Whitespace, newline.Type);
    Assert.AreEqual("\r\n", newline.Text);
}
public void ShouldReturnStringForProperlyTerminatingString()
{
    // A quoted, closed string lexes as one String token including quotes.
    var lexer = new Lexer(new PushBackCharacterStream(new StringReader("\"asdf\"")));

    var result = lexer.Next();

    Assert.AreEqual(TokenType.String, result.Type);
    Assert.AreEqual("\"asdf\"", result.Text);
}
public void ShouldReturnKeywordFollowByListStart()
{
    // An open paren terminates a keyword and is lexed as ListStart.
    var lexer = new Lexer(new PushBackCharacterStream(new StringReader(":asdf(")));

    var keyword = lexer.Next();
    Assert.AreEqual(TokenType.Keyword, keyword.Type);
    Assert.AreEqual(":asdf", keyword.Text);

    Assert.AreEqual(TokenType.ListStart, lexer.Next().Type);
}