Example #1
 public void ShouldAllowStringToEndWithAnEscapedBackslash()
 {
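     // The raw input is "string\\"not string; the escaped backslash inside the
     // string must not escape the closing quote, so the string token ends there.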
     Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("\"string\\\\\"not string")));
     Token token = lexer.Next();
     Assert.AreEqual(TokenType.String, token.Type);
     Assert.AreEqual("\"string\\\\\"", token.Text);
 }
Example #2
		public void ShouldReturnListStartTokenTypeWhenInputIsAnOpenParen()
		{
			Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("(")));
			Token token = lexer.Next();
			Assert.AreEqual(TokenType.ListStart, token.Type);
			Assert.AreEqual("(", token.Text);
		}
Example #3
 public void ShouldReadBackslashUFollowedByFourHexDigitsAsCharacter()
 {
     Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("\\uF04A")));
     Token token = lexer.Next();
     Assert.AreEqual(TokenType.Character, token.Type);
     Assert.AreEqual("\\uF04A", token.Text);
 }
Example #4
 public void ShouldReadBackslashUAsChar()
 {
     Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("\\u")));
     Token token = lexer.Next();
     Assert.AreEqual(TokenType.Character, token.Type);
     Assert.AreEqual("\\u", token.Text);
 }
Example #5
		public void ShouldReturnMapStartTokenTypeWhenInputIsAnOpenCurlyBrace()
		{
			Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("{")));
			Token token = lexer.Next();
			Assert.AreEqual(TokenType.MapStart, token.Type);
			Assert.AreEqual("{", token.Text);
		}
Example #6
		public void ShouldReturnVectorEndTokenTypeWhenInputIsAClosedBracket()
		{
			Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("]")));
			Token token = lexer.Next();
			Assert.AreEqual(TokenType.VectorEnd, token.Type);
			Assert.AreEqual("]", token.Text);
		}
Example #7
		public void ShouldReturnNumberTokenTypeWhenInputIsInvalidNumber()
		{
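			// "123asdf" is not a valid number, but the lexer still consumes the whole
			// run and reports it as a single Number token.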
			var stream = new PushBackCharacterStream(new StringReader("123asdf"));
			Lexer lexer = new Lexer(stream);
			Token token = lexer.Next();
			Assert.AreEqual(TokenType.Number, token.Type);
			Assert.AreEqual("123asdf", token.Text);
		}
Example #8
 public void ShouldReadBackslashABackSlashFAsTwoCharacters()
 {
     Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("\\a\\f")));
     Token token = lexer.Next();
     Assert.AreEqual(TokenType.Character, token.Type);
     Assert.AreEqual("\\a", token.Text);
     token = lexer.Next();
     Assert.AreEqual(TokenType.Character, token.Type);
     Assert.AreEqual("\\f", token.Text);
 }
Example #9
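        // Lexes `input` into a list of tokens, stopping once the end of input is
        // reached or the consumed tokens cover at least `length` characters.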
        public LinkedList<Token> Tokenize(string input, int length)
        {
            Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader(input)));
            Token currentToken = lexer.Next();
            LinkedList<Token> tokenList = new LinkedList<Token>();
            int currentIndex = 0;

            while (currentToken != null && currentIndex < length)
            {
                tokenList.AddLast(currentToken);
                currentIndex += currentToken.Length;
                currentToken = lexer.Next();
            }

            return tokenList;
        }
Example #10
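		// Uncomments the selected lines: leading whitespace tokens are skipped and,
		// if the first significant token on a line is a comment, a single leading
		// comment character is removed; all other lines are kept unchanged.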
		public void Execute()
		{
			List<string> lines = _textBuffer.GetSelectedLines();
			List<string> uncommentedLines = new List<string>();

			foreach (string line in lines)
			{
				Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader(line)));
				Token currentToken = lexer.Next();
				while (currentToken != null && currentToken.Type == TokenType.Whitespace) currentToken = lexer.Next();
				if (currentToken == null) uncommentedLines.Add(line);
				else if (currentToken.Type != TokenType.Comment) uncommentedLines.Add(line);
				else uncommentedLines.Add(line.Remove(currentToken.StartIndex, 1));
			}

			_textBuffer.ReplaceSelectedLines(uncommentedLines);
		}
Example #11
        private void AddNewTokensToBuffer()
        {
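            // Lexes the entire buffer text, appends each token to the tokenized
            // buffer, and raises TokenChanged for every newly added token.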
            Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader(_textBuffer.GetText(0))));
            Token currentToken = lexer.Next();
            LinkedList<IndexToken> newTokens = new LinkedList<IndexToken>();
            int currentIndex = 0;

            while (currentToken != null)
            {
                _tokenizedBuffer.CurrentState.AddLast(currentToken);
                newTokens.AddLast(new IndexToken(currentIndex, currentToken));
                currentIndex += currentToken.Length;
                currentToken = lexer.Next();
            }

            foreach (var t in newTokens) TokenChanged(this, new TokenChangedEventArgs(t));
        }
Example #12
        private void ModifyTokensInBuffer(TextChangeData change)
        {
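            // Re-lexes the buffer from the token preceding the change and walks the
            // old and new token streams in parallel until their boundaries realign
            // past the edited region; the stale tokens are then removed and the
            // freshly lexed replacements inserted, raising TokenChanged for each.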
            IndexTokenNode firstToken = _tokenizedBuffer.CurrentState.FindTokenBeforeIndex(change.Position);
            Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader(_textBuffer.GetText(firstToken.IndexToken.StartIndex))));
            int oldBufferStartIndex = firstToken.IndexToken.StartIndex + change.Delta;
            int newBufferStartIndex = firstToken.IndexToken.StartIndex;
            int endPosition = change.Position + change.LengthOfChangedText;
            LinkedList<LinkedListNode<Token>> oldTokens = new LinkedList<LinkedListNode<Token>>();
            LinkedList<IndexToken> newTokens = new LinkedList<IndexToken>();
            Token newToken = lexer.Next();
            IndexTokenNode oldToken = firstToken;

            while (newBufferStartIndex + newToken.Length != oldBufferStartIndex + oldToken.IndexToken.Token.Length
                || (change.Delta < 0 && oldToken.IndexToken.StartIndex + oldToken.IndexToken.Token.Length < endPosition)
                || (change.Delta > 0 && newBufferStartIndex + newToken.Length < endPosition))
            {
                if (newBufferStartIndex + newToken.Length < oldBufferStartIndex + oldToken.IndexToken.Token.Length)
                {
                    newTokens.AddLast(new IndexToken(newBufferStartIndex, newToken));
                    newBufferStartIndex += newToken.Length;
                    newToken = lexer.Next();
                }
                else
                {
                    oldTokens.AddLast(oldToken.Node);
                    oldBufferStartIndex += oldToken.IndexToken.Token.Length;
                    oldToken = oldToken.Next();
                }
            }

            oldTokens.AddLast(oldToken.Node);
            newTokens.AddLast(new IndexToken(newBufferStartIndex, newToken));
            foreach (var t in newTokens) _tokenizedBuffer.CurrentState.AddBefore(firstToken.Node, t.Token);
            foreach (var t in oldTokens) _tokenizedBuffer.CurrentState.Remove(t);
            foreach (var t in newTokens) TokenChanged(this, new TokenChangedEventArgs(t));
        }
Example #13
		public void ShouldReturnNumberFollowedByCharacter()
		{
			Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("123\\s")));
			Token token = lexer.Next();
			Assert.AreEqual(TokenType.Number, token.Type);
			Assert.AreEqual("123", token.Text);
			token = lexer.Next();
			Assert.AreEqual(TokenType.Character, token.Type);
			Assert.AreEqual("\\s", token.Text);
		}
Example #14
		public void ShouldReadBackslashUFollowedByThreeHexDigitsAndAZAsSingleUCharacterFollowedByASymbol()
		{
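			// "Z" is not a hex digit, so only the bare \u is read as a character token
			// and the remaining "AF9Z" is lexed as a symbol.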
			Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("\\uAF9Z")));
			Token token = lexer.Next();
			Assert.AreEqual(TokenType.Character, token.Type);
			Assert.AreEqual("\\u", token.Text);
			token = lexer.Next();
			Assert.AreEqual(TokenType.Symbol, token.Type);
			Assert.AreEqual("AF9Z", token.Text);
		}
Example #15
		public void ShouldReadBackslashUFollowedByTwoHexDigitsAsSingleUCharacterFollowedByANumber()
		{
			Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("\\u19")));
			Token token = lexer.Next();
			Assert.AreEqual(TokenType.Character, token.Type);
			Assert.AreEqual("\\u", token.Text);
			token = lexer.Next();
			Assert.AreEqual(TokenType.Number, token.Type);
			Assert.AreEqual("19", token.Text);
		}
Example #16
		public void ShouldStopParsingSymbolWhenDoubleQuoteFound()
		{
			var stream = new PushBackCharacterStream(new StringReader("asdf\"str\""));
			Lexer lexer = new Lexer(stream);
			Token token = lexer.Next();
			Assert.AreEqual(TokenType.Symbol, token.Type);
			Assert.AreEqual("asdf", token.Text);

			token = lexer.Next();
			Assert.AreEqual(TokenType.String, token.Type);
			Assert.AreEqual("\"str\"", token.Text);
		}
Example #17
		public void ShouldReturnCommentToEndOfLineOnly()
		{
			var stream = new PushBackCharacterStream(new StringReader("; test\r\n123"));
			Lexer lexer = new Lexer(stream);
			Token token = lexer.Next();
			Assert.AreEqual(TokenType.Comment, token.Type);
			Assert.AreEqual("; test", token.Text);
		}
Example #18
		public void ShouldReturnStringTokenWhenInputIsOnlyADoubleQuote()
		{
			Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("\"")));
			Token token = lexer.Next();
			Assert.AreEqual(TokenType.String, token.Type);
			Assert.AreEqual("\"", token.Text);
		}
Example #19
		public void ShouldReturnBooleanWhenFalseIsInput()
		{
			var stream = new PushBackCharacterStream(new StringReader("false"));
			Lexer lexer = new Lexer(stream);
			Token token = lexer.Next();
			Assert.AreEqual(TokenType.Boolean, token.Type);
			Assert.AreEqual("false", token.Text);
			Assert.IsFalse(stream.HasMore);
		}
Example #20
		public void ShouldReturnRealNumber()
		{
			var stream = new PushBackCharacterStream(new StringReader("123.321"));
			Lexer lexer = new Lexer(stream);
			Token token = lexer.Next();
			Assert.AreEqual(TokenType.Number, token.Type);
			Assert.AreEqual("123.321", token.Text);
		}
Example #21
		public void ShouldReturnTwoSymbolsSeparatedByWhitespace()
		{
			var stream = new PushBackCharacterStream(new StringReader("symbol1 symbol2"));
			Lexer lexer = new Lexer(stream);
			Token token = lexer.Next();
			Assert.AreEqual(TokenType.Symbol, token.Type);
			Assert.AreEqual("symbol1", token.Text);

			token = lexer.Next();
			Assert.AreEqual(TokenType.Whitespace, token.Type);

			token = lexer.Next();
			Assert.AreEqual(TokenType.Symbol, token.Type);
			Assert.AreEqual("symbol2", token.Text);
		}
Example #22
		public void ShouldReturnSymbolImmediatelyFollowedByComment()
		{
			var stream = new PushBackCharacterStream(new StringReader("test;comment"));
			Lexer lexer = new Lexer(stream);
			Token token = lexer.Next();
			Assert.AreEqual(TokenType.Symbol, token.Type);
			Assert.AreEqual("test", token.Text);

			token = lexer.Next();
			Assert.AreEqual(TokenType.Comment, token.Type);
			Assert.AreEqual(";comment", token.Text);
		}
Example #23
		public void ShouldReturnSymbolWhenItHasADot()
		{
			var stream = new PushBackCharacterStream(new StringReader("namespace.test"));
			Lexer lexer = new Lexer(stream);
			Token token = lexer.Next();
			Assert.AreEqual(TokenType.Symbol, token.Type);
			Assert.AreEqual("namespace.test", token.Text);
		}
Example #24
		public void ShouldReturnSymbolWithOnlyASingleAmpersand()
		{
			Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("&")));
			Token token = lexer.Next();
			Assert.AreEqual(TokenType.Symbol, token.Type);
			Assert.AreEqual("&", token.Text);
		}
Example #25
		public void ShouldReturnIgnoreReaderMacro()
		{
			Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("#_(defn")));
			Token token = lexer.Next();
			Assert.AreEqual(TokenType.IgnoreReaderMacro, token.Type);
			Assert.AreEqual("#_", token.Text);
		}
Example #26
		public void ShouldReturnNil()
		{
			var stream = new PushBackCharacterStream(new StringReader("nil"));
			Lexer lexer = new Lexer(stream);
			Token token = lexer.Next();
			Assert.AreEqual(TokenType.Nil, token.Type);
			Assert.AreEqual("nil", token.Text);
			Assert.IsFalse(stream.HasMore);
		}
Example #27
		public void ShouldReturnKeywordWithNoName()
		{
			var stream = new PushBackCharacterStream(new StringReader(":"));
			Lexer lexer = new Lexer(stream);
			Token token = lexer.Next();
			Assert.AreEqual(TokenType.Keyword, token.Type);
			Assert.AreEqual(":", token.Text);
		}
Example #28
		public void ShouldReturnCommentWithTrailingWhitespace()
		{
			var stream = new PushBackCharacterStream(new StringReader("; test text  \r\n"));
			Lexer lexer = new Lexer(stream);
			Token token = lexer.Next();
			Assert.AreEqual(TokenType.Comment, token.Type);
			Assert.AreEqual("; test text  ", token.Text);

			token = lexer.Next();
			Assert.AreEqual(TokenType.Whitespace, token.Type);
			Assert.AreEqual("\r\n", token.Text);
		}
Example #29
		public void ShouldReturnStringForProperlyTerminatingString()
		{
			Lexer lexer = new Lexer(new PushBackCharacterStream(new StringReader("\"asdf\"")));
			Token token = lexer.Next();
			Assert.AreEqual(TokenType.String, token.Type);
			Assert.AreEqual("\"asdf\"", token.Text);
		}
Example #30
		public void ShouldReturnKeywordFollowByListStart()
		{
			var stream = new PushBackCharacterStream(new StringReader(":asdf("));
			Lexer lexer = new Lexer(stream);
			Token token = lexer.Next();
			Assert.AreEqual(TokenType.Keyword, token.Type);
			Assert.AreEqual(":asdf", token.Text);
			Assert.AreEqual(TokenType.ListStart, lexer.Next().Type);
		}