// Tokenizes a small sample input and verifies the tokens come out with the
// expected types, in order, with nothing left over at the end.
public void IdkSomeWeirdTest()
{
    var source = new PeekBuffer<char>("testing 123 : , ( ) \"asd\"".GetEnumerator());
    var readers = new List<ITokenReader>
    {
        new WhiteSpaceReader(),
        new NameReader(),
        new NumberTokenReader(),
        new StringTokenReader(),
        new SingleSymbolReader(':', TokenType.Colon),
        new SingleSymbolReader(',', TokenType.Comma),
        new SingleSymbolReader('(', TokenType.LeftParen),
        new SingleSymbolReader(')', TokenType.RightParen)
    };
    var tokenizer = new Tokenizer(source, readers);

    TokenType[] expectedTypes =
    {
        TokenType.Name, TokenType.Number, TokenType.Colon, TokenType.Comma,
        TokenType.LeftParen, TokenType.RightParen, TokenType.String
    };
    foreach (var expected in expectedTypes)
    {
        Assert.IsTrue(tokenizer.MoveNext());
        Assert.AreEqual(expected.ToString(), tokenizer.Current.Type.ToString());
    }

    // The whole input has been consumed; no further token may be produced.
    Assert.IsFalse(tokenizer.MoveNext());
}
/// <summary>
/// Reads a numeric literal (digits with at most one decimal point) from the
/// buffer and returns it as a <see cref="TokenType.Number"/> token.
/// </summary>
/// <param name="buffer">The character source, positioned at the first digit.</param>
/// <returns>A number token whose data is the parsed <see cref="double"/> value.</returns>
public TokenInfo ReadToken(PeekBuffer <char> buffer)
{
    bool seenDecimal = false;
    StringBuilder number = new StringBuilder();
    while (buffer.TryPeek(out var nextChar))
    {
        if (char.IsDigit(nextChar))
        {
            number.Append(nextChar);
            buffer.TryRead(out _);
        }
        else if (!seenDecimal && nextChar == '.')
        {
            // Accept only the first '.', so "1.2.3" stops after "1.2".
            seenDecimal = true;
            number.Append(nextChar);
            buffer.TryRead(out _);
        }
        else
        {
            break;
        }
    }
    // Parse with the invariant culture: '.' must always be the decimal
    // separator regardless of the machine's regional settings (the previous
    // culture-sensitive Parse could throw or misparse under e.g. "de-DE").
    return new TokenInfo(TokenType.Number,
        double.Parse(number.ToString(), System.Globalization.CultureInfo.InvariantCulture));
}
// Reading from an exhausted source must report failure rather than throw.
public void TryRead_NoMoreItems()
{
    var empty = new PeekBuffer<char>(string.Empty.GetEnumerator());

    Assert.IsFalse(empty.TryRead(out var _));
}
// Peeks never consume: each pair of items can be previewed at offsets 0 and 1
// and then read back in the same order.
public void PeekBeforeRead()
{
    var buffer = new PeekBuffer<char>("1234".GetEnumerator());

    foreach (var (first, second) in new[] { ('1', '2'), ('3', '4') })
    {
        Assert.IsTrue(buffer.TryPeek(out var item));
        Assert.AreEqual(first, item);
        Assert.IsTrue(buffer.TryPeek(1, out item));
        Assert.AreEqual(second, item);

        Assert.IsTrue(buffer.TryRead(out item));
        Assert.AreEqual(first, item);
        Assert.IsTrue(buffer.TryRead(out item));
        Assert.AreEqual(second, item);
    }
}
// A negative peek offset is invalid. NOTE(review): no assertion here, so the
// expected exception is presumably declared via an attribute on this test
// method (not visible in this chunk) -- confirm.
public void TryPeek_NegativeIndex()
{
    var buffer = new PeekBuffer<char>("1".GetEnumerator());

    buffer.TryPeek(-1, out char _);
}
/// <summary>
/// Returns true when the next two characters are both '/'
/// (the start of a line comment). Does not consume any input.
/// </summary>
public bool CheckToken(PeekBuffer<char> buffer)
{
    if (!buffer.TryPeek(out var first) || !buffer.TryPeek(1, out var second))
    {
        return false;
    }
    return first == '/' && second == '/';
}
// Sequential reads return every item of the source, in order.
public void TryRead_Sequential()
{
    var buffer = new PeekBuffer<char>("123".GetEnumerator());

    foreach (var expected in "123")
    {
        Assert.IsTrue(buffer.TryRead(out var actual));
        Assert.AreEqual(expected, actual);
    }
}
// Peeking at increasing offsets exposes upcoming items without consuming them.
public void TryPeek_Sequential()
{
    var buffer = new PeekBuffer<char>("123".GetEnumerator());

    for (int offset = 0; offset < 3; offset++)
    {
        Assert.IsTrue(buffer.TryPeek(offset, out var item));
        Assert.AreEqual((char)('1' + offset), item);
    }
}
// Peek offsets may be visited out of order; skipped items stay buffered.
public void TryPeek_OutOfOrder()
{
    var buffer = new PeekBuffer<char>("ABC".GetEnumerator());

    Assert.IsTrue(buffer.TryPeek(0, out var item));
    Assert.AreEqual('A', item);

    // Jump two items ahead...
    Assert.IsTrue(buffer.TryPeek(2, out item));
    Assert.AreEqual('C', item);

    // ...then come back to the one that was skipped over.
    Assert.IsTrue(buffer.TryPeek(1, out item));
    Assert.AreEqual('B', item);
}
/// <summary>
/// Asks each still-valid reader whether the upcoming tokens form its
/// production, remembering the first match for a later ReadProduction call.
/// </summary>
/// <param name="tokenStream">The tokens to inspect.</param>
/// <param name="index">Not used by this implementation; kept for interface
/// compatibility.</param>
/// <returns>True when some valid reader recognized a production.</returns>
public bool IsProduction(PeekBuffer<TokenInfo> tokenStream, int index = 0)
{
    for (int readerIndex = 0; readerIndex < productionReaders.Length; readerIndex++)
    {
        if (!validReaders[readerIndex])
        {
            continue;
        }
        if (productionReaders[readerIndex].IsProduction(tokenStream))
        {
            confirmedReader = readerIndex;
            return true;
        }
    }
    return false;
}
/// <summary>
/// Reads a call-style production: a name token, '(', a comma-separated list of
/// expressions, and a closing ')'. Returns null when the stream does not match.
/// </summary>
/// <param name="tokenStream">The token source to consume from.</param>
/// <returns>The assembled production, or null on a malformed input.</returns>
public ProductionInfo ReadProduction(PeekBuffer <TokenInfo> tokenStream) {
    StringBuilder data = new StringBuilder();
    // Consume the name and the '(' that a prior IsProduction check matched.
    tokenStream.TryRead(out var nameToken);
    tokenStream.TryRead(out _);
    data.Append(nameToken.Data.ToString());
    data.Append("(");
    while (tokenStream.TryPeek(out var next)) {
        if (next.Type == TokenType.RightParen) {
            // ')' closes the argument list: consume it and finish.
            tokenStream.TryRead(out _);
            data.Append(")");
            return(new ProductionInfo(data.ToString()));
        }
        // Read next token
        if (!ExpressionReader.FirstToken(next)) { return(null); }
        if (!ExpressionReader.IsProduction(tokenStream)) { return(null); }
        var expression = ExpressionReader.ReadProduction(tokenStream);
        if (expression == null) { return(null); }
        data.Append(expression.Data);
        // The token after an expression should be comma or right-paren
        if (!tokenStream.TryPeek(out next)) { return(null); }
        if (next.Type == TokenType.RightParen) {
            // Leave ')' in the stream; the top of the loop consumes it.
            continue;
        }
        if (next.Type == TokenType.Comma) { tokenStream.TryRead(out _); }
        // NOTE(review): any other token type falls through unconsumed, so the
        // loop retries it as the start of a new expression instead of failing
        // -- confirm a missing separator should not be an error here.
    }
    // Input ended before the closing ')'.
    return(null);
}
/// <summary>
/// Reads a quoted string token: consumes the opening delimiter, then every
/// character up to (and including) the next occurrence of that delimiter.
/// The delimiters themselves are not part of the token data.
/// </summary>
public TokenInfo ReadToken(PeekBuffer<char> buffer)
{
    buffer.TryRead(out var quote);
    var contents = new StringBuilder();
    while (buffer.TryRead(out var current) && current != quote)
    {
        contents.Append(current);
    }
    return new TokenInfo(TokenType.String, contents.ToString());
}
/// <summary>
/// Consumes a run of whitespace characters. Returns null because whitespace
/// never yields a token.
/// </summary>
public TokenInfo ReadToken(PeekBuffer<char> buffer)
{
    while (buffer.TryPeek(out var current) && char.IsWhiteSpace(current))
    {
        buffer.TryRead(out _);
    }
    return null;
}
// With IsStarting set, the tokenizer calls the reader's IsStarting hook first
// (call order 0) and then its CheckToken (call order 1).
public void ReaderCallIsStarting_True()
{
    var reader = new TestReader { IsStarting = true };
    var readers = new List<ITokenReader> { reader };
    var tokenizer = new Tokenizer(new PeekBuffer<char>("abc".GetEnumerator()), readers);

    tokenizer.MoveNext();

    Assert.AreEqual(0, reader.IsStartingCalled);
    Assert.AreEqual(1, reader.CheckTokenCalled);
}
/// <summary>
/// Test stub: records the order in which it was called, consumes a configured
/// number of characters, and optionally fabricates a token.
/// </summary>
public TokenInfo ReadToken(PeekBuffer<char> buffer)
{
    ReadTokenCalled = CallOrder++;

    // Simulate consuming input.
    for (int consumed = 0; consumed < CharsToRead; consumed++)
    {
        buffer.TryRead(out _);
    }

    if (!ReturnTokenInfo)
    {
        return null;
    }
    return Data == null ? new TokenInfo(Type) : new TokenInfo(Type, Data);
}
/// <summary>
/// Reads a name token: the longest run of letters and digits starting at the
/// current position.
/// </summary>
public TokenInfo ReadToken(PeekBuffer<char> buffer)
{
    var identifier = new StringBuilder();
    while (buffer.TryPeek(out var current) && char.IsLetterOrDigit(current))
    {
        buffer.TryRead(out var consumed);
        identifier.Append(consumed);
    }
    return new TokenInfo(TokenType.Name, identifier.ToString());
}
// Readers must be polled in registration order.
public void ReaderCallSequential()
{
    var readers = new[] { new TestReader(), new TestReader(), new TestReader() };
    var tokenizer = new Tokenizer(
        new PeekBuffer<char>("abc".GetEnumerator()),
        new List<ITokenReader>(readers));

    tokenizer.MoveNext();

    // Call-order stamps strictly increase across the three readers.
    Assert.IsTrue(readers[0].IsStartingCalled < readers[1].IsStartingCalled);
    Assert.IsTrue(readers[1].IsStartingCalled < readers[2].IsStartingCalled);
}
/// <summary>
/// Skips a line comment: the two leading '/' characters, the comment text,
/// and the line break(s) that end it. Returns null; comments yield no token.
/// </summary>
public TokenInfo ReadToken(PeekBuffer<char> buffer)
{
    // Discard the "//" that CheckToken matched.
    buffer.TryRead(out _);
    buffer.TryRead(out _);

    bool lineBreakSeen = false;
    while (buffer.TryPeek(out var current))
    {
        if (current == '\r' || current == '\n')
        {
            lineBreakSeen = true;
        }
        else if (lineBreakSeen)
        {
            // First character of the next line: leave it for the next reader.
            break;
        }
        buffer.TryRead(out _);
    }
    return null;
}
/// <summary>
/// Delegates to the reader that a prior IsProduction call confirmed.
/// </summary>
public ProductionInfo ReadProduction(PeekBuffer<TokenInfo> tokenStream)
{
    var reader = productionReaders[confirmedReader];
    return reader.ReadProduction(tokenStream);
}
/// <summary>
/// Consumes the single matched symbol character and emits a token of the
/// configured type.
/// </summary>
public TokenInfo ReadToken(PeekBuffer<char> buffer)
{
    buffer.TryRead(out _);
    return new TokenInfo(type);
}
/// <summary>
/// Test stub: stamps the call order and answers with the configured result.
/// </summary>
public bool CheckToken(PeekBuffer<char> buffer)
{
    CheckTokenCalled = CallOrder++;
    return IsToken;
}
/// <summary>
/// Unconditionally reports that a token starts at the current position.
/// </summary>
public bool CheckToken(PeekBuffer<char> buffer) => true;
/// <summary>
/// Initializes a new <see cref="CppTextReader"/> over the given characters.
/// </summary>
/// <param name="source">The source to read characters from.</param>
public CppTextReader(IEnumerable<char> source)
    => this.source = new PeekBuffer<char>(source);
/// <summary>
/// Consumes one token and wraps its payload in a <see cref="ProductionInfo"/>.
/// </summary>
public ProductionInfo ReadProduction(PeekBuffer<TokenInfo> tokenStream)
{
    tokenStream.TryRead(out var consumed);
    return new ProductionInfo(consumed.Data);
}
/// <summary>
/// Unconditionally reports a match; <paramref name="index"/> is ignored.
/// </summary>
public bool IsProduction(PeekBuffer<TokenInfo> tokenStream, int index = 0) => true;
/// <summary>
/// Initializes a new <see cref="Lexer"/>.
/// </summary>
/// <param name="source">The source <see cref="IEnumerable{PositionedChar}"/>
/// that yields characters with physical positions.</param>
public Lexer(IEnumerable<PositionedChar> source)
    => this.source = new PeekBuffer<PositionedChar>(source);
/// <summary>
/// Matches a name token immediately followed by '(' starting at
/// <paramref name="index"/>. Nothing is consumed from the stream.
/// </summary>
public bool IsProduction(PeekBuffer<TokenInfo> tokenStream, int index = 0)
{
    if (!tokenStream.TryPeek(index, out var name) || name.Type != TokenType.Name)
    {
        return false;
    }
    return tokenStream.TryPeek(index + 1, out var paren)
        && paren.Type == TokenType.LeftParen;
}
/// <summary>
/// Pre-processes the whole input.
/// </summary>
/// <param name="tokens">The <see cref="IEnumerable{Token}"/>s to pre-process.</param>
/// <returns>The <see cref="IEnumerable{Token}"/> of the pre-processed input.</returns>
public IEnumerable<Token> Process(IEnumerable<Token> tokens)
{
    source = new PeekBuffer<Token>(tokens);
    return All();
}
/// <summary>
/// Initializes a new <see cref="Tokenizer"/>.
/// </summary>
/// <param name="input">The buffered character source to tokenize.</param>
/// <param name="tokenReaders">The readers tried, in order, for each token.</param>
public Tokenizer(PeekBuffer<char> input, List<ITokenReader> tokenReaders)
    => (this.input, this.tokenReaders) = (input, tokenReaders);
/// <summary>
/// Initializes a new <see cref="Parser"/> over the given token sequence.
/// </summary>
/// <param name="tokens">The tokens to parse.</param>
public Parser(IEnumerable<Token> tokens)
    => this.tokens = new PeekBuffer<Token>(tokens);