/// <summary>
/// Test step: tokenizes the current test case's query and stores the outcome
/// on the test case for later assertion steps.
/// </summary>
protected void WhenTokenized()
{
    var currentCase = TestCase.Current;

    // The runtime info wraps a positioned string reader over the query under test.
    var runtime = new LexerRuntimeInfo(new LexerStringReader(currentCase.Query, currentCase.Position));

    // Passed by ref — presumably LexerRuntimeInfo is a mutable struct; verify against its declaration.
    currentCase.Result = GetTokenizer(currentCase.Config).Tokenize(ref runtime);
}
// NOTE(review): this definition is truncated in the visible chunk — the method body
// continues past this line, so only the visible prefix is documented here.
/// <summary>
/// Post-validates a successful tokenization result; failures are returned unchanged.
/// The remainder of the check (what counts as a trailing special char) is not visible here.
/// </summary>
protected TokenizationResult EnsureTrailingSpecialChar(ref LexerRuntimeInfo info, TokenizationResult result)
{
    // Failed results need no trailing-character validation — propagate as-is.
    if (result.Success == false)
    {
        return(result);
    }
    // Take the reader by ref to operate on the original (not a copy) of the reader state.
    ref var reader = ref info.Reader;
/// <summary>
/// Consumes a run of whitespace starting at the reader's current position and
/// produces a <see cref="WhitespaceToken"/> covering it; fails when no whitespace is present.
/// </summary>
public override TokenizationResult Tokenize(ref LexerRuntimeInfo info)
{
    var position = info.Reader.CurrentPosition;
    var consumed = info.Reader.ReadTillEndOfWhitespace();

    // Zero consumed characters means the cursor was not on whitespace at all.
    return consumed.Length == 0
        ? NoWhitespaceResult
        : TokenizationResult.Successful(new WhitespaceToken(position, consumed.Length));
}
/// <summary>
/// Matches the longest keyword at the reader's current position via the keyword trie,
/// then verifies the keyword is followed by a special character before accepting it.
/// </summary>
public override TokenizationResult Tokenize(ref LexerRuntimeInfo info)
{
    var startMarker = info.Reader.CaptureCurrentPosition();

    // Guard: no keyword at this position — fail fast.
    if (!_keywordsTrie.TryFind(info.Reader, out var keyword, out var matchedLength))
    {
        return TokenizationResult.Failed();
    }

    var keywordToken = new KeywordToken(startMarker.Value, matchedLength, keyword);

    // A raw trie hit is not enough; the keyword must also end at a token boundary.
    return EnsureTrailingSpecialChar(ref info, TokenizationResult.Successful(keywordToken));
}
/// <summary>
/// Attempts to read a single token from the reader at its current position.
/// </summary>
/// <param name="info">Runtime state (passed by ref) carrying the reader; implementations advance it on success.</param>
/// <returns>A successful result carrying the token, or a failed result when no match is found.</returns>
public abstract TokenizationResult Tokenize(ref LexerRuntimeInfo info);
// NOTE(review): this definition is truncated in the visible chunk — the method body
// continues past this line; only the visible prefix is documented here.
public override TokenizationResult Tokenize(ref LexerRuntimeInfo info)
{
    // Take the reader by ref so subsequent reads advance the original reader state, not a copy.
    ref var reader = ref info.Reader;