Example #1
        private static void AssertTokenizationValues(TokenizationResult result, params object[] values)
        {
            var tokens = result.Tokens.ToArray();

            Assert.AreEqual(values.Length, tokens.Length);
            CollectionAssert.AreEqual(values, tokens.Select(x => x.GetObjectValue()).ToArray());
        }
Example #2
        private static void AssertTokenizationTypes(TokenizationResult result, params Type[] types)
        {
            Assert.IsNull(result.Exception);
            Assert.IsTrue(result.UnmatchedCharactersLeft == 0);
            Assert.IsNotNull(result.Tokens);
            var tokens = result.Tokens.ToArray();

            Assert.AreEqual(types.Length, tokens.Length);
            for (var i = 0; i < tokens.Length; ++i)
            {
                Assert.AreSame(types[i], tokens[i].GetType());
            }
        }
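A minimal sketch of how these assertion helpers might be called from an MSTest test; the Tokenizer class, its Tokenize(string) entry point, and the "SELECT " input are assumptions for illustration, while KeywordToken and WhitespaceToken are token types that appear in the later examples.

        // Hypothetical usage: Tokenizer and its Tokenize(string) method are assumed
        // names, not taken from the examples on this page.
        [TestMethod]
        public void Tokenize_KeywordFollowedByWhitespace_ProducesExpectedTokens()
        {
            var result = new Tokenizer().Tokenize("SELECT ");

            AssertTokenizationTypes(result, typeof(KeywordToken), typeof(WhitespaceToken));
        }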
Example #3
        public override TokenizationResult Tokenize(ref LexerRuntimeInfo info)
        {
            var startPosition  = info.Reader.CurrentPosition;
            var whitespaceSpan = info.Reader.ReadTillEndOfWhitespace();

            if (whitespaceSpan.Length == 0)
            {
                return NoWhitespaceResult;
            }

            var token = new WhitespaceToken(startPosition, whitespaceSpan.Length);

            return TokenizationResult.Successful(token);
        }
Example #4
public TokenizationResult Tokenize(string sourceString)
{
    TokenizationResult result = new TokenizationResult();
    result.SourceString = sourceString;
    if (string.IsNullOrWhiteSpace(sourceString))
    {
        result.ErrorMessage = "Input string is empty";
        result.Tokens = null;
        result.Succeed = false;
        return result;
    }
    int nextParsePosition = 0;
    TokenType matchedType = TokenType.None;
    result.Tokens = new List<Token>();
    do
    {
        Match match = null;
        matchedType = TokenType.None;
        // Sequence is important to the matches to avoid confusing a term with an atom
        foreach (var pair in ParseRegexes.GetRegexToTokenTypeMappings())
        {
            if ((match = pair.Item1.Match(sourceString, nextParsePosition)).Success
                && match.Index == nextParsePosition)
            {
                matchedType = pair.Item2;
                break;
            }
        }
        if (matchedType != TokenType.None)
        {
            // Comments are removed here
            if (matchedType != TokenType.SingleLineComment)
            {
                result.Tokens.Add(new Token
                {
                    From = match.Groups[1].Index,
                    Length = match.Groups[1].Length,
                    SourceString = sourceString,
                    TokenType = matchedType
                });
            }
            nextParsePosition += match.Length;
        }
    }
    while (nextParsePosition < sourceString.Length && matchedType != TokenType.None);
    result.Succeed = nextParsePosition == sourceString.Length;
    if (!result.Succeed) result.ErrorMessage = "Unexpected characters at position " + nextParsePosition;
    return result;
}
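A short, hypothetical caller for the tokenizer above; the class name Tokenizer and the input string are assumptions, while Succeed, ErrorMessage, Tokens, TokenType, From, and Length come from the example itself.

// Hypothetical usage sketch: the containing class is assumed to be called Tokenizer.
var result = new Tokenizer().Tokenize("some source text");

if (result.Succeed)
{
    foreach (var token in result.Tokens)
    {
        Console.WriteLine($"{token.TokenType} at {token.From}, length {token.Length}");
    }
}
else
{
    Console.WriteLine(result.ErrorMessage);
}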
Example #5
        public override TokenizationResult Tokenize(ref LexerRuntimeInfo info)
        {
            var startPosition = info.Reader.CaptureCurrentPosition();

            var found = _keywordsTrie.TryFind(info.Reader, out var keyword, out var readLength);

            if (found == false)
            {
                return TokenizationResult.Failed();
            }

            var token  = new KeywordToken(startPosition.Value, readLength, keyword);
            var result = TokenizationResult.Successful(token);

            return EnsureTrailingSpecialChar(ref info, result);
        }
Example #6
        private static void AssertTokenizationResult(TokenizationResult result, int unmatchedCharacters, Type exception = null, params Type[] types)
        {
            if (exception != null)
            {
                Assert.AreEqual(exception, result.Exception.GetType());
            }
            else
            {
                Assert.IsNull(result.Exception);
            }
            Assert.IsTrue(result.UnmatchedCharactersLeft == unmatchedCharacters);
            Assert.IsNotNull(result.Tokens);
            var tokens = result.Tokens.ToArray();

            Assert.AreEqual(types.Length, tokens.Length);
            for (var i = 0; i < tokens.Length; ++i)
            {
                Assert.AreSame(types[i], tokens[i].GetType());
            }
        }
        protected TokenizationResult EnsureTrailingSpecialChar(ref LexerRuntimeInfo info, TokenizationResult result)
        {
            if (result.Success == false)
            {
                return result;
            }

            ref var reader = ref info.Reader;