Example #1
        public void Tokenize_IsInvalidZeroToken()
        {
            List<string> listValue = new List<string>();

            listValue.Add(".00");

            ITokenizer         tokenizer = new RegexTokenizer();
            IEnumerable<Token> tokens    = tokenizer.Tokenize(listValue);
        }
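
These examples are collected from different projects, so the exact declarations vary, but the calls above compile against roughly the following shapes. This is a minimal sketch inferred purely from usage: the List<string> overload from Example #1, the string overload used in the later examples, and the string-typed TokenType are assumptions, and the real Token type is likely richer (TokenType is usually an enum).

        // Minimal sketch inferred from the calls in these examples only;
        // the real projects' declarations will differ in detail.
        public interface ITokenizer
        {
            IEnumerable<Token> Tokenize(string text);        // string input (Examples #2-#5)
            IEnumerable<Token> Tokenize(List<string> lines); // line-list input (Example #1)
        }

        public class Token
        {
            public string TokenType { get; set; } // often an enum in real tokenizers; string here for the sketch
            public string Value     { get; set; } // the matched text, printed in Example #3
        }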
Example #2
        public TokenDefintion(string pattern)
        {
            Priority = -1;
            Ignore   = true;

            // Compile the textual pattern into a Regex by running it through
            // the project's own tokenizer and regex parser.
            var tokenizer = new RegexTokenizer();
            var tokens    = tokenizer.Tokenize(pattern);
            var compiler  = new RegexParser(tokens);

            Regex = compiler.Parse();
        }
Example #3
        /// <summary>
        /// Tokenizes the statement text, writes each token to the console,
        /// and then outputs the parse tree for the statement.
        /// </summary>
        /// <param name="tokenizer">Tokenizer used to split the statement into tokens.</param>
        /// <param name="sqlStatement">Statement whose text is tokenized and whose token list is populated.</param>
        public void OutputTokenList(RegexTokenizer tokenizer, SqlStatement sqlStatement)
        {
            sqlStatement.tokenList.AddRange(tokenizer.Tokenize(sqlStatement.StatementValue));

            sqlStatement.tokenList.ForEach(x => Console.WriteLine($"Token Type: {x.TokenType}, Token Value: {x.Value}"));

            //var temp = parser.Parse(sqlStatement.tokenList);
            OutputParseTree(sqlStatement);

            Console.WriteLine($"End of statement.{Environment.NewLine}");
        }
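
OutputTokenList only touches two members of SqlStatement. A minimal holder that would satisfy the calls above might look like the sketch below; the member names tokenList and StatementValue come from the example, while everything else, including the property types, is an assumption.

        // Sketch inferred from Example #3; the real SqlStatement class in the
        // source project almost certainly carries more state than this.
        public class SqlStatement
        {
            public string StatementValue { get; set; }                 // raw SQL text to tokenize
            public List<Token> tokenList { get; } = new List<Token>(); // filled by OutputTokenList
        }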
Example #4
        public TokenDefintion(TType type, string pattern, int priority)
        {
            Type     = type;
            Priority = priority;
            Ignore   = false;

            var tokenizer = new RegexTokenizer();
            var tokens    = tokenizer.Tokenize(pattern);
            var compiler  = new RegexParser(tokens);

            Regex = compiler.Parse();
        }
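
Taken together, the constructors in Examples #2 and #4 suggest token definitions are registered roughly as follows. This is a hypothetical usage sketch only: TType.Number and both regex patterns are illustrative guesses, not taken from the source projects.

        // Hypothetical usage of the two TokenDefintion constructors shown above.
        // TType.Number and the patterns are illustrative only.
        var whitespace = new TokenDefintion(@"\s+");                      // Ignore = true, Priority = -1
        var number     = new TokenDefintion(TType.Number, @"[0-9]+", 1);  // typed token with explicit priority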
Example #5
        public void CanTokenizeAndParseQueryString()
        {
            string query = @"
MATCH app = 'MyTestApp'
AND ex IN ('System.NullReferenceException', 'System.FormatException')
BETWEEN 2016-01-01 00:00:00 AND 2016-02-01 00:00:00
LIMIT 100
";

            var tokenizer     = new RegexTokenizer();
            var tokenSequence = tokenizer.Tokenize(query).ToList();

            var parser             = new QueryParser();
            var dataRepresentation = parser.Parse(tokenSequence);

            var json = JsonConvert.SerializeObject(dataRepresentation, Formatting.Indented, SerializerSettings);

            Assert.NotNull(json);
        }
Example #6
        public void ValidateFile()
        {
            // Get all lines of the file as tokens
            ITokenizer tokenizer     = new RegexTokenizer();
            var        tokenSequence = tokenizer.Tokenize(_fileContent).ToList();

            // Parse all tokens
            Parser p = new Parser();
            _expenses = p.Parse(tokenSequence);

            if (_expenses.Count == 0)
            {
                throw new Exception("Nothing to Process");
            }
        }