Example #1
        public void GetsSomeTokens()
        {
            var subject = new SqlLexer();

            var result = subject.GetTokens("SELECT * FROM Customers");

            Check.That(result).HasSize(7);
        }
Example #2
        public void IgnoresCase()
        {
            var subject = new SqlLexer();

            var result = subject.GetTokens("select * from Customers");

            Check.That(result.First()).IsEqualTo(
                new Token(0, TokenTypes.Keyword, "select"));
        }
Example #3
        private Sql.DmlDdlSqlStatement BuildStatement(string query, InputStringNormalizer stringNormalizer)
        {
            // The normalizer supplies the exact text handed to the lexer.
            query = stringNormalizer.InputForLexer;

            var lexbuf = LexBuffer<char>.FromString(query);

            // Wrap the C# lambda so it can be passed to the F#-generated lexer/parser.
            Func<LexBuffer<char>, SqlParser.token> func = x => SqlLexer.tokenize(x);

            return SqlParser.startCT(FuncConvert.FromFunc(func), lexbuf);
        }
Example #4
        internal ISQLStatement InternalParse(string sql)
        {
            // Standard ANTLR4 pipeline: character stream -> lexer -> token stream -> parser -> visitor.
            var inputStream = new AntlrInputStream(sql);
            var lexer       = new SqlLexer(inputStream);
            var tokens      = new CommonTokenStream(lexer);
            var parser      = new SqlParser(tokens);
            var visitor     = new GCSqlVisitor(parser, sql);

            return (ISQLStatement)visitor.Visit(parser.parse());
        }
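As a hedged variation on Example #4, the same wiring can be configured to fail fast on malformed SQL instead of attempting recovery. BailErrorStrategy is a standard Antlr4.Runtime type; InternalParseStrict itself is only an illustrative sketch layered on the example above, not part of the original code.

        // Sketch only: identical wiring to Example #4, but the parser bails out on the
        // first syntax error instead of recovering.
        internal ISQLStatement InternalParseStrict(string sql)
        {
            var inputStream = new AntlrInputStream(sql);
            var lexer       = new SqlLexer(inputStream);
            var tokens      = new CommonTokenStream(lexer);
            var parser      = new SqlParser(tokens)
            {
                // Antlr4.Runtime.BailErrorStrategy throws ParseCanceledException on the first error.
                ErrorHandler = new BailErrorStrategy()
            };
            var visitor = new GCSqlVisitor(parser, sql);

            return (ISQLStatement)visitor.Visit(parser.parse());
        }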
Example #5
        public static SyntaxNode BuildSyntaxTree(string statement)
        {
            if (string.IsNullOrWhiteSpace(statement))
            {
                throw new ArgumentException("No statement to process", nameof(statement));
            }

            var tokens     = SqlLexer.Tokenise(statement, OperationTokens.AllTokens);
            var syntaxTree = SyntaxTreeBuilder.Build(tokens);

            return syntaxTree;
        }
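A minimal usage sketch for BuildSyntaxTree follows; the SELECT text and the assertion are illustrative assumptions, and only the guard-clause behaviour comes from Example #5 itself.

            // Well-formed input yields a tree; the shape of SyntaxNode is not shown in Example #5.
            SyntaxNode root = BuildSyntaxTree("SELECT id, name FROM users");

            // A blank statement trips the guard clause in Example #5.
            Assert.Throws<ArgumentException>(() => BuildSyntaxTree("   "));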
Example #6
        public void CorrectlyTokenisesUseExample(string statement)
        {
            var result         = SqlLexer.Tokenise(statement, OperationTokens.AllTokens);
            var expectedResult = new List<SyntaxToken>
            {
                OperationTokens.Use,
                new SyntaxToken("database1")
            };

            Assert.Equal(expectedResult.Count, result.Count);
            for (int i = 0; i < expectedResult.Count; i++)
            {
                Assert.Equal(expectedResult[i].Value, result[i].Value);
            }
        }
Example #7
        public void CorrectlyTokenisesInsertExample(string statement)
        {
            var result         = SqlLexer.Tokenise(statement, OperationTokens.AllTokens);
            var expectedResult = new List<SyntaxToken>
            {
                OperationTokens.InsertInto,
                new SyntaxToken("user_notes (id, user_id, note, created)"),
                OperationTokens.Values,
                new SyntaxToken("(1, 1, \"Note 1\", NOW())")
            };

            Assert.Equal(expectedResult.Count, result.Count);
            for (int i = 0; i < expectedResult.Count; i++)
            {
                Assert.Equal(expectedResult[i].Value, result[i].Value);
            }
        }
Example #8
        public void CorrectlyTokenisesDeleteExample(string statement)
        {
            var result         = SqlLexer.Tokenise(statement, OperationTokens.AllTokens);
            var expectedResult = new List<SyntaxToken>
            {
                OperationTokens.Delete,
                OperationTokens.From,
                new SyntaxToken("database2.logs"),
                OperationTokens.Where,
                new SyntaxToken("id"),
                OperationTokens.LessThan,
                new SyntaxToken("1000")
            };

            Assert.Equal(expectedResult.Count, result.Count);
            for (int i = 0; i < expectedResult.Count; i++)
            {
                Assert.Equal(expectedResult[i].Value, result[i].Value);
            }
        }
Example #9
        public void CorrectlyTokenisesSelectExample(string statement)
        {
            var result         = SqlLexer.Tokenise(statement, OperationTokens.AllTokens);
            var expectedResult = new List<SyntaxToken>
            {
                OperationTokens.Select,
                new SyntaxToken("id, name, address"),
                OperationTokens.From,
                new SyntaxToken("users"),
                OperationTokens.Where,
                new SyntaxToken("is_customer"),
                OperationTokens.Is,
                new SyntaxToken("NOT NULL"),
                OperationTokens.OrderBy,
                new SyntaxToken("created")
            };

            Assert.Equal(expectedResult.Count, result.Count);
            for (int i = 0; i < expectedResult.Count; i++)
            {
                Assert.Equal(expectedResult[i].Value, result[i].Value);
            }
        }
Example #10
        public void PassingEmptyStatementReturnsNoTokens(string statement)
        {
            var expectedResult = new List<SyntaxToken>(0);

            Assert.Equal(expectedResult, SqlLexer.Tokenise(statement, OperationTokens.AllTokens));
        }
Example #11
        public void PassingEmptyOperationTokensThrowsException()
        {
            var operationTokens = new List<OperationToken>().AsReadOnly();

            Assert.Throws<ArgumentException>(() => SqlLexer.Tokenise("", operationTokens));
        }
Example #12
        public void PassingNullOperationTokensThrowsException()
        {
            Assert.Throws<ArgumentException>(() => SqlLexer.Tokenise("", null));
        }
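Taken together, Examples #10 to #12 pin down how SqlLexer.Tokenise treats its arguments: a null or empty operation-token set is rejected with an ArgumentException, while an empty statement simply yields no tokens. A minimal sketch of that argument handling, with an assumed signature that the examples do not confirm, could look like this:

        public static IReadOnlyList<SyntaxToken> Tokenise(
            string statement,
            IReadOnlyList<OperationToken> operationTokens)
        {
            // Examples #11 and #12: reject a missing or empty token set up front.
            if (operationTokens == null || operationTokens.Count == 0)
            {
                throw new ArgumentException("No operation tokens supplied", nameof(operationTokens));
            }

            // Example #10: an empty statement produces an empty token list rather than throwing.
            if (string.IsNullOrWhiteSpace(statement))
            {
                return new List<SyntaxToken>(0);
            }

            // The actual tokenisation against operationTokens would follow here.
            throw new NotImplementedException("Sketch only; real matching logic omitted.");
        }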