예제 #1
0
        // Tokenizing "Number:[1234 TO 5678]" must yield a field, a colon, and an
        // inclusive bracket range: '[', lower bound, TO, upper bound, ']'.
        public void SnQuery_Lexer_Range_Brackets()
        {
            var expected = new[]
            {
                new TokenChecker { Token = CqlLexer.Token.Field,    Value = "Number" },
                new TokenChecker { Token = CqlLexer.Token.Colon,    Value = ":" },
                new TokenChecker { Token = CqlLexer.Token.LBracket, Value = "[" },
                new TokenChecker { Token = CqlLexer.Token.Number,   Value = "1234" },
                new TokenChecker { Token = CqlLexer.Token.To,       Value = "TO" },
                new TokenChecker { Token = CqlLexer.Token.Number,   Value = "5678" },
                new TokenChecker { Token = CqlLexer.Token.RBracket, Value = "]" },
            };

            var actual = GetTokens("Number:[1234 TO 5678]");
            var error  = CheckTokensAndEof(actual, expected);

            Assert.IsNull(error, error);
        }
예제 #2
0
        // Tokenizing "Number:{1234 TO 5678}" must yield a field, a colon, and an
        // exclusive brace range: '{', lower bound, TO, upper bound, '}'.
        public void Lexer_Range_Braces()
        {
            var expected = new[]
            {
                new TokenChecker { Token = SnLucLexer.Token.Field,  Value = "Number" },
                new TokenChecker { Token = SnLucLexer.Token.Colon,  Value = ":" },
                new TokenChecker { Token = SnLucLexer.Token.LBrace, Value = "{" },
                new TokenChecker { Token = SnLucLexer.Token.Number, Value = "1234" },
                new TokenChecker { Token = SnLucLexer.Token.To,     Value = "TO" },
                new TokenChecker { Token = SnLucLexer.Token.Number, Value = "5678" },
                new TokenChecker { Token = SnLucLexer.Token.RBrace, Value = "}" },
            };

            var actual = GetTokens("Number:{1234 TO 5678}");
            var error  = CheckTokensAndEof(actual, expected);

            Assert.IsNull(error, error);
        }
예제 #3
0
        // Field names carrying the '#' prefix must still be recognized as Field
        // tokens in a field:value list.
        public void Lexer_FieldListFieldPrefix()
        {
            var expected = new[]
            {
                new TokenChecker { Token = SnLucLexer.Token.Field,  Value = "#Field1" },
                new TokenChecker { Token = SnLucLexer.Token.Colon,  Value = ":" },
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "value" },
                new TokenChecker { Token = SnLucLexer.Token.Field,  Value = "#Field2" },
                new TokenChecker { Token = SnLucLexer.Token.Colon,  Value = ":" },
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "value" },
            };

            var actual = GetTokens("#Field1:value #Field2:value");
            var error  = CheckTokensAndEof(actual, expected);

            Assert.IsNull(error, error);
        }
예제 #4
0
        // Only ':' separates a field from its value; '>', '<', '>=', '<=' and '<>'
        // are not field delimiters, so those spans fall through as plain strings.
        public void Lexer_FieldBadLimiters()
        {
            var expected = new[]
            {
                new TokenChecker { Token = SnLucLexer.Token.Field,  Value = "Field" },
                new TokenChecker { Token = SnLucLexer.Token.Colon,  Value = ":" },
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "value" },

                new TokenChecker { Token = SnLucLexer.Token.String, Value = "Field>value" },
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "Field<value" },
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "Field>=value" },
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "Field<=value" },
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "Field<>value" },
            };

            var actual = GetTokens(" Field:value Field>value Field<value Field>=value Field<=value Field<>value ");
            var error  = CheckTokensAndEof(actual, expected);

            Assert.IsNull(error, error);
        }
예제 #5
0
        // A parenthesized group after a field must be tokenized as '(' ... ')',
        // with the quoted phrase "pink panther" collapsing to a single String token.
        public void Lexer_FieldGrouping()
        {
            var expected = new[]
            {
                new TokenChecker { Token = SnLucLexer.Token.Field,  Value = "title" },
                new TokenChecker { Token = SnLucLexer.Token.Colon,  Value = ":" },
                new TokenChecker { Token = SnLucLexer.Token.LParen, Value = "(" },
                new TokenChecker { Token = SnLucLexer.Token.Plus,   Value = "+" },
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "return" },
                new TokenChecker { Token = SnLucLexer.Token.Plus,   Value = "+" },
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "pink panther" },
                new TokenChecker { Token = SnLucLexer.Token.RParen, Value = ")" },
            };

            var actual = GetTokens("title:(+return +\"pink panther\")");
            var error  = CheckTokensAndEof(actual, expected);

            Assert.IsNull(error, error);
        }
예제 #6
0
        // Two clauses with path values: '+' and '-' prefixes become Plus/Minus
        // tokens, and each path lexes as one String token.
        public void Lexer_TwoPaths()
        {
            var expected = new[]
            {
                new TokenChecker { Token = SnLucLexer.Token.Plus,   Value = "+" },
                new TokenChecker { Token = SnLucLexer.Token.Field,  Value = "Ancestor" },
                new TokenChecker { Token = SnLucLexer.Token.Colon,  Value = ":" },
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "/Root/System" },
                new TokenChecker { Token = SnLucLexer.Token.Minus,  Value = "-" },
                new TokenChecker { Token = SnLucLexer.Token.Field,  Value = "Path" },
                new TokenChecker { Token = SnLucLexer.Token.Colon,  Value = ":" },
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "/Root/System" },
            };

            var actual = GetTokens("+Ancestor:/Root/System -Path:/Root/System");
            var error  = CheckTokensAndEof(actual, expected);

            Assert.IsNull(error, error);
        }
예제 #7
0
 // One indented continuation line: the raw stream sees only a NL between the
 // two tokens, while the dented stream wraps the second token in INDENT…DEDENT.
 public void Simple()
 {
     TokenChecker
     .Of("hello")
     .Nl("  bar")
     .Raw(NORMAL, NL, NORMAL, EOF_TOKEN)
     .Dented(NORMAL, INDENT, NORMAL, NL, DEDENT, EOF_TOKEN);
 }
예제 #8
0
 // With IgnoreEof() and no open indent, the dented stream adds nothing
 // before EOF: a single line passes through unchanged.
 public void IgnoreEofNoDedent()
 {
     TokenChecker
     .Of("hello")
     .Raw(NORMAL, EOF_TOKEN)
     .IgnoreEof()
     .Dented(NORMAL, EOF_TOKEN);
 }
예제 #9
0
        // Number-vs-string boundary cases: pure integers/decimals are Number
        // tokens, anything mixing digits and letters (or ending in '.') is a
        // String, and '-' is always emitted as a separate Minus token.
        public void Lexer_TextAndNumber()
        {
            var expected = new[]
            {
                new TokenChecker { Token = SnLucLexer.Token.String,         Value = "text" },
                new TokenChecker { Token = SnLucLexer.Token.Number,         Value = "9" },
                new TokenChecker { Token = SnLucLexer.Token.String,         Value = "text" },
                new TokenChecker { Token = SnLucLexer.Token.Number,         Value = "12.34" },
                new TokenChecker { Token = SnLucLexer.Token.ControlKeyword, Value = ".SKIP" },
                new TokenChecker { Token = SnLucLexer.Token.String,         Value = "12aa" },
                new TokenChecker { Token = SnLucLexer.Token.String,         Value = "a12" },
                new TokenChecker { Token = SnLucLexer.Token.String,         Value = "12.aa" },
                new TokenChecker { Token = SnLucLexer.Token.String,         Value = "aa12." },
                new TokenChecker { Token = SnLucLexer.Token.String,         Value = "12.34aa" },
                new TokenChecker { Token = SnLucLexer.Token.Minus,          Value = "-" },
                new TokenChecker { Token = SnLucLexer.Token.Number,         Value = "12" },
                new TokenChecker { Token = SnLucLexer.Token.Minus,          Value = "-" },
                new TokenChecker { Token = SnLucLexer.Token.Number,         Value = "12.34" },
                new TokenChecker { Token = SnLucLexer.Token.Minus,          Value = "-" },
                new TokenChecker { Token = SnLucLexer.Token.ControlKeyword, Value = ".TOP" },
            };

            var actual = GetTokens("text 9 text 12.34 .SKIP 12aa a12 12.aa aa12. 12.34aa -12 -12.34 -.TOP");
            var error  = CheckTokensAndEof(actual, expected);

            Assert.IsNull(error, error);
        }
예제 #10
0
 // Two nested indents that are never explicitly closed must both be closed
 // at EOF: the dented stream ends with NL, DEDENT, DEDENT.
 public void MultipleDedentsToEof()
 {
     TokenChecker
     .Of("hello")
     .Nl("  line2")
     .Nl("    line3")
     .Raw(NORMAL, NL, NORMAL, NL, NORMAL, EOF_TOKEN)
     .Dented(NORMAL, INDENT, NORMAL, INDENT, NORMAL, NL, DEDENT, DEDENT, EOF_TOKEN);
 }
예제 #11
0
        // Parametrized check: CheckIsToken must report whether `token` occurs as a
        // token at `position` in `sourceCode`.
        public void CheckToken_TestReplacement_ReturnsExpected(string sourceCode, string token, int position, bool isToken)
        {
            // NCrunch reportedly cannot recognize this parametrized pattern; it runs fine manually.
            // Arrange
            var sut = new TokenChecker();

            // Act & Assert
            Assert.AreEqual(isToken, sut.CheckIsToken(sourceCode, token, position));
        }
예제 #12
0
 // IgnoreEof() suppresses the NL/DEDENT that would normally be synthesized
 // before EOF, so the INDENT is left unbalanced in the dented stream.
 public void IgnoreEofWithDedent()
 {
     TokenChecker
     .Of("hello")
     .Nl("  world")
     .Raw(NORMAL, NL, NORMAL, EOF_TOKEN)
     .IgnoreEof()
     .Dented(NORMAL, INDENT, NORMAL, EOF_TOKEN);
 }
예제 #13
0
 // Rf presumably appends a line using a carriage-return style separator —
 // TODO confirm against the Rf helper. All lines are at column 0, so no
 // INDENT/DEDENT appear; the dented stream only gains a final NL before EOF.
 public void WithReturn()
 {
     TokenChecker
     .Of("hello")
     .Nl("world")
     .Rf("dolly")
     .Raw(NORMAL, NL, NORMAL, NL, NORMAL, EOF_TOKEN)
     .Dented(NORMAL, NL, NORMAL, NL, NORMAL, NL, EOF_TOKEN);
 }
예제 #14
0
 // Dedenting to a column (2) between the base (0) and the previous indent (5)
 // — a "half dent" — emits a DEDENT followed by a fresh INDENT.
 public void HalfDent()
 {
     TokenChecker
     .Of("hello")
     .Nl("     world")
     .Nl("  boom")
     .Raw(NORMAL, NL, NORMAL, NL, NORMAL, EOF_TOKEN)
     .Dented(NORMAL, INDENT, NORMAL, NL, DEDENT, INDENT, NORMAL, NL, DEDENT, EOF_TOKEN);
 }
예제 #15
0
 // Input that starts indented opens the dented stream with INDENT, and the
 // trailing empty line still gets the closing DEDENT before EOF.
 public void StartIndentedThenEmptyLines()
 {
     TokenChecker
     .Of("    hello")
     .Nl("    line2")
     .Nl("")
     .Raw(NORMAL, NL, NORMAL, NL, EOF_TOKEN)
     .Dented(INDENT, NORMAL, NL, NORMAL, NL, DEDENT, EOF_TOKEN);
 }
예제 #16
0
 // Two flat lines followed by two equally-indented lines: one INDENT opens
 // the block and a single DEDENT closes it at EOF.
 public void SimpleWithNLs()
 {
     TokenChecker
     .Of("hello")
     .Nl("world")
     .Nl("  tab1")
     .Nl("  tab2")
     .Raw(NORMAL, NL, NORMAL, NL, NORMAL, NL, NORMAL, EOF_TOKEN)
     .Dented(NORMAL, NL, NORMAL, INDENT, NORMAL, NL, NORMAL, NL, DEDENT, EOF_TOKEN);
 }
예제 #17
0
 // Dedenting below the starting indentation level must not fail — it just
 // produces the extra DEDENT before the final line's tokens.
 public void DedentToNegative()
 {
     // this shouldn't explode, it should just result in an extra dedent
     TokenChecker
     .Of("    hello")
     .Nl("    world")
     .Nl("boom")
     .Raw(NORMAL, NL, NORMAL, NL, NORMAL, EOF_TOKEN)
     .Dented(INDENT, NORMAL, NL, NORMAL, NL, DEDENT, NORMAL, NL, EOF_TOKEN);
 }
예제 #18
0
 // Tab characters must count as indentation: the two tab-indented lines form
 // one INDENT…DEDENT block between the braces.
 public void TabIndents()
 {
     TokenChecker
     .Of("{")
     .Nl("\t\tline1")
     .Nl("\t\tline2")
     .Nl("}")
     .Raw(NORMAL, NL, NORMAL, NL, NORMAL, NL, NORMAL, EOF_TOKEN)
     .Dented(NORMAL, INDENT, NORMAL, NL, NORMAL, NL, DEDENT, NORMAL, NL, EOF_TOKEN);
 }
예제 #19
0
 // Every line is indented and line3 is nested deeper; the whitespace-only
 // final line triggers both closing DEDENTs before EOF.
 public void AllIndented()
 {
     TokenChecker
     .Of("    hello")
     .Nl("    line2")
     .Nl("       line3")
     .Nl("    ")
     .Raw(NORMAL, NL, NORMAL, NL, NORMAL, NL, EOF_TOKEN)
     .Dented(INDENT, NORMAL, NL, NORMAL, INDENT, NORMAL, NL, DEDENT, DEDENT, EOF_TOKEN);
 }
예제 #20
0
        // An invalid token argument must make CheckIsToken throw an
        // ArgumentException carrying the expected message.
        public void CheckIsToken_TokenIsInvalid_ExceptionThrown(string token, string expectedError)
        {
            // NCrunch reportedly cannot recognize this parametrized pattern; it runs fine manually.
            // Arrange
            var sut = new TokenChecker();

            // Act
            Action act = () => sut.CheckIsToken("The quick brown fox jumps over a lazy dog", token, 0);

            // Assert
            act.Should().Throw<ArgumentException>().WithMessage(expectedError);
        }
예제 #21
0
        // An out-of-range (or otherwise invalid) position must make CheckIsToken
        // throw an ArgumentException carrying the expected message.
        public void CheckIsToken_TokenPositionIsInvalid_ExceptionThrown(string sourceCode, string token, int position, string expectedError)
        {
            // NCrunch reportedly cannot recognize this parametrized pattern; it runs fine manually.
            // Arrange
            var sut = new TokenChecker();

            // Act
            Action act = () => sut.CheckIsToken(sourceCode, token, position);

            // Assert
            act.Should().Throw<ArgumentException>().WithMessage(expectedError);
        }
예제 #22
0
        // Terms containing '*' or '?' anywhere (prefix, suffix, infix, both ends)
        // must lex as WildcardString tokens; leading '+'/'-' stay separate.
        public void Lexer_Wildcards()
        {
            var expected = new[]
            {
                new TokenChecker { Token = SnLucLexer.Token.Plus,           Value = "+" },
                new TokenChecker { Token = SnLucLexer.Token.WildcardString, Value = "startswith*" },
                new TokenChecker { Token = SnLucLexer.Token.Minus,          Value = "-" },
                new TokenChecker { Token = SnLucLexer.Token.WildcardString, Value = "*endswith" },
                new TokenChecker { Token = SnLucLexer.Token.WildcardString, Value = "*contains*" },
                new TokenChecker { Token = SnLucLexer.Token.Plus,           Value = "+" },
                new TokenChecker { Token = SnLucLexer.Token.WildcardString, Value = "starts*ends" },
                new TokenChecker { Token = SnLucLexer.Token.Minus,          Value = "-" },
                new TokenChecker { Token = SnLucLexer.Token.WildcardString, Value = "startswith?" },
                new TokenChecker { Token = SnLucLexer.Token.WildcardString, Value = "?endswith" },
                new TokenChecker { Token = SnLucLexer.Token.Plus,           Value = "+" },
                new TokenChecker { Token = SnLucLexer.Token.WildcardString, Value = "?contains?" },
                new TokenChecker { Token = SnLucLexer.Token.Minus,          Value = "-" },
                new TokenChecker { Token = SnLucLexer.Token.WildcardString, Value = "starts?ends" },
            };

            var actual = GetTokens("+startswith* -*endswith *contains* +starts*ends -startswith? ?endswith +?contains? -starts?ends");
            var error  = CheckTokensAndEof(actual, expected);

            Assert.IsNull(error, error);
        }
예제 #23
0
        // Nested parenthesized groups: every '(' and ')' becomes its own token,
        // with terms and '+'/'-' prefixes tokenized in between.
        public void Lexer_Groups()
        {
            var expected = new[]
            {
                new TokenChecker { Token = SnLucLexer.Token.Field,  Value = "Field1" },
                new TokenChecker { Token = SnLucLexer.Token.Colon,  Value = ":" },
                new TokenChecker { Token = SnLucLexer.Token.LParen, Value = "(" },
                new TokenChecker { Token = SnLucLexer.Token.Plus,   Value = "+" },
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "aaa" },
                new TokenChecker { Token = SnLucLexer.Token.Plus,   Value = "+" },
                new TokenChecker { Token = SnLucLexer.Token.LParen, Value = "(" },
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "bbb" },
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "ccc" },
                new TokenChecker { Token = SnLucLexer.Token.RParen, Value = ")" },
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "ddd" },
                new TokenChecker { Token = SnLucLexer.Token.Minus,  Value = "-" },
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "eee" },
                new TokenChecker { Token = SnLucLexer.Token.RParen, Value = ")" },
            };

            var actual = GetTokens("Field1:(+aaa +(bbb ccc) ddd -eee)");
            var error  = CheckTokensAndEof(actual, expected);

            Assert.IsNull(error, error);
        }
예제 #24
0
        // A backslash-escaped colon ("contains\:colon") must not split the term:
        // the whole span lexes as one String with the escape removed.
        public void Lexer_NonQuotedStringAndEscapes()
        {
            var expected = new[]
            {
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "contains:colon" },
            };

            var actual = GetTokens("contains\\:colon");
            var error  = CheckTokensAndEof(actual, expected);

            Assert.IsNull(error, error);
        }
예제 #25
0
        // A colon inside a quoted string must not be treated as a field separator:
        // "contains:colon" lexes as a single String token.
        public void Lexer_FieldNameInQuotedString()
        {
            var expected = new[]
            {
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "contains:colon" },
            };

            var actual = GetTokens("\"contains:colon\"");
            var error  = CheckTokensAndEof(actual, expected);

            Assert.IsNull(error, error);
        }
예제 #26
0
            // Creates a checker from the first input line: the leading spaces set
            // the line builder's position, and any remaining text is recorded as
            // a single NORMAL token.
            public static TokenChecker Of(string firstLine)
            {
                var checker = new TokenChecker();
                var line    = new LineBuilder(0, checker._tokens);
                var indent  = LeadingSpacesOf(firstLine);

                line.Pos = indent;
                var rest = firstLine.Substring(indent);
                if (rest.Length > 0)
                {
                    line.AddToken("", rest, NORMAL);
                }
                return checker;
            }
예제 #27
0
 // Blank and whitespace-only lines appear as NLs in the raw stream but must
 // not affect indentation tracking: only "  dolly" opens an INDENT, closed
 // before the final flat line.
 public void IgnoreBlankLines()
 {
     TokenChecker
     .Of("hello")
     .Nl("     ")
     .Nl("")
     .Nl("  dolly")
     .Nl("        ")
     .Nl("    ")
     .Nl("")
     .Nl("world")
     .Raw(NORMAL, NL, NL, NL, NORMAL, NL, NL, NL, NL, NORMAL, EOF_TOKEN)
     .Dented(NORMAL, INDENT, NORMAL, NL, DEDENT, NORMAL, NL, EOF_TOKEN);
 }
예제 #28
0
        // A nested group directly after a term: "F:(a (+G:b -d))" — inner group
        // contains its own field clause (+G:b) and a negated term (-d).
        public void Lexer_X()
        {
            //dump: BooleanQuery(Clause(Occur(), TermQuery(Term(F:a))), Clause(Occur(), BooleanQuery(Clause(Occur(+), TermQuery(Term(G:b))), Clause(Occur(-), TermQuery(Term(F:d))))))
            var expected = new[]
            {
                new TokenChecker { Token = SnLucLexer.Token.Field,  Value = "F" },
                new TokenChecker { Token = SnLucLexer.Token.Colon,  Value = ":" },
                new TokenChecker { Token = SnLucLexer.Token.LParen, Value = "(" },
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "a" },
                new TokenChecker { Token = SnLucLexer.Token.LParen, Value = "(" },
                new TokenChecker { Token = SnLucLexer.Token.Plus,   Value = "+" },
                new TokenChecker { Token = SnLucLexer.Token.Field,  Value = "G" },
                new TokenChecker { Token = SnLucLexer.Token.Colon,  Value = ":" },
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "b" },
                new TokenChecker { Token = SnLucLexer.Token.Minus,  Value = "-" },
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "d" },
                new TokenChecker { Token = SnLucLexer.Token.RParen, Value = ")" },
                new TokenChecker { Token = SnLucLexer.Token.RParen, Value = ")" },
            };

            var actual = GetTokens("F:(a (+G:b -d))");
            var error  = CheckTokensAndEof(actual, expected);

            Assert.IsNull(error, error);
        }
예제 #29
0
        // Integer and decimal values lex as Number tokens; a '-' before a field
        // or before a number is always its own Minus token.
        public void Lexer_Numbers()
        {
            var expected = new[]
            {
                new TokenChecker { Token = SnLucLexer.Token.Field,  Value = "NumberField" },
                new TokenChecker { Token = SnLucLexer.Token.Colon,  Value = ":" },
                new TokenChecker { Token = SnLucLexer.Token.Number, Value = "45678" },
                new TokenChecker { Token = SnLucLexer.Token.Field,  Value = "NumberField" },
                new TokenChecker { Token = SnLucLexer.Token.Colon,  Value = ":" },
                new TokenChecker { Token = SnLucLexer.Token.Number, Value = "456.78" },
                new TokenChecker { Token = SnLucLexer.Token.Minus,  Value = "-" },
                new TokenChecker { Token = SnLucLexer.Token.Field,  Value = "NumberField" },
                new TokenChecker { Token = SnLucLexer.Token.Colon,  Value = ":" },
                new TokenChecker { Token = SnLucLexer.Token.Minus,  Value = "-" },
                new TokenChecker { Token = SnLucLexer.Token.Number, Value = "78.456" },
            };

            var actual = GetTokens("NumberField:45678 NumberField:456.78 -NumberField:-78.456");
            var error  = CheckTokensAndEof(actual, expected);

            Assert.IsNull(error, error);
        }
예제 #30
0
        // Apostrophes inside a double-quoted value must be kept verbatim in the
        // single resulting String token.
        public void Lexer_StringWithInnerApos()
        {
            var expected = new[]
            {
                new TokenChecker { Token = SnLucLexer.Token.Field,  Value = "fieldname" },
                new TokenChecker { Token = SnLucLexer.Token.Colon,  Value = ":" },
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "text text 'text' text" },
            };

            var actual = GetTokens("fieldname:\"text text 'text' text\"");
            var error  = CheckTokensAndEof(actual, expected);

            Assert.IsNull(error, error);
        }
예제 #31
0
        /// <summary>
        /// Builds the lexer's root parser from the given keyword and symbol tables
        /// plus built-in number, quoted-text and identifier parsers.
        /// </summary>
        /// <param name="keywords">Literal keywords, each matched character by character.</param>
        /// <param name="symbols">Literal symbols/operators, each matched character by character.</param>
        /// <param name="ignoreWhiteSpaces">When true, whitespace between tokens is consumed and discarded.</param>
        /// <param name="whitespaceTokenChecker">Optional predicate that replaces the default whitespace test.</param>
        public Lexer(string[] keywords , string[] symbols , bool ignoreWhiteSpaces = true , TokenChecker<char> whitespaceTokenChecker = null)
        {
            // Each keyword becomes a sequence parser over its individual characters.
            var keywordParsers = from keyword in keywords
                                 select create<keyword>(@seq(
                                     from _char in keyword.ToCharArray() select fill("chars", token(_char))));
            // Symbols are built the same way as keywords.
            var symbolParsers = from symbol in symbols
                                select create<symbol>(@seq(
                                    from _char in symbol.ToCharArray() select fill("chars", token(_char))));
            // Numbers: either one-or-more digits, or an optional integer part
            // followed by '.' and a mandatory fraction part.
            var numberParsers =
            new[] {
                create<number>(oneOrMore(fill("chars" , digit))) ,
                create<number>(seq(zeroOrMore(fill("chars" , digit)) , fill("chars" , token('.')) , oneOrMore(fill("chars" , digit))))
            };
            // NOTE(review): `Token('"')` (capital T) differs from the lowercase `token(...)`
            // used everywhere else in this constructor — confirm it is the intended helper.
            var quotation = Token('"');
            // NOTE(review): `notquotatoin` and `qoutationEscape` are misspellings of
            // notQuotation / quotationEscape (local names only; behavior unaffected).
            var notquotatoin = fill("chars", toParser(ch => ch != '"'));
            var qoutationEscape = seq(toParser(ch => ch == '\\'), fill("chars", token('"')));
            // Quoted text: opening quote, any mix of escaped quotes and non-quote
            // characters, closing quote. Only the inner characters are captured.
            var textParser = seq(quotation, create<text>(zeroOrMoreAny(qoutationEscape, notquotatoin)), quotation);

            // Identifier: a letter followed by any letters/digits.
            var identifierParser = create<identifier>(seq(fill("chars", letter), zeroOrMore(fill("chars", letter_or_digit))));

            // Default "ignore" parser matches nothing.
            Parser ignoreParsers = () => false;
            if (ignoreWhiteSpaces)
            {
                // Custom checker overrides `Whitespace`; otherwise `Whitespace`
                // presumably keeps a default assigned elsewhere — TODO confirm it
                // is non-null at this point.
                if (whitespaceTokenChecker != null)
                    Whitespace = toParser(whitespaceTokenChecker);
                ignoreParsers = oneOrMoreAny(seq(Whitespace));
            }

            // Token alternatives, tried in order: keywords win over identifiers,
            // and identifiers/numbers/text win over bare symbols.
            var tokenParser = fill("tokens", any(
                                 @any(keywordParsers),
                                 identifierParser,
                                 @any(numberParsers),
                                 textParser,
                                 @any(symbolParsers)
                                ));

            // NOTE(review): when ignoreWhiteSpaces is false, rootParser is never
            // assigned here and keeps its default value — confirm this is intentional.
            if (ignoreWhiteSpaces)
            {
                rootParser = create<TokenList>(seq(zeroOrMoreAny(tokenParser, ignoreParsers), end_of_file()));
            }
        }
예제 #32
0
 // '#'-prefixed field names must still lex as Field tokens in a field:value list.
 public void Lexer_FieldListFieldPrefix()
 {
     TokenChecker T(SnLucLexer.Token token, string value) =>
         new TokenChecker { Token = token, Value = value };

     var expected = new[]
     {
         T(SnLucLexer.Token.Field, "#Field1"),
         T(SnLucLexer.Token.Colon, ":"),
         T(SnLucLexer.Token.String, "value"),
         T(SnLucLexer.Token.Field, "#Field2"),
         T(SnLucLexer.Token.Colon, ":"),
         T(SnLucLexer.Token.String, "value"),
     };
     var actual = GetTokens("#Field1:value #Field2:value");
     var error = CheckTokensAndEof(actual, expected);
     Assert.IsNull(error, error);
 }
예제 #33
0
 // A parenthesized group after a field: '(' ... ')' tokens, with the quoted
 // phrase "pink panther" collapsing to a single String token.
 public void Lexer_FieldGrouping()
 {
     TokenChecker T(SnLucLexer.Token token, string value) =>
         new TokenChecker { Token = token, Value = value };

     var expected = new[]
     {
         T(SnLucLexer.Token.Field, "title"),
         T(SnLucLexer.Token.Colon, ":"),
         T(SnLucLexer.Token.LParen, "("),
         T(SnLucLexer.Token.Plus, "+"),
         T(SnLucLexer.Token.String, "return"),
         T(SnLucLexer.Token.Plus, "+"),
         T(SnLucLexer.Token.String, "pink panther"),
         T(SnLucLexer.Token.RParen, ")"),
     };
     var actual = GetTokens("title:(+return +\"pink panther\")");
     var error = CheckTokensAndEof(actual, expected);
     Assert.IsNull(error, error);
 }
예제 #34
0
 // Number-vs-string boundary cases: pure integers/decimals are Numbers, any
 // digit/letter mix (or trailing '.') is a String, '-' is always Minus.
 public void Lexer_TextAndNumber()
 {
     TokenChecker T(SnLucLexer.Token token, string value) =>
         new TokenChecker { Token = token, Value = value };

     var expected = new[]
     {
         T(SnLucLexer.Token.String, "text"),
         T(SnLucLexer.Token.Number, "9"),
         T(SnLucLexer.Token.String, "text"),
         T(SnLucLexer.Token.Number, "12.34"),
         T(SnLucLexer.Token.ControlKeyword, ".SKIP"),
         T(SnLucLexer.Token.String, "12aa"),
         T(SnLucLexer.Token.String, "a12"),
         T(SnLucLexer.Token.String, "12.aa"),
         T(SnLucLexer.Token.String, "aa12."),
         T(SnLucLexer.Token.String, "12.34aa"),
         T(SnLucLexer.Token.Minus, "-"),
         T(SnLucLexer.Token.Number, "12"),
         T(SnLucLexer.Token.Minus, "-"),
         T(SnLucLexer.Token.Number, "12.34"),
         T(SnLucLexer.Token.Minus, "-"),
         T(SnLucLexer.Token.ControlKeyword, ".TOP"),
     };
     var actual = GetTokens("text 9 text 12.34 .SKIP 12aa a12 12.aa aa12. 12.34aa -12 -12.34 -.TOP");
     var error = CheckTokensAndEof(actual, expected);
     Assert.IsNull(error, error);
 }
예제 #35
0
 // Dot-prefixed control keywords (.AUTOFILTERS, .SKIP, .TOP, .SORT,
 // .REVERSESORT) each lex as ControlKeyword, followed by ':' and a value.
 public void Lexer_Keywords()
 {
     TokenChecker T(SnLucLexer.Token token, string value) =>
         new TokenChecker { Token = token, Value = value };

     var expected = new[]
     {
         T(SnLucLexer.Token.ControlKeyword, ".AUTOFILTERS"),
         T(SnLucLexer.Token.Colon, ":"),
         T(SnLucLexer.Token.String, "OFF"),
         T(SnLucLexer.Token.ControlKeyword, ".SKIP"),
         T(SnLucLexer.Token.Colon, ":"),
         T(SnLucLexer.Token.Number, "100"),
         T(SnLucLexer.Token.ControlKeyword, ".TOP"),
         T(SnLucLexer.Token.Colon, ":"),
         T(SnLucLexer.Token.Number, "20"),
         T(SnLucLexer.Token.ControlKeyword, ".SORT"),
         T(SnLucLexer.Token.Colon, ":"),
         T(SnLucLexer.Token.String, "FieldName1"),
         T(SnLucLexer.Token.ControlKeyword, ".REVERSESORT"),
         T(SnLucLexer.Token.Colon, ":"),
         T(SnLucLexer.Token.String, "FieldName2"),
         T(SnLucLexer.Token.ControlKeyword, ".SORT"),
         T(SnLucLexer.Token.Colon, ":"),
         T(SnLucLexer.Token.String, "FieldName3"),
     };
     var actual = GetTokens(".AUTOFILTERS:OFF .SKIP:100 .TOP:20 .SORT:FieldName1 .REVERSESORT:FieldName2 .SORT:FieldName3");
     var error = CheckTokensAndEof(actual, expected);
     Assert.IsNull(error, error);
 }
예제 #36
0
 // Integer and decimal values lex as Number tokens; '-' before a field or a
 // number is always a separate Minus token.
 public void Lexer_Numbers()
 {
     TokenChecker T(SnLucLexer.Token token, string value) =>
         new TokenChecker { Token = token, Value = value };

     var expected = new[]
     {
         T(SnLucLexer.Token.Field, "NumberField"),
         T(SnLucLexer.Token.Colon, ":"),
         T(SnLucLexer.Token.Number, "45678"),
         T(SnLucLexer.Token.Field, "NumberField"),
         T(SnLucLexer.Token.Colon, ":"),
         T(SnLucLexer.Token.Number, "456.78"),
         T(SnLucLexer.Token.Minus, "-"),
         T(SnLucLexer.Token.Field, "NumberField"),
         T(SnLucLexer.Token.Colon, ":"),
         T(SnLucLexer.Token.Minus, "-"),
         T(SnLucLexer.Token.Number, "78.456"),
     };
     var actual = GetTokens("NumberField:45678 NumberField:456.78 -NumberField:-78.456");
     var error = CheckTokensAndEof(actual, expected);
     Assert.IsNull(error, error);
 }
예제 #37
0
 // An exclusive range uses braces: '{' and '}' become LBrace / RBrace tokens
 // around the two Number endpoints joined by TO.
 public void Lexer_Range_Braces()
 {
     var expected = new[]
     {
         new TokenChecker { Token = SnLucLexer.Token.Field, Value = "Number" },
         new TokenChecker { Token = SnLucLexer.Token.Colon, Value = ":" },
         new TokenChecker { Token = SnLucLexer.Token.LBrace, Value = "{" },
         new TokenChecker { Token = SnLucLexer.Token.Number, Value = "1234" },
         new TokenChecker { Token = SnLucLexer.Token.To, Value = "TO" },
         new TokenChecker { Token = SnLucLexer.Token.Number, Value = "5678" },
         new TokenChecker { Token = SnLucLexer.Token.RBrace, Value = "}" },
     };

     var actual = GetTokens("Number:{1234 TO 5678}");
     var error = CheckTokensAndEof(actual, expected);
     Assert.IsNull(error, error);
 }
예제 #38
0
 // Two path-valued clauses with +/- prefixes: each clause yields
 // Plus/Minus, Field, Colon, then the path as a single String token.
 public void Lexer_TwoPaths()
 {
     var expected = new[]
     {
         new TokenChecker { Token = SnLucLexer.Token.Plus, Value = "+" },
         new TokenChecker { Token = SnLucLexer.Token.Field, Value = "Ancestor" },
         new TokenChecker { Token = SnLucLexer.Token.Colon, Value = ":" },
         new TokenChecker { Token = SnLucLexer.Token.String, Value = "/Root/System" },
         new TokenChecker { Token = SnLucLexer.Token.Minus, Value = "-" },
         new TokenChecker { Token = SnLucLexer.Token.Field, Value = "Path" },
         new TokenChecker { Token = SnLucLexer.Token.Colon, Value = ":" },
         new TokenChecker { Token = SnLucLexer.Token.String, Value = "/Root/System" },
     };

     var actual = GetTokens("+Ancestor:/Root/System -Path:/Root/System");
     var error = CheckTokensAndEof(actual, expected);
     Assert.IsNull(error, error);
 }
예제 #39
0
 // Terms containing '*' or '?' in any position (prefix, suffix, both, infix)
 // are classified as WildcardString; leading +/- stay separate tokens.
 public void Lexer_Wildcards()
 {
     var expected = new[]
     {
         new TokenChecker { Token = SnLucLexer.Token.Plus, Value = "+" },
         new TokenChecker { Token = SnLucLexer.Token.WildcardString, Value = "startswith*" },
         new TokenChecker { Token = SnLucLexer.Token.Minus, Value = "-" },
         new TokenChecker { Token = SnLucLexer.Token.WildcardString, Value = "*endswith" },
         new TokenChecker { Token = SnLucLexer.Token.WildcardString, Value = "*contains*" },
         new TokenChecker { Token = SnLucLexer.Token.Plus, Value = "+" },
         new TokenChecker { Token = SnLucLexer.Token.WildcardString, Value = "starts*ends" },
         new TokenChecker { Token = SnLucLexer.Token.Minus, Value = "-" },
         new TokenChecker { Token = SnLucLexer.Token.WildcardString, Value = "startswith?" },
         new TokenChecker { Token = SnLucLexer.Token.WildcardString, Value = "?endswith" },
         new TokenChecker { Token = SnLucLexer.Token.Plus, Value = "+" },
         new TokenChecker { Token = SnLucLexer.Token.WildcardString, Value = "?contains?" },
         new TokenChecker { Token = SnLucLexer.Token.Minus, Value = "-" },
         new TokenChecker { Token = SnLucLexer.Token.WildcardString, Value = "starts?ends" },
     };

     var actual = GetTokens("+startswith* -*endswith *contains* +starts*ends -startswith? ?endswith +?contains? -starts?ends");
     var error = CheckTokensAndEof(actual, expected);
     Assert.IsNull(error, error);
 }
예제 #40
0
 // A quoted term is a single String token: the ':' inside the quotes
 // must NOT be split into Field + Colon.
 public void Lexer_FieldNameInQuotedString()
 {
     var expected = new[]
     {
         new TokenChecker { Token = SnLucLexer.Token.String, Value = "contains:colon" },
     };

     var actual = GetTokens("\"contains:colon\"");
     var error = CheckTokensAndEof(actual, expected);
     Assert.IsNull(error, error);
 }
예제 #41
0
 // A backslash-escaped ':' in an unquoted term keeps the term a single
 // String token; the escape itself is consumed ("contains\:colon" -> "contains:colon").
 public void Lexer_NonQuotedStringAndEscapes()
 {
     var expected = new[]
     {
         new TokenChecker { Token = SnLucLexer.Token.String, Value = "contains:colon" },
     };

     var actual = GetTokens("contains\\:colon");
     var error = CheckTokensAndEof(actual, expected);
     Assert.IsNull(error, error);
 }
예제 #42
0
        // Only ':' separates a field name from its value. Comparison-style
        // limiters (>, <, >=, <=, <>) are not recognized, so those terms
        // fall through as plain String tokens.
        public void Lexer_FieldBadLimiters()
        {
            var expected = new[]
            {
                new TokenChecker { Token = SnLucLexer.Token.Field, Value = "Field" },
                new TokenChecker { Token = SnLucLexer.Token.Colon, Value = ":" },
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "value" },

                new TokenChecker { Token = SnLucLexer.Token.String, Value = "Field>value" },
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "Field<value" },
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "Field>=value" },
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "Field<=value" },
                new TokenChecker { Token = SnLucLexer.Token.String, Value = "Field<>value" },
            };

            var actual = GetTokens(" Field:value Field>value Field<value Field>=value Field<=value Field<>value ");
            var error = CheckTokensAndEof(actual, expected);
            Assert.IsNull(error, error);
        }
예제 #43
0
 // Apostrophes inside a double-quoted value are ordinary characters;
 // the whole quoted text stays one String token.
 public void Lexer_StringWithInnerApos()
 {
     var expected = new[]
     {
         new TokenChecker { Token = SnLucLexer.Token.Field, Value = "fieldname" },
         new TokenChecker { Token = SnLucLexer.Token.Colon, Value = ":" },
         new TokenChecker { Token = SnLucLexer.Token.String, Value = "text text 'text' text" },
     };

     var actual = GetTokens("fieldname:\"text text 'text' text\"");
     var error = CheckTokensAndEof(actual, expected);
     Assert.IsNull(error, error);
 }
예제 #44
0
 // Tokenizes <paramref name="code"/> with a lexer configured from the given
 // keyword/symbol tables.
 // NOTE(review): "ignoreWhireSpaces" is a typo for "ignoreWhiteSpaces", but
 // renaming a public parameter would break callers that use named arguments,
 // so it is kept as-is.
 public static TokenList Parse(string code, string[] keywords, string[] symbols, bool ignoreWhireSpaces = true, TokenChecker<char> whitespaceTokenChecker = null)
 {
     return new Lexer(keywords, symbols, ignoreWhireSpaces, whitespaceTokenChecker).Parse(code);
 }
예제 #45
0
 // Boolean operators are case-sensitive keywords: upper-case AND/OR/NOT and
 // the symbols !, &&, || are operator tokens, while lower-case and/or/not
 // are treated as ordinary String terms.
 public void Lexer_AndOrNot()
 {
     var expected = new[]
     {
         new TokenChecker { Token = SnLucLexer.Token.String, Value = "text" },
         new TokenChecker { Token = SnLucLexer.Token.And, Value = "AND" },
         new TokenChecker { Token = SnLucLexer.Token.String, Value = "and" },
         new TokenChecker { Token = SnLucLexer.Token.Or, Value = "OR" },
         new TokenChecker { Token = SnLucLexer.Token.String, Value = "or" },
         new TokenChecker { Token = SnLucLexer.Token.Not, Value = "NOT" },
         new TokenChecker { Token = SnLucLexer.Token.String, Value = "not" },
         new TokenChecker { Token = SnLucLexer.Token.Not, Value = "!" },
         new TokenChecker { Token = SnLucLexer.Token.And, Value = "&&" },
         new TokenChecker { Token = SnLucLexer.Token.Or, Value = "||" },
         new TokenChecker { Token = SnLucLexer.Token.String, Value = "text" },
     };

     var actual = GetTokens("text AND and OR or NOT not ! && || text");
     var error = CheckTokensAndEof(actual, expected);
     Assert.IsNull(error, error);
 }
예제 #46
0
 // Nested parenthesized groups after a field: every '(' / ')' becomes its
 // own LParen / RParen token and the +/- prefixes stay separate.
 public void Lexer_Groups()
 {
     var expected = new[]
     {
         new TokenChecker { Token = SnLucLexer.Token.Field, Value = "Field1" },
         new TokenChecker { Token = SnLucLexer.Token.Colon, Value = ":" },
         new TokenChecker { Token = SnLucLexer.Token.LParen, Value = "(" },
         new TokenChecker { Token = SnLucLexer.Token.Plus, Value = "+" },
         new TokenChecker { Token = SnLucLexer.Token.String, Value = "aaa" },
         new TokenChecker { Token = SnLucLexer.Token.Plus, Value = "+" },
         new TokenChecker { Token = SnLucLexer.Token.LParen, Value = "(" },
         new TokenChecker { Token = SnLucLexer.Token.String, Value = "bbb" },
         new TokenChecker { Token = SnLucLexer.Token.String, Value = "ccc" },
         new TokenChecker { Token = SnLucLexer.Token.RParen, Value = ")" },
         new TokenChecker { Token = SnLucLexer.Token.String, Value = "ddd" },
         new TokenChecker { Token = SnLucLexer.Token.Minus, Value = "-" },
         new TokenChecker { Token = SnLucLexer.Token.String, Value = "eee" },
         new TokenChecker { Token = SnLucLexer.Token.RParen, Value = ")" },
     };

     var actual = GetTokens("Field1:(+aaa +(bbb ccc) ddd -eee)");
     var error = CheckTokensAndEof(actual, expected);
     Assert.IsNull(error, error);
 }
예제 #47
0
 // A group containing an inner group that re-specifies its own field (G:b):
 // the inner Field/Colon pair must be tokenized independently of the outer one.
 public void Lexer_X()
 {
     // dump: BooleanQuery(Clause(Occur(), TermQuery(Term(F:a))), Clause(Occur(), BooleanQuery(Clause(Occur(+), TermQuery(Term(G:b))), Clause(Occur(-), TermQuery(Term(F:d))))))
     var expected = new[]
     {
         new TokenChecker { Token = SnLucLexer.Token.Field, Value = "F" },
         new TokenChecker { Token = SnLucLexer.Token.Colon, Value = ":" },
         new TokenChecker { Token = SnLucLexer.Token.LParen, Value = "(" },
         new TokenChecker { Token = SnLucLexer.Token.String, Value = "a" },
         new TokenChecker { Token = SnLucLexer.Token.LParen, Value = "(" },
         new TokenChecker { Token = SnLucLexer.Token.Plus, Value = "+" },
         new TokenChecker { Token = SnLucLexer.Token.Field, Value = "G" },
         new TokenChecker { Token = SnLucLexer.Token.Colon, Value = ":" },
         new TokenChecker { Token = SnLucLexer.Token.String, Value = "b" },
         new TokenChecker { Token = SnLucLexer.Token.Minus, Value = "-" },
         new TokenChecker { Token = SnLucLexer.Token.String, Value = "d" },
         new TokenChecker { Token = SnLucLexer.Token.RParen, Value = ")" },
         new TokenChecker { Token = SnLucLexer.Token.RParen, Value = ")" },
     };

     var actual = GetTokens("F:(a (+G:b -d))");
     var error = CheckTokensAndEof(actual, expected);
     Assert.IsNull(error, error);
 }