        public void ComplexTokenization_LongValidQuery()
        {
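            // Positions are 1-based character offsets into the raw input. A bare comma
            // lexes as an And element, and the quoted phrase keeps its inner whitespace,
            // with its Term position pointing just past the opening quote.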
            var input = "android , no AND (oracl* OR C++ OR C99) NOT iphone,less OR   \"  hey  baby  *\"   AND phone ONEAR appl*, more";

            var expectedElements = new[]
            {
                new LexicalElement(LexicalElementType.Term, "android", 1),
                new LexicalElement(LexicalElementType.And, ",", 9),
                new LexicalElement(LexicalElementType.Term, "no", 11),
                new LexicalElement(LexicalElementType.And, "AND", 14),
                new LexicalElement(LexicalElementType.LeftParenthesis, "(", 18),
                new LexicalElement(LexicalElementType.Term, "oracl*", 19),
                new LexicalElement(LexicalElementType.Or, "OR", 26),
                new LexicalElement(LexicalElementType.Term, "C++", 29),
                new LexicalElement(LexicalElementType.Or, "OR", 33),
                new LexicalElement(LexicalElementType.Term, "C99", 36),
                new LexicalElement(LexicalElementType.RightParenthesis, ")", 39),
                new LexicalElement(LexicalElementType.Not, "NOT", 41),
                new LexicalElement(LexicalElementType.Term, "iphone", 45),
                new LexicalElement(LexicalElementType.And, ",", 51),
                new LexicalElement(LexicalElementType.Term, "less", 52),
                new LexicalElement(LexicalElementType.Or, "OR", 57),
                new LexicalElement(LexicalElementType.Term, "  hey  baby  *", 63),
                new LexicalElement(LexicalElementType.And, "AND", 81),
                new LexicalElement(LexicalElementType.Term, "phone", 85),
                new LexicalElement(LexicalElementType.OrderedNear, "ONEAR", 91),
                new LexicalElement(LexicalElementType.Term, "appl*", 97),
                new LexicalElement(LexicalElementType.And, ",", 102),
                new LexicalElement(LexicalElementType.Term, "more", 104)
            };

            var elements = Lexer.Tokenize(input);

            Assert.True(TestsHelper.IsEqual(elements, expectedElements));
        }
        public void ONearExpression2()
        {
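            // Infix input: ONEAR a ( b AND c ). Postfix conversion emits operands in
            // order and appends each operator after its operands, so the parenthesized
            // AND is emitted before the leading ONEAR.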
            var input = new[]
            {
                new LexicalElement(LexicalElementType.OrderedNear, "ONEAR", 1),
                new LexicalElement(LexicalElementType.Term, "a", 2),
                new LexicalElement(LexicalElementType.LeftParenthesis, "(", 3),
                new LexicalElement(LexicalElementType.Term, "b", 4),
                new LexicalElement(LexicalElementType.And, "AND", 5),
                new LexicalElement(LexicalElementType.Term, "c", 6),
                new LexicalElement(LexicalElementType.RightParenthesis, ")", 7)
            };

            var expectedOutput = new[]
            {
                new LexicalElement(LexicalElementType.Term, "a", 2),
                new LexicalElement(LexicalElementType.Term, "b", 4),
                new LexicalElement(LexicalElementType.Term, "c", 6),
                new LexicalElement(LexicalElementType.And, "AND", 5),
                new LexicalElement(LexicalElementType.OrderedNear, "ONEAR", 1)
            };

            var output = Parser.GetPostfixNotation(input);

            Assert.True(TestsHelper.IsEqual(output, expectedOutput));
        }
        public void ComplexInput1()
        {
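            // Infix input: a AND ( b NOT c OR d ) NEAR e. The expected postfix order
            // implies NOT binds tighter than OR inside the parentheses and NEAR binds
            // tighter than AND outside them.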
            var input = new[]
            {
                new LexicalElement(LexicalElementType.Term, "a", 1),
                new LexicalElement(LexicalElementType.And, "AND", 2),
                new LexicalElement(LexicalElementType.LeftParenthesis, "(", 3),
                new LexicalElement(LexicalElementType.Term, "b", 4),
                new LexicalElement(LexicalElementType.Not, "NOT", 5),
                new LexicalElement(LexicalElementType.Term, "c", 6),
                new LexicalElement(LexicalElementType.Or, "OR", 7),
                new LexicalElement(LexicalElementType.Term, "d", 8),
                new LexicalElement(LexicalElementType.RightParenthesis, ")", 9),
                new LexicalElement(LexicalElementType.Near, "NEAR", 10),
                new LexicalElement(LexicalElementType.Term, "e", 11)
            };

            var expectedOutput = new[]
            {
                new LexicalElement(LexicalElementType.Term, "a", 1),
                new LexicalElement(LexicalElementType.Term, "b", 4),
                new LexicalElement(LexicalElementType.Term, "c", 6),
                new LexicalElement(LexicalElementType.Not, "NOT", 5),
                new LexicalElement(LexicalElementType.Term, "d", 8),
                new LexicalElement(LexicalElementType.Or, "OR", 7),
                new LexicalElement(LexicalElementType.Term, "e", 11),
                new LexicalElement(LexicalElementType.Near, "NEAR", 10),
                new LexicalElement(LexicalElementType.And, "AND", 2)
            };

            var output = Parser.GetPostfixNotation(input);

            Assert.True(TestsHelper.IsEqual(output, expectedOutput));
        }
        public void TermElementParameter()
        {
            var input = new[]
            {
                new LexicalElement(LexicalElementType.Term, "test", 1)
            };

            var output = Parser.GetPostfixNotation(input);

            Assert.True(TestsHelper.IsEqual(input, output));
        }
        public void OrphanNearElementParameter()
        {
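            // A lone operator with no operands is passed through unchanged.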
            var input = new[]
            {
                new LexicalElement(LexicalElementType.Near, "NEAR", 1)
            };

            var output = Parser.GetPostfixNotation(input);

            Assert.True(TestsHelper.IsEqual(input, output));
        }
        public void ComplexTokenization_AsteriskAfterTermSucceeds()
        {
            var input = "ab*";

            var expectedElements = new[]
            {
                new LexicalElement(LexicalElementType.Term, "ab*", 1)
            };

            var elements = Lexer.Tokenize(input);

            Assert.True(TestsHelper.IsEqual(elements, expectedElements));
        }
        public void SimpleTokenization_MultipleSimpleQuotes()
        {
            var input = "a'b'c";

            var expectedElements = new[]
            {
                new LexicalElement(LexicalElementType.Term, "a'b'c", 1)
            };

            var elements = Lexer.Tokenize(input);

            Assert.True(TestsHelper.IsEqual(elements, expectedElements));
        }
        public void ComplexTokenization_QuotedDoubleCommas()
        {
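            // Inside double quotes, commas are literal characters; the Term position
            // points at the first character after the opening quote.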
            var input = "\"a,,b\"";

            var expectedElements = new[]
            {
                new LexicalElement(LexicalElementType.Term, "a,,b", 2)
            };

            var elements = Lexer.Tokenize(input);

            Assert.True(TestsHelper.IsEqual(elements, expectedElements));
        }
        public void SimpleTokenization_Comma()
        {
            var input = ",";

            var expectedElements = new[]
            {
                new LexicalElement(LexicalElementType.And, ",", 1)
            };

            var elements = Lexer.Tokenize(input);

            Assert.True(TestsHelper.IsEqual(elements, expectedElements));
        }
        public void SimpleTokenization_RightParenthesis()
        {
            var input = ")";

            var expectedElements = new[]
            {
                new LexicalElement(LexicalElementType.RightParenthesis, ")", 1)
            };

            var elements = Lexer.Tokenize(input);

            Assert.True(TestsHelper.IsEqual(elements, expectedElements));
        }
        public void SimpleTokenization_EndWithNear()
        {
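            // Keyword text glued to other letters is not recognized as an operator.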
            var input = "xxxnear";

            var expectedElements = new[]
            {
                new LexicalElement(LexicalElementType.Term, "xxxnear", 1)
            };

            var elements = Lexer.Tokenize(input);

            Assert.True(TestsHelper.IsEqual(elements, expectedElements));
        }
        public void SimpleTokenization_BeginWithAnd()
        {
            var input = "andxxx";

            var expectedElements = new[]
            {
                new LexicalElement(LexicalElementType.Term, "andxxx", 1)
            };

            var elements = Lexer.Tokenize(input);

            Assert.True(TestsHelper.IsEqual(elements, expectedElements));
        }
        public void SimpleTokenization_QuotedNear()
        {
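            // Quoting a keyword demotes it to a plain Term; the position skips the quote.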
            var input = "\"NEAR\"";

            var expectedElements = new[]
            {
                new LexicalElement(LexicalElementType.Term, "NEAR", 2)
            };

            var elements = Lexer.Tokenize(input);

            Assert.True(TestsHelper.IsEqual(elements, expectedElements));
        }
        public void SimpleTokenization_ONear()
        {
            var input = "OnEaR";

            var expectedElements = new[]
            {
                new LexicalElement(LexicalElementType.OrderedNear, "OnEaR", 1)
            };

            var elements = Lexer.Tokenize(input);

            Assert.True(TestsHelper.IsEqual(elements, expectedElements));
        }
        public void SimpleTokenization_AndNot()
        {
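            // Keyword matching is case-insensitive; ANDNOT maps to the Not element type.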
            var input = "AnDnOt";

            var expectedElements = new[]
            {
                new LexicalElement(LexicalElementType.Not, "AnDnOt", 1)
            };

            var elements = Lexer.Tokenize(input);

            Assert.True(TestsHelper.IsEqual(elements, expectedElements));
        }
        public void SimpleTokenization_TermWithWhiteSpaces()
        {
            var input = "a b";

            var expectedElements = new[]
            {
                new LexicalElement(LexicalElementType.Term, "a b", 1)
            };

            var elements = Lexer.Tokenize(input);

            Assert.True(TestsHelper.IsEqual(elements, expectedElements));
        }
        public void SimpleTokenization_Asterisk()
        {
            var input = "*";

            var expectedElements = new[]
            {
                new LexicalElement(LexicalElementType.Term, "*", 1)
            };

            var elements = Lexer.Tokenize(input);

            Assert.True(TestsHelper.IsEqual(elements, expectedElements));
        }
        public void ComplexTokenization_EndingWithComma()
        {
            var input = "a,";

            var expectedElements = new[]
            {
                new LexicalElement(LexicalElementType.Term, "a", 1),
                new LexicalElement(LexicalElementType.And, ",", 2)
            };

            var elements = Lexer.Tokenize(input);

            Assert.True(TestsHelper.IsEqual(elements, expectedElements));
        }
        public void SimpleTokenization_TermWithWhiteSpacesAndKeyword2()
        {
            var input = "a b ANDNOT c d";

            var expectedElements = new[]
            {
                new LexicalElement(LexicalElementType.Term, "a b", 1),
                new LexicalElement(LexicalElementType.Not, "ANDNOT", 5),
                new LexicalElement(LexicalElementType.Term, "c d", 12)
            };

            var elements = Lexer.Tokenize(input);

            Assert.True(TestsHelper.IsEqual(elements, expectedElements));
        }
        public void ComplexTokenization_TermCanContainTokens()
        {
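            // Keywords embedded in a longer word stay part of the Term, while the
            // parentheses still tokenize because they act as separators.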
            var input = "reimbursementAnDNoT()OrNeArdisapeard";

            var expectedElements = new[]
            {
                new LexicalElement(LexicalElementType.Term, "reimbursementAnDNoT", 1),
                new LexicalElement(LexicalElementType.LeftParenthesis, "(", 20),
                new LexicalElement(LexicalElementType.RightParenthesis, ")", 21),
                new LexicalElement(LexicalElementType.Term, "OrNeArdisapeard", 22)
            };

            var elements = Lexer.Tokenize(input);

            Assert.True(TestsHelper.IsEqual(elements, expectedElements));
        }
        public void ComplexTokenization_ParenthesizedCommaWithSpaces()
        {
            var input = "(a , b)";

            var expectedElements = new[]
            {
                new LexicalElement(LexicalElementType.LeftParenthesis, "(", 1),
                new LexicalElement(LexicalElementType.Term, "a", 2),
                new LexicalElement(LexicalElementType.And, ",", 4),
                new LexicalElement(LexicalElementType.Term, "b", 6),
                new LexicalElement(LexicalElementType.RightParenthesis, ")", 7)
            };

            var elements = Lexer.Tokenize(input);

            Assert.True(TestsHelper.IsEqual(elements, expectedElements));
        }
        public void SimpleTokenization_ComplexSpaceSeparatedTerms1()
        {
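            // Consecutive words between operators merge into a single Term, runs of
            // whitespace collapse to single spaces in the element text, and the position
            // stays the raw offset of the first word.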
            var input = "OR a b c AND  d  e  f  NOT";

            var expectedElements = new[]
            {
                new LexicalElement(LexicalElementType.Or, "OR", 1),
                new LexicalElement(LexicalElementType.Term, "a b c", 4),
                new LexicalElement(LexicalElementType.And, "AND", 10),
                new LexicalElement(LexicalElementType.Term, "d e f", 15),
                new LexicalElement(LexicalElementType.Not, "NOT", 24)
            };

            var elements = Lexer.Tokenize(input);

            Assert.True(TestsHelper.IsEqual(elements, expectedElements));
        }
        public void ComplexTokenization_DoubleQuoteAndParenthesesMix2()
        {
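            // Quotes toggle literal mode: parentheses inside quotes stay part of the
            // Term text, the quote characters themselves become spaces, and unquoted
            // trailing characters continue the same Term.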
            var input = "a(b\"c)d\"e(f\"g)h\"i";

            var expectedElements = new[]
            {
                new LexicalElement(LexicalElementType.Term, "a", 1),
                new LexicalElement(LexicalElementType.LeftParenthesis, "(", 2),
                new LexicalElement(LexicalElementType.Term, "b c)d e", 3),
                new LexicalElement(LexicalElementType.LeftParenthesis, "(", 10),
                new LexicalElement(LexicalElementType.Term, "f g)h i", 11)
            };

            var elements = Lexer.Tokenize(input);

            Assert.True(TestsHelper.IsEqual(elements, expectedElements));
        }
        public void ComplexTokenization_DoubleQuoteAndParenthesesMix1()
        {
            var input = "a\"b(c\"d)e\"f(g\"h(i";

            var expectedElements = new[]
            {
                new LexicalElement(LexicalElementType.Term, "a b(c d", 1),
                new LexicalElement(LexicalElementType.RightParenthesis, ")", 8),
                new LexicalElement(LexicalElementType.Term, "e f(g h", 9),
                new LexicalElement(LexicalElementType.LeftParenthesis, "(", 16),
                new LexicalElement(LexicalElementType.Term, "i", 17)
            };

            var elements = Lexer.Tokenize(input);

            Assert.True(TestsHelper.IsEqual(elements, expectedElements));
        }
        public void ValidSimpleParentheses()
        {
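            // Parentheses are consumed during conversion and never appear in the
            // postfix output.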
            var input = new[]
            {
                new LexicalElement(LexicalElementType.LeftParenthesis, "(", 1),
                new LexicalElement(LexicalElementType.Term, "a", 2),
                new LexicalElement(LexicalElementType.RightParenthesis, ")", 3)
            };

            var expectedOutput = new[]
            {
                new LexicalElement(LexicalElementType.Term, "a", 2)
            };

            var output = Parser.GetPostfixNotation(input);

            Assert.True(TestsHelper.IsEqual(output, expectedOutput));
        }
        public void ComplexTokenization_AllTokens()
        {
            var input = "reimbursement AnD NoT Or oNeAr ( )";

            var expectedElements = new[]
            {
                new LexicalElement(LexicalElementType.Term, "reimbursement", 1),
                new LexicalElement(LexicalElementType.And, "AnD", 15),
                new LexicalElement(LexicalElementType.Not, "NoT", 19),
                new LexicalElement(LexicalElementType.Or, "Or", 23),
                new LexicalElement(LexicalElementType.OrderedNear, "oNeAr", 26),
                new LexicalElement(LexicalElementType.LeftParenthesis, "(", 32),
                new LexicalElement(LexicalElementType.RightParenthesis, ")", 34)
            };

            var elements = Lexer.Tokenize(input);

            Assert.True(TestsHelper.IsEqual(elements, expectedElements));
        }
        public void ONearElementParameter()
        {
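            // ONEAR between two terms converts to the two operands followed by the operator.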
            var input = new[]
            {
                new LexicalElement(LexicalElementType.Term, "a", 1),
                new LexicalElement(LexicalElementType.Near, "ONEAR", 3),
                new LexicalElement(LexicalElementType.Term, "b", 8)
            };

            var expectedOutput = new[]
            {
                new LexicalElement(LexicalElementType.Term, "a", 1),
                new LexicalElement(LexicalElementType.Term, "b", 8),
                new LexicalElement(LexicalElementType.Near, "ONEAR", 3)
            };

            var output = Parser.GetPostfixNotation(input);

            Assert.True(TestsHelper.IsEqual(output, expectedOutput));
        }
        public void AndNotElementParameter()
        {
            var input = new[]
            {
                new LexicalElement(LexicalElementType.Term, "a", 1),
                new LexicalElement(LexicalElementType.Not, "ANDNOT", 3),
                new LexicalElement(LexicalElementType.Term, "b", 10)
            };

            var expectedOutput = new[]
            {
                new LexicalElement(LexicalElementType.Term, "a", 1),
                new LexicalElement(LexicalElementType.Term, "b", 10),
                new LexicalElement(LexicalElementType.Not, "ANDNOT", 3)
            };

            var output = Parser.GetPostfixNotation(input);

            Assert.True(TestsHelper.IsEqual(output, expectedOutput));
        }
        public void ComplexTokenization_ParenthesesAreValidSeparators()
        {
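            // Parentheses delimit tokens just like whitespace, so bare keywords
            // adjacent to them are still recognized as operators.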
            var input = "and)not(or)near(word";

            var expectedElements = new[]
            {
                new LexicalElement(LexicalElementType.And, "and", 1),
                new LexicalElement(LexicalElementType.RightParenthesis, ")", 4),
                new LexicalElement(LexicalElementType.Not, "not", 5),
                new LexicalElement(LexicalElementType.LeftParenthesis, "(", 8),
                new LexicalElement(LexicalElementType.Or, "or", 9),
                new LexicalElement(LexicalElementType.RightParenthesis, ")", 11),
                new LexicalElement(LexicalElementType.Near, "near", 12),
                new LexicalElement(LexicalElementType.LeftParenthesis, "(", 16),
                new LexicalElement(LexicalElementType.Term, "word", 17)
            };

            var elements = Lexer.Tokenize(input);

            Assert.True(TestsHelper.IsEqual(elements, expectedElements));
        }
        public void ComplexTokenization_CommasAreValidSeparators()
        {
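            // Commas both separate tokens and lex as And elements in their own right.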
            var input = "and,not,or,near,word";

            var expectedElements = new[]
            {
                new LexicalElement(LexicalElementType.And, "and", 1),
                new LexicalElement(LexicalElementType.And, ",", 4),
                new LexicalElement(LexicalElementType.Not, "not", 5),
                new LexicalElement(LexicalElementType.And, ",", 8),
                new LexicalElement(LexicalElementType.Or, "or", 9),
                new LexicalElement(LexicalElementType.And, ",", 11),
                new LexicalElement(LexicalElementType.Near, "near", 12),
                new LexicalElement(LexicalElementType.And, ",", 16),
                new LexicalElement(LexicalElementType.Term, "word", 17)
            };

            var elements = Lexer.Tokenize(input);

            Assert.True(TestsHelper.IsEqual(elements, expectedElements));
        }