Ejemplo n.º 1
0
		public static void AssertTokenValues(ITokenizer tokenizer, params string[] expectedValues)
		{
			// Verify the tokenizer yields exactly the expected values, in order.
			for (int i = 0; i < expectedValues.Length; ++i)
			{
				Assertion.AssertEquals(expectedValues[i], tokenizer.NextToken().Value);
			}
			// The token stream must be exhausted once all values are consumed.
			Assertion.AssertNull(tokenizer.NextToken());
		}
Ejemplo n.º 2
0
 public static void AssertTokenValues(ITokenizer tokenizer, params string[] expectedValues)
 {
     // Consume one token per expected value and compare them in sequence.
     foreach (string expected in expectedValues)
     {
         Token token = tokenizer.NextToken();
         Assertion.AssertEquals(expected, token.Value);
     }
     // No tokens may remain after every expected value has been matched.
     Assertion.AssertNull(tokenizer.NextToken());
 }
        void IndexByField(IRecord record, IndexedField field)
        {
            // Tokenize the record's field value and index every resulting token.
            string fieldText = (string)record[field.Name];
            ITokenizer tokenizer = CreateTokenizer(fieldText);

            for (Token token = tokenizer.NextToken(); token != null; token = tokenizer.NextToken())
            {
                IndexByToken(token, record, field);
            }
        }
        /// <summary>
        /// Searches the index for the words included in
        /// the expression passed as argument. <br />
        /// All the fields are searched for every word
        /// in the expression.<br />
        /// </summary>
        /// <param name="expression">search expression</param>
        /// <returns>
        /// When expression.SearchMode is
        /// <see cref="FullTextSearchMode.IncludeAny"/> every
        /// record for which at least one word in the expression
        /// implies a match will be returned.<br />
        /// When expression.SearchMode is
        /// <see cref="FullTextSearchMode.IncludeAll" /> only
        /// those records for which all of the words in the expression
        /// imply a match will be returned.
        /// </returns>
        /// <exception cref="ArgumentException">
        /// when the expression does not yield at least one valid token
        /// </exception>
        public Bamboo.Prevalence.Indexing.SearchResult Search(FullTextSearchExpression expression)
        {
            ITokenizer tokenizer = CreateTokenizer(expression.Expression);
            Token      token     = tokenizer.NextToken();

            if (null == token)
            {
                throw new ArgumentException("Invalid search expression. The expression must contain at least one valid token!", "expression");
            }

            // FIX: Environment.TickCount is a 32-bit counter that wraps roughly
            // every 24.9 days, so the old TickCount subtraction could produce a
            // negative or bogus elapsed time. Stopwatch is monotonic and safe.
            var stopwatch = System.Diagnostics.Stopwatch.StartNew();

            SearchResult result = (expression.SearchMode == FullTextSearchMode.IncludeAny)
                ? IncludeAny(tokenizer, token)
                : IncludeAll(tokenizer, token);

            // Elapsed time is still reported in milliseconds, as before.
            result.ElapsedTime = stopwatch.ElapsedMilliseconds;

            return result;
        }
Ejemplo n.º 5
0
		public static void AssertTokens(ITokenizer tokenizer, params Token[] tokens)
		{
			// Compare each produced token against its expected counterpart, in order.
			for (int i = 0; i < tokens.Length; ++i)
			{
				Assertion.AssertEquals(tokens[i], tokenizer.NextToken());
			}
		}
Ejemplo n.º 6
0
        /// <summary>
        /// Parses a left-associative chain of binary operations from the token stream.
        /// </summary>
        /// <param name="tokenizer">token source; must not be null</param>
        /// <returns>the parsed expression tree</returns>
        public IExpression Parse(ITokenizer tokenizer)
        {
            if (tokenizer == null)
            {
                throw new ArgumentNullException(nameof(tokenizer));
            }

            // Parse the first operand, then repeatedly fold "operator operand"
            // pairs into the accumulated expression.
            var result = _expression(tokenizer);

            while (true)
            {
                tokenizer.CurrentToken.EnsureExpectedSymbol(_operationFactory);

                var operation = _createOperation(tokenizer.CurrentToken.Symbols.First());
                if (operation == null)
                {
                    // No further operator: the accumulated expression is complete.
                    return result;
                }

                tokenizer.NextToken();
                result = new BinaryExpression(result, _expression(tokenizer), operation);
            }
        }
Ejemplo n.º 7
0
        /// <summary>
        /// Returns the token <paramref name="howFar"/> positions ahead,
        /// reading and buffering tokens from the tokenizer on demand.
        /// </summary>
        /// <param name="howFar">zero-based look-ahead distance</param>
        /// <returns>the requested token, or null when the input is exhausted</returns>
        protected virtual IToken LookAheadImpl(int howFar)
        {
            // Fast path: the requested token is already in the buffer.
            if (howFar < originalFormTokens.Count)
            {
                return(originalFormTokens[howFar]);
            }
            else
            {
                // Buffer the intermediate tokens up to howFar - 1.
                // NOTE(review): this calls LookAhead, not LookAheadImpl —
                // presumably the public wrapper that routes back here; confirm.
                for (int i = originalFormTokens.Count; i < howFar; i++)
                {
                    LookAhead(i);
                }
                // Re-position the tokenizer just past the last buffered token
                // before reading the next one.
                if (this.originalFormTokens.Count > 0)
                {
                    var last = this.originalFormTokens[this.originalFormTokens.Count - 1];
                    this.currentTokenizer.Position = last.Position + last.Length;
                }

                CurrentTokenizer.NextToken(this.State);
                if (CurrentTokenizer.CurrentToken != null)
                {
                    originalFormTokens.Add(CurrentTokenizer.CurrentToken);
                }
                else
                {
                    // End of input: no token exists at the requested distance.
                    return(null);
                }
                // Keep StreamPosition anchored at the first buffered token.
                if (this.originalFormTokens.Count > 0)
                {
                    StreamPosition = this.originalFormTokens[0].Position;
                }
                return(originalFormTokens[howFar]);
            }
        }
Ejemplo n.º 8
0
 public static void AssertTokens(ITokenizer tokenizer, params Token[] tokens)
 {
     // Each expected token must match the next token produced by the tokenizer.
     foreach (Token want in tokens)
     {
         Token got = tokenizer.NextToken();
         Assertion.AssertEquals(want, got);
     }
 }
Ejemplo n.º 9
0
        /// <summary>
        /// Attempts to parse a parenthesized sub-expression at the current token.
        /// </summary>
        private static bool TryGetBracketExpression(ITokenizer tokenizer, out IExpression bracketExpression)
        {
            // Not positioned on "(": no bracketed sub-expression here.
            if (!tokenizer.CurrentToken.IsLeftBracketSymbol())
            {
                bracketExpression = null;
                return false;
            }

            // Skip "(", parse the inner expression, then require and skip ")".
            tokenizer.NextToken();
            bracketExpression = _expressionParser.Parse(tokenizer);
            tokenizer.CurrentToken.EnsureRightBracketSymbol();
            tokenizer.NextToken();

            return bracketExpression != null;
        }
Ejemplo n.º 10
0
 /// <summary>
 /// Demands that the current token is exactly the given symbol, then consumes it.
 /// </summary>
 public static void SkipSymbol(this ITokenizer tokenizer, string symbol)
 {
     bool isExpectedSymbol = tokenizer.Current == TokenType.Symbol && tokenizer.CurrentData == symbol;
     if (!isExpectedSymbol)
     {
         throw tokenizer.CreateParseError("DAE-00297 Expected symbol:" + symbol);
     }
     tokenizer.NextToken();
 }
Ejemplo n.º 11
0
        public static void AssertTokens(ITokenizer tokenizer, ITokenFilter filter, params Token[] tokens)
        {
            // Wrap the tokenizer in the filter, then verify the filtered stream.
            ITokenizer filtered = filter.Clone(tokenizer);

            for (int i = 0; i < tokens.Length; ++i)
            {
                Assertion.AssertEquals(tokens[i], filtered.NextToken());
            }
        }
Ejemplo n.º 12
0
        /// <summary>
        /// Consumes a unary "+" token if present; otherwise leaves the stream untouched.
        /// </summary>
        private static bool IsPositiveToken(ITokenizer tokenizer)
        {
            if (!tokenizer.CurrentToken.IsAddSymbol())
            {
                return false;
            }

            tokenizer.NextToken();
            return true;
        }
        SearchResult IncludeAny(ITokenizer tokenizer, Token token)
        {
            // Union semantics: every token contributes its matches to one result set.
            SearchResult matches = new SearchResult();

            for (; token != null; token = tokenizer.NextToken())
            {
                SearchToken(matches, token);
            }

            return matches;
        }
Ejemplo n.º 14
0
        // NOTE(review): "Tye" looks like a typo for "Try", but renaming would
        // break existing callers, so the public name is kept as-is.
        private static bool TyeGetNumberExpression(ITokenizer tokenizer, out IExpression numberExpression)
        {
            // Not a numeric token: report failure without consuming anything.
            if (!tokenizer.CurrentToken.Number.HasValue)
            {
                numberExpression = null;
                return false;
            }

            // Wrap the literal value and advance past it.
            numberExpression = new NumberExpression(tokenizer.CurrentToken.Number.Value);
            tokenizer.NextToken();
            return true;
        }
Ejemplo n.º 15
0
        /// <summary>
        /// Recursive-descent parse at the given operator priority level.
        /// </summary>
        Node ParseByPriority(int priority)
        {
            // At the highest priority level only leaf nodes remain.
            if (priority == OperatorPriority.Highest)
            {
                return ParseLeaf();
            }

            int nextPriority = _prioritiesList[_prioritiesList.IndexOf(priority) + 1];
            List<Operator> operators = _operationsByPriority[priority];
            IEnumerable<OperatorUnary> unaryOps = operators.OfType<OperatorUnary>();
            IEnumerable<OperatorBinary> binaryOps = operators.OfType<OperatorBinary>();

            // A unary operator at this level consumes its token and parses the
            // operand at the same priority.
            foreach (OperatorUnary unary in unaryOps)
            {
                if (unary.TokenString == _tokenizer.Token)
                {
                    _tokenizer.NextToken();
                    return new NodeUnary(ParseByPriority(priority), unary.Op);
                }
            }

            // Otherwise parse a left operand one level tighter, then fold binary
            // operators at this level.
            Node lhs = ParseByPriority(nextPriority);
            while (true)
            {
                OperatorBinary binary = binaryOps.FirstOrDefault(candidate => candidate.TokenString == _tokenizer.Token);
                if (binary == null)
                {
                    return lhs;
                }

                _tokenizer.NextToken();
                // Left-associative: right operand parses at the tighter level.
                // Right-associative: right operand recurses at this same level.
                Node rhs = binary.Associativity == Associativity.LEFT
                    ? ParseByPriority(nextPriority)
                    : ParseByPriority(priority);
                lhs = new NodeBinary(lhs, rhs, binary.Op);
            }
        }
Ejemplo n.º 16
0
        /// <summary>
        /// Attempts to parse a unary negation ("-expr") at the current token.
        /// </summary>
        private static bool TryGetNegativeExpression(ITokenizer tokenizer, out IExpression negativeExpression)
        {
            // Only a leading "-" introduces a negation.
            if (!tokenizer.CurrentToken.IsSubtractSymbol())
            {
                negativeExpression = null;
                return false;
            }

            // Consume the "-" and negate the unary expression that follows.
            tokenizer.NextToken();
            IExpression operand = ParseUnary(tokenizer);
            negativeExpression = new UnaryExpression(operand, value => -value);

            return true;
        }
        /// <summary>
        /// Walks the token stream, accumulating phrase text and collecting
        /// the distinct non-ignored words together with their phrases.
        /// </summary>
        public IEnumerable <Word> Interprete()
        {
            var words = new Dictionary<string, Word>();
            var phrase = new Phrase();
            var previousWasSpace = false;

            for (var token = _tokenizer.CurrentToken(); token != null; token = _tokenizer.NextToken())
            {
                bool isSpace = token.IsSpace();

                // Collapse runs of whitespace into a single space in the phrase text.
                if (isSpace && !previousWasSpace)
                {
                    phrase.Text += " ";
                }
                if (!isSpace)
                {
                    phrase.Text += token.Text;
                }
                previousWasSpace = isSpace;

                // Record every non-ignored word, linking it to its containing phrase.
                if (token.IsWord() && !token.Ignore)
                {
                    var word = new Word();
                    word.Text = token.Text;
                    word.AddPhrase(phrase);
                    if (!TestPhrasalVerb(words, word))
                    {
                        AddToList(words, word);
                    }
                }

                // Phrase-ending punctuation starts a fresh phrase.
                if (token.Text.Length == 1 && PhraseEndings.Contains(token.Text))
                {
                    phrase = new Phrase();
                }
            }

            return words.Values;
        }
        SearchResult IncludeAll(ITokenizer tokenizer, Token token)
        {
            // Intersection semantics: a record must match every token.
            // Precondition: token is non-null (the caller validates the expression).
            ArrayList perTokenResults = new ArrayList();

            for (; token != null; token = tokenizer.NextToken())
            {
                SearchResult single = new SearchResult();
                SearchToken(single, token);
                perTokenResults.Add(single);
            }

            // Intersect the per-token results; stop early once the set is empty.
            SearchResult intersection = (SearchResult)perTokenResults[0];
            for (int i = 1; i < perTokenResults.Count && intersection.Count > 0; ++i)
            {
                intersection = intersection.Intersect((SearchResult)perTokenResults[i]);
            }

            return intersection;
        }
Ejemplo n.º 19
0
        /// <summary>
        /// Executes next step of parser and returns parser currentState.
        /// Reads one token when needed, then either reports it to the caller
        /// or drives the LALR tables with it.
        /// </summary>
        /// <returns>Parser current currentState.</returns>
        public virtual ParseMessage Parse()
        {
            while (true)
            {
                T inputToken;
                if (currentToken == null)
                {
                    //We must read a currentToken
                    T            textInputToken;
                    ParseMessage message = tokenizer.NextToken(out textInputToken);
                    // A null token here signals a tokenizer failure, not
                    // end-of-input (end-of-input arrives as SymbolKind.End).
                    if (textInputToken == null)
                    {
                        return(ParseMessage.InternalError);
                    }
                    //					Debug.WriteLine(string.Format("State: {0} Line: {1}, Column: {2}, Parse Value: {3}, Token Type: {4}", currentState.Index, inputToken.Line, inputToken.LinePosition, inputToken.Text, inputToken.symbol.Name), "Token Read");
                    // Non-end tokens are reported to the caller one at a time;
                    // the End token falls through to be processed immediately.
                    if (textInputToken.Symbol.Kind != SymbolKind.End)
                    {
                        currentToken = textInputToken;
                        return(message);
                    }
                    inputToken = textInputToken;
                }
                else
                {
                    inputToken = currentToken;
                }
                switch (inputToken.Symbol.Kind)
                {
                // Trivia is discarded without consulting the LALR tables.
                case SymbolKind.WhiteSpace:
                case SymbolKind.CommentStart:
                case SymbolKind.CommentLine:
                    ClearCurrentToken();
                    break;

                case SymbolKind.Error:
                    return(ParseMessage.LexicalError);

                default:
                    // Look up the LALR action for this symbol in the current state.
                    LalrAction action = currentState.GetActionBySymbol(inputToken.Symbol);
                    if (action == null)
                    {
                        // No action defined: give error recovery a chance to
                        // substitute a token before reporting a syntax error.
                        if (RetrySyntaxError(ref inputToken))
                        {
                            currentToken = inputToken;
                            continue;
                        }
                        return(ParseMessage.SyntaxError);
                    }
                    // the Execute() is the ParseToken() equivalent
                    switch (action.Execute(this, inputToken))
                    {
                    case TokenParseResult.Accept:
                        return(ParseMessage.Accept);

                    case TokenParseResult.Shift:
                        // Shift consumed the token; loop to read the next one.
                        ClearCurrentToken();
                        break;

                    case TokenParseResult.SyntaxError:
                        return(ParseMessage.SyntaxError);

                    case TokenParseResult.ReduceNormal:
                        return(ParseMessage.Reduction);

                    case TokenParseResult.InternalError:
                        return(ParseMessage.InternalError);
                    }
                    break;
                }
            }
        }
		SearchResult IncludeAll(ITokenizer tokenizer, Token token)
		{
			// Collect one SearchResult per token, then fold them by intersection.
			// Assumes at least one token (the caller validates the expression first).
			ArrayList partials = new ArrayList();
			while (token != null)
			{
				SearchResult partial = new SearchResult();
				SearchToken(partial, token);
				partials.Add(partial);
				token = tokenizer.NextToken();
			}

			SearchResult combined = (SearchResult)partials[0];
			int index = 1;
			// Bail out of the fold as soon as the intersection becomes empty.
			while (index < partials.Count && combined.Count > 0)
			{
				combined = combined.Intersect((SearchResult)partials[index]);
				++index;
			}
			return combined;
		}
		SearchResult IncludeAny(ITokenizer tokenizer, Token token)
		{
			// Any-match semantics: accumulate hits for every token into one result.
			SearchResult hits = new SearchResult();
			for (; token != null; token = tokenizer.NextToken())
			{
				SearchToken(hits, token);
			}
			return hits;
		}
Ejemplo n.º 22
0
 /// <summary>
 /// Advances the underlying tokenizer to its next token.
 /// </summary>
 protected void NextToken() => m_tokenizer.NextToken();