Example No. 1
        private static Pair<string, int> FindScriptName(
            int start,
            ITokenStream tokens)
        {
            string lastIdent = null;
            var lastIdentIndex = 0;
            for (var i = start; i < tokens.Size; i++)
            {
                if (tokens.Get(i).Type == EsperEPL2GrammarParser.IDENT)
                {
                    lastIdent = tokens.Get(i).Text;
                    lastIdentIndex = i;
                }

                if (tokens.Get(i).Type == EsperEPL2GrammarParser.LPAREN)
                {
                    break;
                }

                // find beginning of script, ignore brackets
                if (tokens.Get(i).Type == EsperEPL2GrammarParser.LBRACK && tokens.Get(i + 1).Type != EsperEPL2GrammarParser.RBRACK)
                {
                    break;
                }
            }

            if (lastIdent == null)
            {
                throw new IllegalStateException("Failed to parse expression name");
            }

            return new Pair<string, int>(lastIdent, lastIdentIndex);
        }
Example No. 2
        private static bool IsContainsScriptOrClassExpression(ITokenStream tokens)
        {
            for (var i = 0; i < tokens.Size; i++)
            {
                if (tokens.Get(i).Type == EsperEPL2GrammarParser.CLASSDECL) {
                    int startTokenTripleQuote = FindTokenClass(i + 1, tokens);
                    if (startTokenTripleQuote != -1) {
                        int endTokenTripleQuote = FindTokenClass(startTokenTripleQuote + 1, tokens);
                        if (endTokenTripleQuote != -1) {
                            return true;
                        }
                    }
                }
                if (tokens.Get(i).Type == EsperEPL2GrammarParser.EXPRESSIONDECL)
                {
                    var startTokenLcurly = FindStartTokenScript(i + 1, tokens, EsperEPL2GrammarParser.LCURLY);
                    var startTokenLbrack = FindStartTokenScript(i + 1, tokens, EsperEPL2GrammarParser.LBRACK);
                    // Handle:
                    // expression ABC { some[other] }
                    // expression boolean js:doit(...) [ {} ]
                    if (startTokenLbrack != -1 && (startTokenLcurly == -1 || startTokenLcurly > startTokenLbrack))
                    {
                        return true;
                    }
                }
            }

            return false;
        }
Example No. 3
    /// <summary>
    /// Returns true if, at the current index of the parser's token stream,
    /// a token of the given type exists on the hidden channel.
    /// </summary>
    /// <param name="type">
    /// The type of the token on the hidden channel to check for.
    /// </param>
    /// <returns>True if the preceding token is on the hidden channel and has the given type.</returns>
    protected bool here(int type)
    {
        // Get the token ahead of the current index.
        int    possibleIndexEosToken = CurrentToken.TokenIndex - 1;
        IToken ahead = _input.Get(possibleIndexEosToken);

        // Check if the token resides on the Hidden channel and if it's of the
        // provided type.
        return(ahead.Channel == Lexer.Hidden && ahead.Type == type);
    }
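In practice a helper like this is called from a semantic predicate so that a rule only applies when a particular hidden-channel token, such as a line terminator, immediately precedes the current position. A minimal sketch of such a wrapper, assuming it lives in the same base-parser class and that the generated lexer defines a LineTerminator token type (both names are illustrative, not taken from the example above):

    protected bool LineTerminatorAhead()
    {
        // True only when the token just before the current index sits on the
        // hidden channel and is a line terminator (hypothetical token type).
        return here(MyLexer.LineTerminator);
    }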
Example No. 4
        private static Interval GetSourceInterval(ITree tree, ITokenStream tokenStream)
        {
            Contract.Requires(tree != null);
            Contract.Requires(tokenStream != null);

            IToken firstToken = tokenStream.Get(tree.TokenStartIndex);
            IToken lastToken  = tokenStream.Get(tree.TokenStopIndex);

            return(Interval.FromBounds(firstToken.StartIndex, lastToken.StopIndex));
        }
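Since the returned Interval holds inclusive character offsets into the original input, it can be used to slice out the exact source text of a node. A hedged sketch building on the method above; the source parameter and the method name are illustrative:

        private static string GetSourceText(ITree tree, ITokenStream tokenStream, string source)
        {
            // Interval.a and Interval.b are inclusive bounds, so the length is b - a + 1.
            Interval chars = GetSourceInterval(tree, tokenStream);
            return source.Substring(chars.a, chars.b - chars.a + 1);
        }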
Example No. 5
        private static int FindEndTokenScript(
            int startIndex,
            ITokenStream tokens,
            int tokenTypeSearch,
            ISet<int> afterScriptTokens,
            bool requireAfterScriptToken)
        {
            // The next non-comment token must be among the afterScriptTokens, i.e. SELECT/INSERT/ON/DELETE/UPDATE
            // Find next non-comment token.
            if (requireAfterScriptToken)
            {
                var found = -1;
                for (var i = startIndex; i < tokens.Size; i++)
                {
                    if (tokens.Get(i).Type == tokenTypeSearch)
                    {
                        for (var j = i + 1; j < tokens.Size; j++)
                        {
                            var next = tokens.Get(j);
                            if (next.Channel == 0)
                            {
                                if (afterScriptTokens.Contains(next.Type))
                                {
                                    found = i;
                                }

                                break;
                            }
                        }
                    }

                    if (found != -1)
                    {
                        break;
                    }
                }

                return found;
            }

            // Find the last token
            var indexLast = -1;
            for (var i = startIndex; i < tokens.Size; i++)
            {
                if (tokens.Get(i).Type == tokenTypeSearch)
                {
                    indexLast = i;
                }
            }

            return indexLast;
        }
Example No. 6
        private TokenLocation GetLocation(ISyntaxTree node)
        {
            var interval = node.SourceInterval;

            if (interval.Length == 0)
            {
                return(new TokenLocation(tokenStream.Get(0), tokenStream.Get(0)));
            }
            var start = tokenStream.Get(interval.a);
            var end   = tokenStream.Get(interval.b);

            return(new TokenLocation(start, end));
        }
Example No. 7
        /// <summary>
        /// Output all unprocessed whitespace tokens in the given token stream index range
        /// </summary>
        protected void WriteWhiteSpace(int minIndex, int maxIndex)
        {
            IToken token;

            for (int index = minIndex; index <= maxIndex; index++)
            {
                token = _tokens.Get(index);

                if (IsWhiteSpace(token))
                {
                    WriteWhitespaceToken(token);
                }
            }
        }
Example No. 8
 public static IEnumerable <IToken> AllTokens(ITokenStream ts)
 {
     for (int i = 0; i < ts.Size; ++i)
     {
         yield return(ts.Get(i));
     }
 }
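A hedged usage sketch: with the enumerator above, dumping a filled token stream is a plain foreach. The stream variable and the output format are illustrative only:

 public static void DumpTokens(ITokenStream ts)
 {
     // Prints index, type and text for every token, including off-channel ones.
     foreach (IToken token in AllTokens(ts))
     {
         Console.WriteLine("{0,4}: type={1} text='{2}'", token.TokenIndex, token.Type, token.Text);
     }
 }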
Example No. 9
        public static string MakeMessage(Module module, IParseTree place, ITokenStream tokens, string msg)
        {
            var interval = place.SourceInterval;
            var begin    = tokens.Get(interval.a);

            return(MakeMessage(module.file_path, begin.Line, begin.Column, msg));
        }
Example No. 10
    /** Find stop token index of next operator; return -1 if not operator. */

    public static int getLastOpTokenIndex(ITokenStream tokens)
    {
        var i  = tokens.Index; // current on-channel lookahead token index
        var lt = tokens.Get(i);

        if (lt.getType() == SwiftParser.DOT && tokens.get(i + 1).getType() == SwiftParser.DOT)
        {
            // dot-operator
            i += 2; // point at token after ".."
            lt = tokens.get(i);
            while (lt.getType() != Token.EOF &&
                   (lt.getType() == SwiftParser.DOT || isOperatorChar(lt.getType())))
            {
                i++;
                lt = tokens.get(i);
            }
            return(i - 1);
        }
        // Is it regular operator?
        if (!isOperatorHead(lt.getType()))
        {
            return(-1);
        }
        i++;
        lt = tokens.get(i);
        while (lt.getType() != Token.EOF && isOperatorChar(lt.getType()))
        {
            i++;
            lt = tokens.get(i);
        }
        int stop = i - 1;

        return(stop);
    }
Example No. 11
 public virtual void Emit(CommonTree opAST, Bytecode opcode)
 {
     EnsureCapacity(1);
     if (opAST != null)
     {
         int i = opAST.TokenStartIndex;
         int j = opAST.TokenStopIndex;
         int p = tokens.Get(i).StartIndex;
         int q = tokens.Get(j).StopIndex;
         if (!(p < 0 || q < 0))
         {
             impl.sourceMap[ip] = Interval.FromBounds(p, q + 1);
         }
     }
     impl.instrs[ip++] = (byte)opcode;
 }
Example No. 12
            public override string ToString()
            {
                string opName = GetType().FullName;
                int    index  = opName.IndexOf('$');

                opName = Sharpen.Runtime.Substring(opName, index + 1, opName.Length);
                return("<" + opName + "@" + tokens.Get(this.index) + ":\"" + text + "\">");
            }
Example No. 13
        IToken ScanForLastToken(int type, int start, ITokenStream TokenStream, out int iLast)
        {
            var lastFound = TokenStream.Get(start);
            int iLine     = lastFound.Line;

            iLast = start;
            IToken nextToken  = lastFound;
            IToken nextToken2 = lastFound;

            for (int i = start + 1; i < TokenStream.Size - 2; i++)
            {
                nextToken  = TokenStream.Get(i);
                nextToken2 = TokenStream.Get(i + 2);  // STATIC <WS> DEFINE for example.
                if (nextToken.Line > iLine)
                {
                    if (nextToken.Type == type || (nextToken2.Type == type && nextToken.Type == XSharpLexer.STATIC))
                    {
                        lastFound = nextToken;
                        iLine     = nextToken.Line;
                        iLast     = i;
                    }
                    else if (nextToken.Type != XSharpLexer.WS)
                    {
                        break;
                    }
                }
            }
            nextToken = lastFound;
            for (int i = iLast; i < TokenStream.Size; i++)
            {
                nextToken = TokenStream.Get(i);
                if (nextToken.Line == lastFound.Line &&
                    nextToken.Type != XSharpLexer.NL &&
                    nextToken.Type != XSharpLexer.EOS)
                {
                    lastFound = nextToken;
                }
                else
                {
                    break;
                }
            }
            return(lastFound);
        }
Example No. 14
        private static int FindTokenClass(
            int startIndex,
            ITokenStream tokens)
        {
            int found = -1;
            for (int i = startIndex; i < tokens.Size; i++) {
                if (tokens.Get(i).Type == EsperEPL2GrammarParser.TRIPLEQUOTE) {
                    return i;
                }
            }

            return found;
        }
Example No. 15
        public virtual void Consume()
        {
            if (initialStreamState)
            {
                ConsumeInitialHiddenTokens();
            }
            int    a = input.Index;
            IToken t = input.LT(1);

            input.Consume();
            int b = input.Index;

            dbg.ConsumeToken(t);
            if (b > a + 1)
            {
                // then we consumed more than one token; must be off channel tokens
                for (int i = a + 1; i < b; i++)
                {
                    dbg.ConsumeHiddenToken(input.Get(i));
                }
            }
        }
Example No. 16
        public static Range GetRange(this CommonTree node, ITokenStream tokenStream, IReadOnlyScriptText scriptText)
        {
            if (node is CommonErrorNode errorNode)
            {
                return(new Range()
                {
                    Start = scriptText.PositionAt(((CommonToken)tokenStream.Get(errorNode.start.TokenIndex)).StartIndex),
                    End = scriptText.PositionAt(((CommonToken)tokenStream.Get(errorNode.stop.TokenIndex)).StopIndex + 1)
                });
            }

            if (node.TokenStartIndex == -1 || node.TokenStopIndex == -1)
            {
                return(Range.Empty);
            }

            return(new Range()
            {
                Start = scriptText.PositionAt(((CommonToken)tokenStream.Get(node.TokenStartIndex)).StartIndex),
                End = scriptText.PositionAt(((CommonToken)tokenStream.Get(node.TokenStopIndex)).StopIndex + 1)
            });
        }
Example No. 17
    public static boolean isOpNext(ITokenStream tokens)
    {
        var start = tokens.Index;
        var lt    = tokens.Get(start);
        var stop  = getLastOpTokenIndex(tokens);

        if (stop == -1)
        {
            return(false);
        }
        Console.WriteLine("isOpNext: i={0} t='{1}'", start, lt.getText());
        Console.WriteLine(", op='{0}'\n", tokens.GetText(Interval.Of(start, stop)));
        return(true);
    }
Example No. 18
        /** <summary>Get num hidden tokens between i..j inclusive</summary> */
        public virtual int GetNumberOfHiddenTokens(int i, int j)
        {
            int          n     = 0;
            ITokenStream input = parser.TokenStream;

            for (int ti = i; ti < input.Size() && ti <= j; ti++)
            {
                IToken t = input.Get(ti);
                if (t.Channel != TokenConstants.DEFAULT_CHANNEL)
                {
                    n++;
                }
            }
            return(n);
        }
Example No. 19
        /** Get num hidden tokens between i..j inclusive */
        public virtual int GetNumberOfHiddenTokens(int i, int j)
        {
            int          n     = 0;
            ITokenStream input = parser.TokenStream;

            for (int ti = i; ti < input.Count && ti <= j; ti++)
            {
                IToken t = input.Get(ti);
                if (t.Channel != TokenChannels.Default)
                {
                    n++;
                }
            }
            return(n);
        }
Example No. 20
        /// <summary>
        /// Input an expression token in tree structure, return a single token representing the whole expression
        /// </summary>
        /// <param name="rootToken"></param>
        /// <param name="input"></param>
        /// <returns></returns>
        public static IToken GetExpressionToken(CommonTree rootToken, ITreeNodeStream input)
        {
            try
            {
                ITokenStream stream = ((Antlr.Runtime.Tree.CommonTreeNodeStream)(input)).TokenStream;

                int    start  = rootToken.TokenStartIndex;
                int    end    = rootToken.TokenStopIndex;
                IToken token1 = new CommonToken(); //(Token.DEFAULT_CHANNEL
                token1.CharPositionInLine = stream.Get(start).CharPositionInLine;
                token1.Line = stream.Get(start).Line;

                for (int i = start; i <= end; i++)
                {
                    token1.Text += stream.Get(i).Text;
                }
                return(token1);
            }
            catch (Exception)
            {
            }

            return(rootToken.Token);
        }
Example No. 21
        private static int FindStartTokenScript(
            int startIndex,
            ITokenStream tokens,
            int tokenTypeSearch)
        {
            var found = -1;
            for (var i = startIndex; i < tokens.Size; i++)
            {
                if (tokens.Get(i).Type == tokenTypeSearch)
                {
                    return i;
                }
            }

            return found;
        }
Example No. 22
        public IToken GetToken(int index)
        {
            ITokenStream tokenStream = TokenStream;

            if (tokenStream == null)
            {
                return(null);
            }

            if (index < 0 || index >= tokenStream.Count)
            {
                return(null);
            }

            return(_tokenStream.Get(index));
        }
Example No. 23
        public TokenStreamVisualizerForm(ITokenStream tokenStream)
        {
            if (tokenStream == null)
            {
                throw new ArgumentNullException("tokenStream");
            }

            InitializeComponent();

            List <IToken> tokens = new List <IToken>();

            int marker          = tokenStream.Mark();
            int currentPosition = tokenStream.Index;

            try
            {
                tokenStream.Seek(0);
                while (tokenStream.LA(1) != CharStreamConstants.EndOfFile)
                {
                    tokenStream.Consume();
                }

                for (int i = 0; i < tokenStream.Count; i++)
                {
                    tokens.Add(tokenStream.Get(i));
                }
            }
            finally
            {
                tokenStream.Rewind(marker);
            }

            this._tokenStream = tokenStream;
            this._tokens      = tokens.ToArray();

            if (tokenStream.TokenSource != null)
            {
                this._tokenNames = tokenStream.TokenSource.TokenNames;
            }

            this._tokenNames = this._tokenNames ?? new string[0];

            UpdateTokenTypes();
            UpdateHighlighting();

            listBox1.BackColor = Color.Wheat;
        }
Example No. 24
        private static IToken GetTokenBefore(
            int i,
            ITokenStream tokens)
        {
            var position = i - 1;
            while (position >= 0)
            {
                var t = tokens.Get(position);
                if (t.Channel != 99 && t.Type != EsperEPL2GrammarLexer.WS)
                {
                    return t;
                }

                position--;
            }

            return null;
        }
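A hedged usage sketch for the helper above: check whether the significant token preceding position i has a particular type, with whitespace and off-channel tokens already skipped. The method name and the tokenType parameter are illustrative:

        private static bool IsPrecededBy(int i, ITokenStream tokens, int tokenType)
        {
            // GetTokenBefore already skips WS and off-channel (channel 99) tokens.
            var before = GetTokenBefore(i, tokens);
            return before != null && before.Type == tokenType;
        }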
Example No. 25
        public TokenStreamVisualizerForm( ITokenStream tokenStream )
        {
            if (tokenStream == null)
                throw new ArgumentNullException("tokenStream");

            InitializeComponent();

            List<IToken> tokens = new List<IToken>();

            int marker = tokenStream.Mark();
            int currentPosition = tokenStream.Index;
            try
            {
                tokenStream.Seek(0);
                while (tokenStream.LA(1) != CharStreamConstants.EndOfFile)
                    tokenStream.Consume();

                for (int i = 0; i < tokenStream.Count; i++)
                    tokens.Add(tokenStream.Get(i));
            }
            finally
            {
                tokenStream.Rewind(marker);
            }

            this._tokenStream = tokenStream;
            this._tokens = tokens.ToArray();

            if (tokenStream.TokenSource != null)
                this._tokenNames = tokenStream.TokenSource.TokenNames;

            this._tokenNames = this._tokenNames ?? new string[0];

            UpdateTokenTypes();
            UpdateHighlighting();

            listBox1.BackColor = Color.Wheat;
        }
Example No. 26
        public override void EnterDecision(int decisionNumber, bool couldBacktrack)
        {
            lastRealTokenTouchedInDecision = null;
            stats.numDecisionEvents++;
            int          startingLookaheadIndex = parser.TokenStream.Index;
            ITokenStream input = parser.TokenStream;

            if (dump)
            {
                Console.WriteLine("enterDecision canBacktrack=" + couldBacktrack + " " + decisionNumber +
                                  " backtrack depth " + backtrackDepth +
                                  " @ " + input.Get(input.Index) +
                                  " rule " + LocationDescription());
            }
            string             g          = currentGrammarFileName.Peek();
            DecisionDescriptor descriptor = decisions.Get(g, decisionNumber);

            if (descriptor == null)
            {
                descriptor = new DecisionDescriptor();
                decisions.Put(g, decisionNumber, descriptor);
                descriptor.decision       = decisionNumber;
                descriptor.fileName       = currentGrammarFileName.Peek();
                descriptor.ruleName       = currentRuleName.Peek();
                descriptor.line           = currentLine.Peek();
                descriptor.pos            = currentPos.Peek();
                descriptor.couldBacktrack = couldBacktrack;
            }
            descriptor.n++;

            DecisionEvent d = new DecisionEvent();

            decisionStack.Push(d);
            d.decision   = descriptor;
            d.startTime  = DateTime.Now;
            d.startIndex = startingLookaheadIndex;
        }
Example No. 27
        public override LessNode VisitSelectorElement(LessParser.SelectorElementContext context)
        {
            IEnumerable <IdentifierPart> GetIdentifierParts()
            {
                string prefix = context.HASH()?.GetText()
                                ?? context.DOT()?.GetText();

                foreach (var identifierPart in this.GetIdentifierParts(prefix, context.identifier()))
                {
                    yield return(identifierPart);
                }
            }

            Identifier GetPseudoclassIdentifier()
            {
                var pseudo = context.pseudoClass();

                if (pseudo == null)
                {
                    return(null);
                }

                string prefix = pseudo.COLON()?.GetText()
                                ?? pseudo.COLONCOLON()?.GetText()
                                ?? "";

                return(new Identifier(new PseudoclassIdentifierPart(prefix, pseudo.pseudoclassIdentifier().GetText(),
                                                                    (Expression)pseudo.expression()?.Accept(this))));
            }

            Identifier GetIdentifier()
            {
                return(new Identifier(GetIdentifierParts()));
            }

            SelectorElement GetElement(bool hasTrailingWhitespace)
            {
                var parentSelector = context.parentSelectorReference();

                if (parentSelector != null)
                {
                    return(new ParentReferenceSelectorElement(hasTrailingWhitespace));
                }

                if (context.pseudoClass() != null)
                {
                    return(new IdentifierSelectorElement(GetPseudoclassIdentifier(), hasTrailingWhitespace));
                }

                if (context.identifier() != null)
                {
                    return(new IdentifierSelectorElement(GetIdentifier(), hasTrailingWhitespace));
                }

                var attrib = context.attrib();

                if (attrib != null)
                {
                    var identifier = (Identifier)attrib.identifier().Accept(this);

                    var op = attrib.attribRelate();
                    if (op != null)
                    {
                        return(new AttributeSelectorElement(identifier, op.GetText(), (Expression)attrib.attribValue().Accept(this),
                                                            hasTrailingWhitespace));
                    }

                    return(new AttributeSelectorElement(identifier, hasTrailingWhitespace));
                }

                // The lexer rules might match an ID selector as a color, so we account for that here
                if (context.HexColor() != null)
                {
                    return(new IdentifierSelectorElement(new Identifier(new ConstantIdentifierPart(context.HexColor().GetText())),
                                                         hasTrailingWhitespace));
                }

                return(new CombinatorSelectorElement(context.combinator().GetText(), hasTrailingWhitespace));
            }

            bool HasTrailingWhitespace()
            {
                int index = context.Stop.TokenIndex + 1;

                while (index < tokenStream.Size)
                {
                    var tokenType = tokenStream.Get(index).Type;
                    if (tokenType == LessLexer.WS)
                    {
                        return(true);
                    }

                    if (tokenType != LessLexer.COMMENT)
                    {
                        return(false);
                    }

                    index++;
                }

                return(false);
            }

            return(GetElement(HasTrailingWhitespace()));
        }
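The HasTrailingWhitespace local function above follows a reusable pattern: scan forward from a token, skip comments, and stop at the first significant token. A standalone hedged sketch of the same idea, with the whitespace and comment token types passed in because the concrete lexer constants differ per grammar (the method name is illustrative):

        private static bool IsFollowedByWhitespace(ITokenStream tokens, int tokenIndex, int wsType, int commentType)
        {
            for (int i = tokenIndex + 1; i < tokens.Size; i++)
            {
                int type = tokens.Get(i).Type;
                if (type == wsType)
                {
                    return true;   // whitespace before any other significant token
                }
                if (type != commentType)
                {
                    return false;  // any non-comment, non-whitespace token ends the scan
                }
            }
            return false;          // reached the end of the stream
        }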
Example No. 28
        private void BuildColorClassifications(ITokenStream tokenStream, ITextSnapshot snapshot)
        {
            Debug("Start building Classifications at {0}, version {1}", DateTime.Now, snapshot.Version.ToString());
            XClassificationSpans newtags;
            var regionTags = new List <ClassificationSpan>();

            if (tokenStream != null)
            {
                int iLastInclude    = -1;
                int iLastPPDefine   = -1;
                int iLastDefine     = -1;
                int iLastSLComment  = -1;
                int iLastDocComment = -1;
                int iLastUsing      = -1;
                newtags        = new XClassificationSpans();
                keywordContext = null;
                for (var iToken = 0; iToken < tokenStream.Size; iToken++)
                {
                    var token = tokenStream.Get(iToken);
                    // Orphan End ?
                    if ((keywordContext != null) && (keywordContext.Line != token.Line) && (keywordContext.Type == XSharpLexer.END))
                    {
                        newtags.Add(Token2ClassificationSpan(keywordContext, snapshot, xsharpKwCloseType));
                        keywordContext = null;
                    }
                    var span = ClassifyToken(token, regionTags, snapshot);
                    if (span != null)
                    {
                        newtags.Add(span);
                        // We can have some Open/Close keyword ( FOR..NEXT; WHILE...ENDDO; IF...ENDIF)
                        if (span.ClassificationType == xsharpKeywordType)
                        {
                            span = ClassifyKeyword(token, snapshot);
                            if (span != null)
                            {
                                newtags.Add(span);
                            }
                        }
                        if (!disableRegions)
                        {
                            // now look for Regions of similar code lines
                            switch (token.Type)
                            {
                            case XSharpLexer.PP_INCLUDE:
                                scanForRegion(token, iToken, tokenStream, ref iLastInclude, snapshot, regionTags);
                                break;

                            case XSharpLexer.PP_DEFINE:
                                scanForRegion(token, iToken, tokenStream, ref iLastPPDefine, snapshot, regionTags);
                                break;

                            case XSharpLexer.DEFINE:
                                scanForRegion(token, iToken, tokenStream, ref iLastDefine, snapshot, regionTags);
                                break;

                            case XSharpLexer.SL_COMMENT:
                                scanForRegion(token, iToken, tokenStream, ref iLastSLComment, snapshot, regionTags);
                                break;

                            case XSharpLexer.DOC_COMMENT:
                                scanForRegion(token, iToken, tokenStream, ref iLastDocComment, snapshot, regionTags);
                                break;

                            case XSharpLexer.USING:
                                scanForRegion(token, iToken, tokenStream, ref iLastUsing, snapshot, regionTags);
                                break;

                            default:
                                break;
                            }
                        }
                    }
                }
                // Orphan End ?
                if ((keywordContext != null) && (keywordContext.Type == XSharpLexer.END))
                {
                    newtags.Add(Token2ClassificationSpan(keywordContext, snapshot, xsharpKwCloseType));
                    keywordContext = null;
                }
            }
            else
            {
                newtags = _colorTags;
            }
            System.Diagnostics.Trace.WriteLine("-->> XSharpClassifier.BuildColorClassifications()");
            lock (gate)
            {
                _snapshot     = snapshot;
                _colorTags    = newtags;
                _lexerRegions = regionTags;
            }
            System.Diagnostics.Trace.WriteLine("<<-- XSharpClassifier.BuildColorClassifications()");
            Debug("End building Classifications at {0}, version {1}", DateTime.Now, snapshot.Version.ToString());
            triggerRepaint(snapshot);
        }
Example No. 29
        public virtual string GetText(string programName, Interval interval)
        {
            IList <TokenStreamRewriter.RewriteOperation> rewrites;

            if (!programs.TryGetValue(programName, out rewrites))
            {
                rewrites = null;
            }

            int start = interval.a;
            int stop  = interval.b;

            // ensure start/end are in range
            if (stop > tokens.Size - 1)
            {
                stop = tokens.Size - 1;
            }
            if (start < 0)
            {
                start = 0;
            }
            if (rewrites == null || rewrites.Count == 0)
            {
                // no instructions to execute
                return(tokens.GetText(interval));
            }
            StringBuilder buf = new StringBuilder();
            // First, optimize instruction stream
            IDictionary <int, TokenStreamRewriter.RewriteOperation> indexToOp = ReduceToSingleOperationPerIndex(rewrites);
            // Walk buffer, executing instructions and emitting tokens
            int i = start;

            while (i <= stop && i < tokens.Size)
            {
                TokenStreamRewriter.RewriteOperation op;
                if (indexToOp.TryGetValue(i, out op))
                {
                    // remove so any left have index size-1
                    indexToOp.Remove(i);
                }

                IToken t = tokens.Get(i);
                if (op == null)
                {
                    // no operation at that index, just dump token
                    if (t.Type != TokenConstants.EOF)
                    {
                        buf.Append(t.Text);
                    }
                    i++; // move to next token
                }
                else
                {
                    i = op.Execute(buf); // execute operation and skip
                }
            }
            // include stuff after end if it's last index in buffer
            // So, if they did an insertAfter(lastValidIndex, "foo"), include
            // foo if end==lastValidIndex.
            if (stop == tokens.Size - 1)
            {
                // Scan any remaining operations after last token
                // should be included (they will be inserts).
                foreach (TokenStreamRewriter.RewriteOperation op in indexToOp.Values)
                {
                    if (op.index >= tokens.Size - 1)
                    {
                        buf.Append(op.text);
                    }
                }
            }
            return(buf.ToString());
        }
Example No. 30
 public static boolean isOpNext(ITokenStream tokens) {
     var start = tokens.Index;
     var lt = tokens.Get(start);
     var stop = getLastOpTokenIndex(tokens);
     if (stop == -1) return false;
     Console.WriteLine("isOpNext: i={0} t='{1}'", start, lt.getText());
     Console.WriteLine(", op='{0}'\n", tokens.GetText(Interval.Of(start, stop)));
     return true;
 }
Example No. 31
    /** Find stop token index of next operator; return -1 if not operator. */

    public static int getLastOpTokenIndex(ITokenStream tokens) {
        var i = tokens.Index; // current on-channel lookahead token index
        var lt = tokens.Get(i);
        if (lt.getType() == SwiftParser.DOT && tokens.get(i + 1).getType() == SwiftParser.DOT) {
            // dot-operator
            i += 2; // point at token after ".."
            lt = tokens.get(i);
            while (lt.getType() != Token.EOF &&
                   (lt.getType() == SwiftParser.DOT || isOperatorChar(lt.getType()))) {
                i++;
                lt = tokens.get(i);
            }
            return i - 1;
        }
        // Is it regular operator?
        if (!isOperatorHead(lt.getType())) {
            return -1;
        }
        i++;
        lt = tokens.get(i);
        while (lt.getType() != Token.EOF && isOperatorChar(lt.getType())) {
            i++;
            lt = tokens.get(i);
        }
        int stop = i - 1;
        return stop;
    }
Example No. 32
        protected NoViableAltException NoViableAlt(ITokenStream input,
                                                   ParserRuleContext outerContext,
                                                   ATNConfigSet configs,
                                                   int startIndex)
        {
            return new NoViableAltException(parser, input,
                                                input.Get(startIndex),
                                                input.LT(1),
                                                configs, outerContext);
        }
Example No. 33
        /// <summary>
        /// Converts from a syntax error to a nice exception.
        /// </summary>
        /// <param name="e">is the syntax error</param>
        /// <param name="expression">is the expression text</param>
        /// <param name="parser">the parser that parsed the expression</param>
        /// <param name="addPleaseCheck">indicates to add "please check" paraphrases</param>
        /// <returns>syntax exception</returns>
        public static UniformPair <string> Convert(RecognitionException e, string expression, bool addPleaseCheck, EsperEPL2GrammarParser parser)
        {
            string message;

            if (expression.Trim().Length == 0)
            {
                message = "Unexpected " + END_OF_INPUT_TEXT;
                return(new UniformPair <string>(message, expression));
            }

            IToken t;
            IToken tBeforeBefore = null;
            IToken tBefore       = null;
            IToken tAfter        = null;

            ITokenStream tokenStream = parser.InputStream as ITokenStream;

            var tIndex = e.OffendingToken != null ? e.OffendingToken.TokenIndex : int.MaxValue;

            if (tIndex < tokenStream.Size)
            {
                t = tokenStream.Get(tIndex);
                if ((tIndex + 1) < tokenStream.Size)
                {
                    tAfter = tokenStream.Get(tIndex + 1);
                }
                if (tIndex - 1 >= 0)
                {
                    tBefore = tokenStream.Get(tIndex - 1);
                }
                if (tIndex - 2 >= 0)
                {
                    tBeforeBefore = tokenStream.Get(tIndex - 2);
                }
            }
            else
            {
                if (tokenStream.Size >= 1)
                {
                    tBeforeBefore = tokenStream.Get(tokenStream.Size - 1);
                }
                if (tokenStream.Size >= 2)
                {
                    tBefore = tokenStream.Get(tokenStream.Size - 2);
                }
                t = tokenStream.Get(tokenStream.Size - 1);
            }

            IToken tEnd = null;

            if (tokenStream.Size > 0)
            {
                tEnd = tokenStream.Get(tokenStream.Size - 1);
            }

            var positionInfo = GetPositionInfo(t);
            var token        = t.Type == EsperEPL2GrammarParser.Eof ? "end-of-input" : "'" + t.Text + "'";

            var stack    = parser.GetParaphrases();
            var check    = "";
            var isSelect = stack.Count == 1 && stack.Peek().Equals("select clause");

            if ((stack.Count > 0) && addPleaseCheck)
            {
                var delimiter = "";
                var checkList = new StringBuilder();
                checkList.Append(", please check the ");
                while (stack.Count != 0)
                {
                    checkList.Append(delimiter);
                    checkList.Append(stack.Pop());
                    delimiter = " within the ";
                }
                check = checkList.ToString();
            }

            // check if token is a reserved keyword
            var keywords        = parser.GetKeywords();
            var reservedKeyword = false;

            if (keywords.Contains(token.ToLower()))
            {
                token          += " (a reserved keyword)";
                reservedKeyword = true;
            }
            else if (tAfter != null && keywords.Contains("'" + tAfter.Text.ToLower() + "'"))
            {
                token          += " ('" + tAfter.Text + "' is a reserved keyword)";
                reservedKeyword = true;
            }
            else
            {
                if ((tBefore != null) &&
                    (tAfter != null) &&
                    (keywords.Contains("'" + tBefore.Text.ToLower() + "'")) &&
                    (keywords.Contains("'" + tAfter.Text.ToLower() + "'")))
                {
                    token          += " ('" + tBefore.Text + "' and '" + tAfter.Text + "' are a reserved keyword)";
                    reservedKeyword = true;
                }
                else if ((tBefore != null) &&
                         (keywords.Contains("'" + tBefore.Text.ToLower() + "'")))
                {
                    token          += " ('" + tBefore.Text + "' is a reserved keyword)";
                    reservedKeyword = true;
                }
                else if (tEnd != null && keywords.Contains("'" + tEnd.Text.ToLower() + "'"))
                {
                    token          += " ('" + tEnd.Text + "' is a reserved keyword)";
                    reservedKeyword = true;
                }
            }

            // special handling for the select-clause "as" keyword, which is required
            if (isSelect && !reservedKeyword)
            {
                check += GetSelectClauseAsText(tBeforeBefore, t);
            }

            message = "Incorrect syntax near " + token + positionInfo + check;
            if (e is NoViableAltException || e is LexerNoViableAltException || CheckForInputMismatchWithNoExpected(e))
            {
                var nvaeToken     = e.OffendingToken;
                var nvaeTokenType = nvaeToken != null ? nvaeToken.Type : EsperEPL2GrammarLexer.Eof;

                if (nvaeTokenType == EsperEPL2GrammarLexer.Eof)
                {
                    if (token.Equals(END_OF_INPUT_TEXT))
                    {
                        message = "Unexpected " + END_OF_INPUT_TEXT + positionInfo + check;
                    }
                    else
                    {
                        if (ParseHelper.HasControlCharacters(expression))
                        {
                            message = "Unrecognized control characters found in text" + positionInfo;
                        }
                        else
                        {
                            message = "Unexpected " + END_OF_INPUT_TEXT + " near " + token + positionInfo + check;
                        }
                    }
                }
                else
                {
                    var parserTokenParaphrases = EsperEPL2GrammarParser.GetParserTokenParaphrases();
                    if (parserTokenParaphrases.Get(nvaeTokenType) != null)
                    {
                        message = "Incorrect syntax near " + token + positionInfo + check;
                    }
                    else
                    {
                        // find next keyword in the next 3 tokens
                        var currentIndex = tIndex + 1;
                        while ((currentIndex > 0) &&
                               (currentIndex < tokenStream.Size - 1) &&
                               (currentIndex < tIndex + 3))
                        {
                            IToken next = tokenStream.Get(currentIndex);
                            currentIndex++;

                            var quotedToken = "'" + next.Text + "'";
                            if (parser.GetKeywords().Contains(quotedToken))
                            {
                                check += " near reserved keyword '" + next.Text + "'";
                                break;
                            }
                        }
                        message = "Incorrect syntax near " + token + positionInfo + check;
                    }
                }
            }
            else if (e is InputMismatchException)
            {
                var mismatched = (InputMismatchException)e;

                string expected;
                var    expectedTokens = mismatched.GetExpectedTokens().ToList();
                if (expectedTokens.Count > 1)
                {
                    var writer = new StringWriter();
                    writer.Write("any of the following tokens {");
                    var delimiter = "";
                    for (var i = 0; i < expectedTokens.Count; i++)
                    {
                        writer.Write(delimiter);
                        if (i > 5)
                        {
                            writer.Write("...");
                            writer.Write(expectedTokens.Count - 5);
                            writer.Write(" more");
                            break;
                        }
                        delimiter = ", ";
                        writer.Write(GetTokenText(parser, expectedTokens[i]));
                    }
                    writer.Write("}");
                    expected = writer.ToString();
                }
                else
                {
                    expected = GetTokenText(parser, expectedTokens[0]);
                }

                var offendingTokenType = mismatched.OffendingToken.Type;
                var unexpected         = GetTokenText(parser, offendingTokenType);

                var expecting = " expecting " + expected.Trim() + " but found " + unexpected.Trim();
                message = "Incorrect syntax near " + token + expecting + positionInfo + check;
            }

            return(new UniformPair <string>(message, expression));
        }
Example No. 34
 public static IToken get(this ITokenStream stream, int i)
 {
     return(stream.Get(i));
 }
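With this extension method in scope, the Java-flavoured lookups used in Examples 10 and 31 compile against the C# runtime unchanged, since tokens.get(i) simply forwards to tokens.Get(i). A two-line illustration (assuming tokens is any ITokenStream):

 IToken viaExtension = tokens.get(0); // resolves to the extension method above
 IToken viaRuntime   = tokens.Get(0); // the underlying runtime call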