Example #1
        private static int FindEndTokenScript(int startIndex, CommonTokenStream tokens, int tokenTypeSearch, ISet <int> afterScriptTokens, bool requireAfterScriptToken)
        {
            var found = -1;

            for (var i = startIndex; i < tokens.Size; i++)
            {
                if (tokens.Get(i).Type == tokenTypeSearch)
                {
                    if (!requireAfterScriptToken)
                    {
                        return(i);
                    }
                    // The next non-comment token must be among the afterScriptTokens, i.e. SELECT/INSERT/ON/DELETE/UPDATE
                    // Find next non-comment token.
                    for (var j = i + 1; j < tokens.Size; j++)
                    {
                        var next = tokens.Get(j);
                        if (next.Channel == 0)
                        {
                            if (afterScriptTokens.Contains(next.Type))
                            {
                                found = i;
                            }
                            break;
                        }
                    }
                }
                if (found != -1)
                {
                    break;
                }
            }
            return(found);
        }
Example #2
        private static Pair <String, int?> FindScriptName(int start, CommonTokenStream tokens)
        {
            String lastIdent      = null;
            var    lastIdentIndex = 0;

            for (var i = start; i < tokens.Size; i++)
            {
                if (tokens.Get(i).Type == EsperEPL2GrammarParser.IDENT)
                {
                    lastIdent      = tokens.Get(i).Text;
                    lastIdentIndex = i;
                }
                if (tokens.Get(i).Type == EsperEPL2GrammarParser.LPAREN)
                {
                    break;
                }
                // find beginning of script, ignore brackets
                if (tokens.Get(i).Type == EsperEPL2GrammarParser.LBRACK && tokens.Get(i + 1).Type != EsperEPL2GrammarParser.RBRACK)
                {
                    break;
                }
            }
            if (lastIdent == null)
            {
                throw new IllegalStateException("Failed to parse expression name");
            }
            return(new Pair <String, int?>(lastIdent, lastIdentIndex));
        }
Example #3
        public void PostProcess()
        {
            // Extract comments at the end of file if any.
            if (_rootModule.LastScanTokenIndex > _parentCodeBlock.ParserContext.Stop.TokenIndex)
            {
                return; // Already scanned in child node.
            }
            for (int i = _rootModule.LastScanTokenIndex; i <= _parentCodeBlock.ParserContext.Stop.TokenIndex; i++)
            {
                var token = _commonTokenStream.Get(i);
                switch (token.Type)
                {
                case VisualBasic6Parser.NEWLINE:
                    _parentCodeBlock.AddCodeModel(new VBNewLine(token));
                    break;

                case VisualBasic6Parser.COMMENT:
                    var isOnNewLine = true;
                    if (token.TokenIndex > 0)
                    {
                        isOnNewLine = (_commonTokenStream.Get(token.TokenIndex - 1).Type == VisualBasic6Parser.NEWLINE);
                    }
                    _parentCodeBlock.AddCodeModel(new VBComment(token, isOnNewLine));
                    break;

                default:
                    // ignore the rest
                    break;
                }
            }
            _rootModule.LastScanTokenIndex = _parentCodeBlock.ParserContext.Stop.TokenIndex + 1;
        }
Example #4
        private static ScriptOrClassResult RewriteTokensScript(CommonTokenStream tokens)
        {
            IList<string> scripts = new List<string>();
            IList<string> classes = new List<string>();

            IList<UniformPair<int>> tokenIndexRanges = new List<UniformPair<int>>();
            int tokenIndex = 0;

            while (tokenIndex < tokens.Size) {
                if (tokens.Get(tokenIndex).Type == EsperEPL2GrammarParser.EXPRESSIONDECL) {
                    var tokenBefore = GetTokenBefore(tokenIndex, tokens);
                    var isCreateExpressionClause = tokenBefore != null && tokenBefore.Type == EsperEPL2GrammarParser.CREATE;
                    var nameAndNameStart = FindScriptName(tokenIndex + 1, tokens);

                    var startIndex = FindStartTokenScript(nameAndNameStart.Second, tokens, EsperEPL2GrammarParser.LBRACK);
                    if (startIndex != -1) {
                        var endIndex = FindEndTokenScript(
                            startIndex + 1,
                            tokens,
                            EsperEPL2GrammarParser.RBRACK,
                            EsperEPL2GrammarParser.GetAfterScriptTokens(),
                            !isCreateExpressionClause);
                        if (endIndex != -1) {
                            var writer = new StringWriter();
                            for (var j = startIndex + 1; j < endIndex; j++) {
                                writer.Write(tokens.Get(j).Text);
                            }

                            scripts.Add(writer.ToString());
                            tokenIndexRanges.Add(new UniformPair<int>(startIndex, endIndex));
                            tokenIndex = endIndex;
                        }
                    }
                }

                if (tokens.Get(tokenIndex).Type == EsperEPL2GrammarParser.CLASSDECL) {
                    int startIndex = FindTokenClass(tokenIndex, tokens);
                    if (startIndex != -1) {
                        int endIndex = FindTokenClass(startIndex + 1, tokens);
                        if (endIndex != -1) {

                            StringWriter writer = new StringWriter();
                            for (int j = startIndex + 1; j < endIndex; j++) {
                                writer.Write(tokens.Get(j).Text);
                            }

                            classes.Add(writer.ToString());
                            tokenIndexRanges.Add(new UniformPair<int>(startIndex, endIndex));
                            tokenIndex = endIndex;
                        }
                    }
                }

                tokenIndex++;
            }

            var rewrittenEPL = RewriteEPL(tokenIndexRanges, tokens);
            return new ScriptOrClassResult(rewrittenEPL, scripts, classes);
        }
Example #5
        public void TestUnaryMinusType(string value, int type)
        {
            Setup(value);

            this._calcParser.expression();

            CommonTokenStream ts = (CommonTokenStream)this._calcParser.InputStream;

            Assert.AreEqual(CalcLexer.Minus, ts.Get(0).Type);
            Assert.AreEqual(type, ts.Get(1).Type);

            Assert.AreEqual(0, this._calcParser.NumberOfSyntaxErrors);
        }
        public void testExpressionPow()
        {
            setup("5^3^2");

            PowerExpContext context = parser.expression() as PowerExpContext;

            CommonTokenStream ts = (CommonTokenStream)parser.InputStream;

            Assert.Equal(SpreadsheetLexer.NUMBER, ts.Get(0).Type);
            Assert.Equal(SpreadsheetLexer.T__2, ts.Get(1).Type);
            Assert.Equal(SpreadsheetLexer.NUMBER, ts.Get(2).Type);
            Assert.Equal(SpreadsheetLexer.T__2, ts.Get(3).Type);
            Assert.Equal(SpreadsheetLexer.NUMBER, ts.Get(4).Type);
        }
Example #7
        public void TestVarExpressionType()
        {
            Setup("$VAR(FIELD)");

            this._calcParser.expression();

            CommonTokenStream ts = (CommonTokenStream)this._calcParser.InputStream;

            Assert.AreEqual(CalcLexer.Var, ts.Get(0).Type);
            Assert.AreEqual(CalcLexer.OpenParen, ts.Get(1).Type);
            Assert.AreEqual(CalcLexer.Identifier, ts.Get(2).Type);
            Assert.AreEqual(CalcLexer.CloseParen, ts.Get(3).Type);

            Assert.AreEqual(0, this._calcParser.NumberOfSyntaxErrors);
        }
Example #8
        public static SourceLocation GetSourceLocation(this CommonTokenStream tokenStream, IParseTree context)
        {
            var interval = context.SourceInterval;
            var tokens   = tokenStream.Get(interval.a, interval.b);
            var location = new SourceLocation();

            if (tokens.Count > 0)
            {
                location.LineNumber   = tokens[0].Line;
                location.ColumnNumber = tokens[0].Column;
                var sb    = new StringBuilder();
                int index = 0;
                while (sb.Length < MaxTextLength && index < tokens.Count)
                {
                    sb.Append(tokens[index].Text);
                    index++;
                }

                if (sb.Length > MaxTextLength)
                {
                    sb.Length = MaxTextLength;
                }

                location.Text = sb.ToString();
            }

            return(location);
        }
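A minimal usage sketch for an extension method like GetSourceLocation above (requires using Antlr4.Runtime and Antlr4.Runtime.Tree); the ExprLexer/ExprParser grammar types and the expression start rule are hypothetical placeholders, and SourceLocation/MaxTextLength are assumed to be defined as in the snippet:

            // Hedged sketch: substitute the lexer/parser generated for your own grammar.
            var input  = new AntlrInputStream("a + b * 2");
            var lexer  = new ExprLexer(input);          // hypothetical generated lexer
            var tokens = new CommonTokenStream(lexer);
            var parser = new ExprParser(tokens);        // hypothetical generated parser

            IParseTree tree = parser.expression();      // hypothetical start rule

            // Map the parse tree back to its starting line/column and covered text.
            var location = tokens.GetSourceLocation(tree);
            Console.WriteLine($"{location.LineNumber}:{location.ColumnNumber} -> {location.Text}");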
Example #9
        private static Possible <JPathParser.ExprContext> TryParseInternal(string str)
        {
            var lexer       = new JPathLexer(new AntlrInputStream(str));
            var tokenStream = new CommonTokenStream(lexer);
            var parser      = new JPathParser(tokenStream);
            var listener    = new JPathListener();

            parser.AddErrorListener(listener);
            var expr = parser.expr();

            if (listener.HasErrors)
            {
                return(new Failure <string>("Syntax error: " + listener.FirstError));
            }
            else if (tokenStream.Index < tokenStream.Size - 1)
            {
                var tokens = string.Join("", Enumerable
                                         .Range(tokenStream.Index, count: tokenStream.Size - tokenStream.Index - 1)
                                         .Select(idx => tokenStream.Get(idx).Text));
                return(new Failure <string>($"Unconsumed tokens: {tokens}"));
            }
            else
            {
                return(expr);
            }
        }
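The trailing-input guard used above can also be factored into a small standalone helper; a sketch under the assumption that an ANTLR4 CommonTokenStream is inspected after parsing (requires System.Linq; the helper name is illustrative and not part of the original code):

        // Returns the text of tokens the parser left unconsumed (excluding EOF),
        // or null when the whole input was matched.
        private static string GetUnconsumedText(CommonTokenStream tokenStream)
        {
            if (tokenStream.Index >= tokenStream.Size - 1)
            {
                return null; // only EOF (or nothing) remains
            }
            return string.Join(
                string.Empty,
                Enumerable.Range(tokenStream.Index, tokenStream.Size - tokenStream.Index - 1)
                          .Select(idx => tokenStream.Get(idx).Text));
        }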
Example #10
        private static String RewriteScripts(IList <UniformPair <int?> > ranges, CommonTokenStream tokens)
        {
            if (ranges.IsEmpty())
            {
                return(tokens.GetText());
            }
            var writer                 = new StringWriter();
            var rangeIndex             = 0;
            UniformPair <int?> current = ranges[rangeIndex];

            for (var i = 0; i < tokens.Size; i++)
            {
                var t = tokens.Get(i);
                if (t.Type == EsperEPL2GrammarLexer.Eof)
                {
                    break;
                }
                if (i < current.First)
                {
                    writer.Write(t.Text);
                }
                else if (i == current.First)
                {
                    writer.Write(t.Text);
                    writer.Write("'");
                }
                else if (i == current.Second)
                {
                    writer.Write("'");
                    writer.Write(t.Text);
                    rangeIndex++;
                    if (ranges.Count > rangeIndex)
                    {
                        current = ranges[rangeIndex];
                    }
                    else
                    {
                        current = new UniformPair <int?>(-1, -1);
                    }
                }
                else if (t.Type == EsperEPL2GrammarParser.SL_COMMENT || t.Type == EsperEPL2GrammarParser.ML_COMMENT)
                {
                    WriteCommentEscapeSingleQuote(writer, t);
                }
                else
                {
                    if (t.Type == EsperEPL2GrammarParser.QUOTED_STRING_LITERAL && i > current.First && i < current.Second)
                    {
                        writer.Write("\\'");
                        writer.Write(t.Text.Substring(1, t.Text.Length - 2));
                        writer.Write("\\'");
                    }
                    else
                    {
                        writer.Write(t.Text);
                    }
                }
            }
            return(writer.ToString());
        }
Example #11
            /// <summary>
            /// Gets the index of the first token to the left of the token at
            /// <paramref name="index"/> that's on <paramref name="channel"/>.
            /// If there are no tokens that match, return -1.
            /// </summary>
            /// <param name="tokenStream">The token stream to search
            /// within.</param>
            /// <param name="index">The index of the token to start searching
            /// from.</param>
            /// <param name="channel">The channel to find tokens on.</param>
            /// <returns>The index of the first token before the token at
            /// <paramref name="index"/> that is on the channel <paramref
            /// name="channel"/>. If none is found, returns -1. If <paramref
            /// name="index"/> is beyond the size of <paramref
            /// name="tokenStream"/>, returns the index of the last token in the
            /// stream.</returns>
            private static int IndexOfPreviousTokenOnChannel(CommonTokenStream tokenStream, int index, int channel)
            {
                // Are we beyond the list of tokens?
                if (index >= tokenStream.Size)
                {
                    // Return the final token in the channel, which will be an
                    // EOF.
                    return(tokenStream.Size - 1);
                }

                // 'index' is the token we want to start searching from. We want
                // to find items before it, so start looking from the token
                // before it.
                var currentIndex = index - 1;

                // Walk backwards through the tokens list.
                while (currentIndex >= 0)
                {
                    IToken token = tokenStream.Get(currentIndex);

                    // Is this token on the channel we're looking for?
                    if (token.Channel == channel)
                    {
                        // We're done - we found one! Return it.
                        return(currentIndex);
                    }
                    currentIndex -= 1;
                }

                // We found nothing. Return the 'not found' value.
                return(-1);
            }
Example #12
        public void TestExpressionPowTypes()
        {
            Setup("5^3^2");

            this._calcParser.expression();

            CommonTokenStream ts = (CommonTokenStream)this._calcParser.InputStream;

            Assert.AreEqual(CalcLexer.IntegerLiteral, ts.Get(0).Type);
            Assert.AreEqual(CalcLexer.Power, ts.Get(1).Type);
            Assert.AreEqual(CalcLexer.IntegerLiteral, ts.Get(2).Type);
            Assert.AreEqual(CalcLexer.Power, ts.Get(3).Type);
            Assert.AreEqual(CalcLexer.IntegerLiteral, ts.Get(4).Type);

            Assert.AreEqual(0, this._calcParser.NumberOfSyntaxErrors);
        }
Example #13
        private void GetCompleteStatement(
            ITextSnapshot snapshot, SnapshotPoint snapPos, out StringBuilder sbErrors, out ITree treeStmt)
        {
            string sql = snapshot.GetText();

            treeStmt  = null;
            sbErrors  = new StringBuilder();
            _position = snapPos.Position;
            _tokens   = RemoveToken(sql, snapPos);
            if (_tokens.Count == 1 && _tokens.Get(0).Type == MySQL51Lexer.EOF)
            {
                return;
            }
            MySQL51Parser.program_return r =
                LanguageServiceUtil.ParseSql(sql, false, out sbErrors, _tokens);
            if (r == null)
            {
                return;
            }
            ITree t = r.Tree as ITree;

            treeStmt = t;
            // locate current statement's AST
            if (t.IsNil)
            {
                ITree tmp = FindStmt(t);
                if (tmp != null)
                {
                    treeStmt = tmp;
                }
            }
        }
Example #14
        public CstNode FinishParsing(CstNode root)
        {
            var count = _stream.Count - 1; // Avoid writing "<EOF>"

            while (count > 0 && _stream.Get(count - 1).Type < 0)
            {
                count--;
            }
            var antlrToken = _stream.Get(count);
            var token      = CreateTerminalNode(
                Code2XmlConstants.EofTokenName, antlrToken, string.Empty, count,
                Code2XmlConstants.EofRuleId);

            root.AddLast(token);
            return(root);
        }
Example #15
        static void Try(string input)
        {
            var str = new AntlrInputStream(input);
//            System.Console.WriteLine(input);
            var lexer           = new Dart2Lexer(str);
            var tokens          = new CommonTokenStream(lexer);
            var parser          = new Dart2Parser(tokens);
            var listener_lexer  = new ErrorListener <int>();
            var listener_parser = new ErrorListener <IToken>();

            lexer.AddErrorListener(listener_lexer);
            parser.AddErrorListener(listener_parser);
            var tree = parser.compilationUnit();

            if (listener_lexer.had_error || listener_parser.had_error)
            {
                System.Console.WriteLine("error in parse.");
                for (int i = 0; ; ++i)
                {
                    var token = tokens.Get(i);
                    System.Console.WriteLine(token.ToString());
                    if (token.Type == Antlr4.Runtime.TokenConstants.EOF)
                    {
                        break;
                    }
                }
            }
            else
            {
                System.Console.WriteLine("parse completed.");
            }
//            System.Console.WriteLine(tree.ToStringTree());
//            Decorate(tree, parser);
//            System.Console.WriteLine(tree.ToStringTree());
        }
Example #16
        /// <summary>
        /// Verifies that parsing consumed the entire token stream and produced a tree;
        /// throws a ParsingException if unrecognized symbols remain or no tree was built.
        /// </summary>
        /// <param name="tokens">The token stream that was parsed.</param>
        /// <param name="tree">The resulting parse tree (may be null on failure).</param>
        public static void CheckIsParsingComplete(CommonTokenStream tokens, CommonTree tree)
        {
            if (!(tokens.Index == tokens.Count - 2 && tokens.Get(tokens.Index).Text == "<EOF>" && tokens.Get(tokens.Index + 1).Text == "<EOF>"))
            {
                if (tokens.Index != tokens.Count - 1 || tokens.Get(tokens.Index).Text != "<EOF>")
                {
                    throw new ParsingException("Unrecognized symbol.", tokens.Get(tokens.Index));
                }
            }

            if (tree == null)
            {
                throw new ParsingException(
                          "Please check your first line of your model (not comments), it may start with invalid symbols.",
                          tokens.Get(0));
            }
        }
Example #17
 public static List <IToken> getPostHiddenTokens(ParserRuleReturnScope tree, CommonTokenStream rawTokens)
 {
     if (tree.Tree == null)
     {
         //I think this only happens with implied semicolons
         if (tree.Start is CommonToken)
         {
             //I think we should always be on at least token 1.
             IToken currentTok = rawTokens.Get(((CommonToken)tree.Start).TokenIndex);
             //I go back one token if I am on a non-default channel token so that I can search forward for hidden tokens.
             if (currentTok.Channel != Token.DEFAULT_CHANNEL)
             {
                 currentTok = rawTokens.Get(((CommonToken)tree.Start).TokenIndex - 1);
             }
             return(getPostHiddenTokens(currentTok, rawTokens));
         }
         return(null);
     }
     return(getPostHiddenTokens(getLastTreeToken((CommonTree)tree.Tree), rawTokens));
 }
        public void testExpressionAtomId()
        {
            setup("A1");

            IdAtomExpContext context = parser.expression() as IdAtomExpContext;

            CommonTokenStream ts = (CommonTokenStream)parser.InputStream;

            Assert.Equal(SpreadsheetLexer.ID, ts.Get(0).Type);
            Assert.Equal(null, errorListener.Symbol);
        }
        public void testWrongExpressionAtomId()
        {
            setup("AB1");

            IdAtomExpContext context = parser.expression() as IdAtomExpContext;

            CommonTokenStream ts = (CommonTokenStream)parser.InputStream;

            ts.Seek(0);

            Assert.Equal(SpreadsheetLexer.NAME, ts.Get(0).Type);
            Assert.Equal("<EOF>", errorListener.Symbol);
        }
Example #20
        private static int FindStartTokenScript(int startIndex, CommonTokenStream tokens, int tokenTypeSearch)
        {
            var found = -1;

            for (var i = startIndex; i < tokens.Size; i++)
            {
                if (tokens.Get(i).Type == tokenTypeSearch)
                {
                    return(i);
                }
            }
            return(found);
        }
Example #21
        public CstNode FinishParsing()
        {
            var root  = _dummyRoot.Children().First();
            var count = _stream.Size - 1; // Avoid writing "<EOF>"

            while (count > 0 && _stream.Get(count - 1).Type < 0)
            {
                count--;
            }
            var antlrToken = _stream.Get(count);
            var token      = CreateTerminalNode(
                Code2XmlConstants.EofTokenName, antlrToken, String.Empty, count,
                Code2XmlConstants.EofRuleId);

            // TODO: Temporary workaround for a bug in ANTLR 4
            if (token.Hiddens.Count > 0)
            {
                var location = token.Hiddens.Last().Range.EndLocation;
                token.Token.Range = new CodeRange(location, location);
            }
            root.AddLast(token);
            return(root);
        }
Example #22
        public static string getTokensString(CommonTokenStream tokens, int FirstToken, int LastToken)
        {
            if (LastToken - FirstToken < 0)
            {
                return("");
            }
            string text = "";

            for (int i = FirstToken; i <= LastToken; i++)
            {
                text = text + tokens.Get(i).Text;
            }
            return(text);
        }
Example #23
        private static ScriptResult RewriteTokensScript(CommonTokenStream tokens)
        {
            IList <String> scripts = new List <String>();

            IList <UniformPair <int?> > scriptTokenIndexRanges = new List <UniformPair <int?> >();

            for (var i = 0; i < tokens.Size; i++)
            {
                if (tokens.Get(i).Type == EsperEPL2GrammarParser.EXPRESSIONDECL)
                {
                    var tokenBefore = GetTokenBefore(i, tokens);
                    var isCreateExpressionClause = tokenBefore != null && tokenBefore.Type == EsperEPL2GrammarParser.CREATE;
                    var nameAndNameStart         = FindScriptName(i + 1, tokens);

                    var startIndex = FindStartTokenScript(nameAndNameStart.Second.Value, tokens, EsperEPL2GrammarParser.LBRACK);
                    if (startIndex != -1)
                    {
                        var endIndex = FindEndTokenScript(startIndex + 1, tokens, EsperEPL2GrammarParser.RBRACK, EsperEPL2GrammarParser.GetAfterScriptTokens(), !isCreateExpressionClause);
                        if (endIndex != -1)
                        {
                            var writer = new StringWriter();
                            for (var j = startIndex + 1; j < endIndex; j++)
                            {
                                writer.Write(tokens.Get(j).Text);
                            }
                            scripts.Add(writer.ToString());
                            scriptTokenIndexRanges.Add(new UniformPair <int?>(startIndex, endIndex));
                        }
                    }
                }
            }

            var rewrittenEPL = RewriteScripts(scriptTokenIndexRanges, tokens);

            return(new ScriptResult(rewrittenEPL, scripts));
        }
Example #24
 private static bool IsContainsScriptExpression(CommonTokenStream tokens)
 {
     for (var i = 0; i < tokens.Size; i++)
     {
         if (tokens.Get(i).Type == EsperEPL2GrammarParser.EXPRESSIONDECL)
         {
             var startIndex = FindStartTokenScript(i + 1, tokens, EsperEPL2GrammarParser.LBRACK);
             if (startIndex != -1)
             {
                 return(true);
             }
         }
     }
     return(false);
 }
Example #25
        private static IToken GetTokenBefore(int i, CommonTokenStream tokens)
        {
            var position = i - 1;

            while (position >= 0)
            {
                var t = tokens.Get(position);
                if (t.Channel != 99 && t.Type != EsperEPL2GrammarLexer.WS)
                {
                    return(t);
                }
                position--;
            }
            return(null);
        }
        public void testNumericAtomId(string value)
        {
            setup(value);

            IdAtomExpContext context = parser.expression() as IdAtomExpContext;

            CommonTokenStream ts = (CommonTokenStream)parser.InputStream;

            Assert.Equal(SpreadsheetLexer.NUMBER, ts.Get(0).Type);

            // note that this.errorListener.symbol could be null or empty
            // when ANTLR doesn't recognize the token or there is no error.
            // In such cases check the output of errorListener
            Assert.Equal(null, errorListener.Symbol);
        }
Example #27
        public async Task <PredicateEvaluationOperation <CandidateSearchResult> > Evaluate(string predicateExpression)
        {
            var charStream = new AntlrInputStream(predicateExpression);
            var lexer      = new PredicateLexer(charStream);
            var stream     = new CommonTokenStream(lexer);

            stream.Fill();
            var tokens = stream.Get(0, stream.Size);

            stream.Reset();

            if (tokens.Any(x => x.Type == PredicateLexer.Discardable))
            {
                throw new Exception("Contains unknown tokens");
            }

            var parser = new PredicateParser(stream);

            parser.RemoveErrorListeners();
            parser.AddErrorListener(new ThrowingErrorListener());
            var treeBuilder = new PredicateSyntaxTreeBuilderVisitor();
            var tree        = treeBuilder.Visit(parser.expr());

            var searchBuilder = new ElasticSearchQueryBuilder(_propertyDetailsProvider);
            var query         = searchBuilder.BuildNestQuery(tree);

            var searchResult = await _elasticClient.SearchAsync <CandidateDocument>(new SearchRequest(_index)
            {
                Query = query
            });

            var resultItems = searchResult.Documents
                              .Select(x => new CandidateSearchResultItem
            {
                CurrentJobTitle   = x.CurrentJobTitle,
                Salary            = x.Salary,
                ExperienceInYears = x.ExperienceInYears
            })
                              .ToList();

            return(new PredicateEvaluationOperation <CandidateSearchResult>(new CandidateSearchResult
            {
                Items = resultItems
            }));
        }
Example #28
        //  public static ASDocComment findPreviousComment(ParserRuleReturnScope t, CommonTokenStream rawTokens) {
        //
        //      return findPreviousComment(getFirstTreeToken((CommonTree)t.getTree()), rawTokens);
        //  }
        //
        //  public static ASDocComment findPreviousComment(Token tok, CommonTokenStream rawTokens)
        //  {
        //      int currentTokenIndex=((CommonToken)tok).getTokenIndex()-1;
        ////        List<Token> hiddenTokens=new ArrayList<Token>();
        //
        //      //collect all of the hidden tokens since the last non-whitespace token
        //      while (currentTokenIndex>=0)
        //      {
        //          Token t=rawTokens.get(currentTokenIndex);
        //          if (t.getChannel()==Token.DEFAULT_CHANNEL)
        //              break;
        //
        //          if (t.getType()==ASCollectorLexer.COMMENT_MULTILINE && t.getText().startsWith("/**"))
        //          {
        //              return new ASDocComment(t);
        //          }
        ////            hiddenTokens.add(t);
        //          currentTokenIndex--;
        //      }
        ////        Collections.reverse(hiddenTokens);
        //      return null;
        //  }

        /*public static ASDocComment findCommentReverse(List<IToken> hiddenTokens)
         * {
         *  int currentTokenIndex=hiddenTokens.Count-1;
         *
         *  //collect all of the hidden tokens since the last non-whitespace token
         *  loop: while (currentTokenIndex>=0)
         *  {
         *      IToken t=hiddenTokens[currentTokenIndex];
         *      switch (t.Channel)
         *      {
         *      case CHANNEL_MLCOMMENT:
         *          if (t.Text.StartsWith("/**"))
         *              return new ASDocComment(t);
         *          break loop;
         *      case CHANNEL_WHITESPACE:
         *      case CHANNEL_EOL:
         *          currentTokenIndex--;
         *          break;
         *      default:
         *          break loop;
         *      }
         *  }
         *  return null;
         * }*/

        public static List <IToken> getPostHiddenTokens(IToken tok, CommonTokenStream rawTokens)
        {
            List <IToken> results = new List <IToken>();

            if (tok == null)
            {
                return(results);
            }
            int currentTokenIndex = ((CommonToken)tok).TokenIndex + 1;

            while (currentTokenIndex < rawTokens.Count)
            {
                IToken t = rawTokens.Get(currentTokenIndex);
                if (t.Channel == Token.DEFAULT_CHANNEL)
                {
                    break;
                }

                if (t.Channel == CHANNEL_EOL)
                {
                    break;
                }

                if (t.Channel == CHANNEL_SLCOMMENT)
                {
                    results.Add(t);
                    break;
                }

                results.Add(t);
                currentTokenIndex++;
            }

            //walk backwards to remove whitespace tokens at the end of list
            for (int i = results.Count - 1; i >= 0; i--)
            {
                IToken t = results[i];
                if (t.Channel == CHANNEL_WHITESPACE)
                {
                    results.RemoveAt(i);
                }
            }

            return(results);
        }
Example #29
        internal VB6ModuleVisitor(VBBaseModule rootModule, VisualBasic6Lexer vb6Lexer, VisualBasic6Parser vb6Parser, CommonTokenStream commonTokenStream)
        {
            _rootModule        = rootModule;
            _vb6Lexer          = vb6Lexer;
            _vb6Parser         = vb6Parser;
            _commonTokenStream = commonTokenStream;

            // Filter out VB6-generated comments: discard everything up to the last Attribute statement.
            // (Open the VB6 source file in Notepad rather than the VB6 IDE; these lines are not shown in the IDE.)
            var lastAttributeStmt = (from itm in _commonTokenStream.Get(0, _commonTokenStream.Size)
                                     where (itm.Type == VisualBasic6Parser.ATTRIBUTE)
                                     select itm).LastOrDefault();

            if (lastAttributeStmt != null)
            {
                // Note: This takes the token position of the "Attribute" keyword itself; not very accurate, but good enough as a starting point.
                _rootModule.LastScanTokenIndex = lastAttributeStmt.TokenIndex;
            }
        }
        public void testWrongVisitFunctionExp()
        {
            setup("logga(100)");

            FunctionExpContext context = parser.expression() as FunctionExpContext;

            DoubleSpreadsheetVisitor visitor = new DoubleSpreadsheetVisitor();
            double result = visitor.VisitFunctionExp(context);

            CommonTokenStream ts = (CommonTokenStream)parser.InputStream;

            // this is syntactically correct and would be true even for a good function
            Assert.Equal(SpreadsheetLexer.NAME, ts.Get(0).Type);
            Assert.Equal(null, errorListener.Symbol);
            // we choose to return 0 if we can't find the function with that NAME
            // so that's how we know it's wrong
            Assert.Equal(0, result);
        }