Example #1
        public void Test1()
        {
            const string input       = "program HelloWorld begin write read  end";
            var          inputStream = new AntlrInputStream(input);
            var          lexer       = new PascalLexer(inputStream);
            var          tokens      = new CommonTokenStream(lexer);

            tokens.Fill();
            var tokenList = tokens.GetTokens();

            // 12 + 1 -- EOF
            Assert.AreEqual(13, tokenList.Count);
            Assert.AreEqual(PascalLexer.IDENT, tokenList[0].Type);
            Assert.AreEqual(PascalLexer.IDENT, tokenList[2].Type);
            Assert.AreEqual(PascalLexer.IDENT, tokenList[4].Type);
            Assert.AreEqual(PascalLexer.IDENT, tokenList[6].Type);
            Assert.AreEqual(PascalLexer.IDENT, tokenList[8].Type);
            Assert.AreEqual(PascalLexer.IDENT, tokenList[11].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[1].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[3].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[5].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[9].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[10].Type);
            Assert.AreEqual(PascalLexer.Eof, tokenList.Last().Type);
            Assert.Pass();
        }
Example #2
        public void Test6()
        {
            const string input1      = "// Valid comment { No longer valid comment !!\n} ";
            var          inputStream = new AntlrInputStream(input1);
            var          lexer       = new PascalLexer(inputStream);
            var          tokens      = new CommonTokenStream(lexer);

            tokens.Fill();
            var tokenList = tokens.GetTokens();

            Assert.AreEqual(5, tokenList.Count);
            Assert.AreEqual(PascalLexer.SINGLE_COMMENT, tokenList[0].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[1].Type);
            Assert.AreEqual(PascalLexer.BAD_CHARACTER, tokenList[2].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[3].Type);
            Assert.AreEqual(PascalLexer.Eof, tokenList.Last().Type);

            const string input2 = "// Valid comment (* No longer valid comment !!\n *) ";

            inputStream = new AntlrInputStream(input2);
            lexer       = new PascalLexer(inputStream);
            tokens      = new CommonTokenStream(lexer);
            tokens.Fill();
            tokenList = tokens.GetTokens();
            Assert.AreEqual(7, tokenList.Count);
            Assert.AreEqual(PascalLexer.SINGLE_COMMENT, tokenList[0].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[1].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[2].Type);
            Assert.AreEqual(PascalLexer.BAD_CHARACTER, tokenList[3].Type);
            Assert.AreEqual(PascalLexer.BAD_CHARACTER, tokenList[4].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[5].Type);
            Assert.AreEqual(PascalLexer.Eof, tokenList.Last().Type);
            Assert.Pass();
        }
Example #3
        public string EditText(string text)
        {
            AntlrInputStream inputStream = new AntlrInputStream(text);

            ITokenSource       lexer;
            IVisitorTree       visitor;
            IChangeTokenSource editorTokens;

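            // Each pass below wraps the previous token source: the visitor rewrites the
            // buffered tokens and BaseHtmlEditTokens exposes the result as a new ITokenSource.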
            lexer = new HtmlLexer(inputStream);

            visitor      = new HtmlVisitorChangeAtributeValue(factoryNames, lexer.TokenFactory);
            editorTokens = new BaseHtmlEditTokens(visitor);

            lexer        = editorTokens.Edit(lexer);
            visitor      = new HtmlVisitorEditorScriptTag(factoryNames, lexer.TokenFactory, factoryEditor);
            editorTokens = new BaseHtmlEditTokens(visitor);

            lexer        = editorTokens.Edit(lexer);
            visitor      = new HtmlVisitorEditStyleTag(factoryNames, lexer.TokenFactory, factoryEditor);
            editorTokens = new BaseHtmlEditTokens(visitor);

            lexer        = editorTokens.Edit(lexer);
            visitor      = new HtmlVisitorAddSpace(lexer.TokenFactory);
            editorTokens = new BaseHtmlEditTokens(visitor);

            lexer = editorTokens.Edit(lexer);
            CommonTokenStream cs = new CommonTokenStream(lexer);

            cs.Fill();
            return(cs.GetText());
        }
Example #4
        static void Main(string[] args)
        {
            using (var fileStream = new FileStream(args[0], FileMode.Open))
            {
                AntlrInputStream  inputStream       = new AntlrInputStream(fileStream);
                Python3Lexer      pyLexer           = new Python3Lexer(inputStream);
                CommonTokenStream commonTokenStream = new CommonTokenStream(pyLexer);

                // print out all the tokens, for debugging lexer grammar.
                commonTokenStream.Fill();
                var tokens = commonTokenStream.GetTokens();
                foreach (var token in tokens)
                {
                    Console.WriteLine(string.Format("{0}: {1}\n", pyLexer.Vocabulary.GetSymbolicName(token.Type), token.Text));
                }

                /*
                 * Python3Parser pyParser = new Python3Parser(commonTokenStream);
                 *
                 * Python3Parser.File_inputContext progContext = pyParser.file_input();
                 *
                 * Console.WriteLine(progContext.ToStringTree());
                 */
            }
        }
Example #5
        public static void Format(ICharStream stream, IFormatWriter writer, FormatOptions options)
        {
            var lexer  = new LuaLexer(stream);
            var tokens = new CommonTokenStream(lexer);
            var parser = new LuaParser(tokens);

            tokens.Fill();

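            // Comments arrive on the hidden channel; channel 2 carries the whitespace tokens the formatter needs.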
            var comments = tokens.GetTokens().Where(t => t.Channel == LuaLexer.Hidden);
            var spaces   = tokens.GetTokens().Where(t => t.Channel == 2);

            parser.BuildParseTree = true;
            parser.TrimParseTree  = false;

            IRuleNode root = parser.chunk();

            var ctx = new FormatContext(root, comments, spaces, writer, options);

            RuleFormatter.Format(root, ctx);

            ctx.WriteComments(int.MaxValue);

            var allTokens = tokens.GetTokens();

            if (allTokens.Count > 0)
            {
                var lastToken = allTokens[allTokens.Count - 1];
                while (ctx.line <= lastToken.Line)
                {
                    ctx.WriteLineBreak();
                }
            }

            tokens.Release(0);
        }
Example #6
        public ExpressionTemplate ParseExpressionTemplate(string s)
        {
            var lexer  = new ExpressionTemplateLexer(new AntlrInputStream(s));
            var tokens = new CommonTokenStream(lexer);

            tokens.Fill();
            var parser = new ExpressionTemplateParser(tokens)
            {
                ErrorHandler = new BailErrorStrategy()
            };
            var parseContext = parser.parse();
            ExpressiontemplateContext template;

            if (parseContext == null)
            {
                throw new ParserException();
            }
            else
            {
                template = parseContext.expressiontemplate();
            }
            if (template == null)
            {
                throw new ParserException();
            }
            return(ConvertExpressiontemplate(template));
        }
Example #7
        public void Test2()
        {
            const string input       = " 2131 $331AFfcdb $031477701  -%01010111011 ";
            var          inputStream = new AntlrInputStream(input);
            var          lexer       = new PascalLexer(inputStream);
            var          tokens      = new CommonTokenStream(lexer);

            tokens.Fill();
            var tokenList = tokens.GetTokens();

            // 10 + 1 -- EOF
            Assert.AreEqual(11, tokenList.Count);
            Assert.AreEqual(PascalLexer.WS, tokenList[0].Type);
            Assert.AreEqual(PascalLexer.UnsignedNumber, tokenList[1].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[2].Type);
            Assert.AreEqual(PascalLexer.UnsignedNumber, tokenList[3].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[4].Type);
            Assert.AreEqual(PascalLexer.UnsignedNumber, tokenList[5].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[6].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[7].Type);
            Assert.AreEqual(PascalLexer.SignedNumber, tokenList[8].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[9].Type);
            Assert.AreEqual(PascalLexer.Eof, tokenList.Last().Type);
            Assert.Pass();
        }
Example #8
        private void SyntaxColorRange(int startLine, int startColumn, int endLine, int endColumn)
        {
            var firstLine = startLine;

            m_editor.ClearStyles(startLine, startColumn, endLine, endColumn);

            if (startLine != endLine || startColumn != endColumn)
            {
                ITokenSource lexer  = new Lexer(this.GetParserStream(startLine, startColumn, endLine, endColumn));
                var          tokens = new CommonTokenStream(lexer);
                tokens.Fill();
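                // token.Line is relative to the re-lexed range, so offset by firstLine to map back to editor coordinates.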
                foreach (var token in tokens.GetTokens())
                {
                    EditorTextStyles style = EditorTextStyles.Normal;
                    if (token.Type >= 0 && m_tokenStyles.Count > token.Type)
                    {
                        style = m_tokenStyles[token.Type];
                    }
                    if (style != EditorTextStyles.Normal)
                    {
                        m_editor.SetStyle(
                            style,
                            (token.Line - 1) + firstLine,
                            token.Column,
                            (token.Line - 1) + firstLine,
                            token.Column + (token.StopIndex - token.StartIndex) + 1);
                    }
                }
            }
        }
Example #9
        public void Test5()
        {
            const string input       = "// afklakfj { ffsf }\n ZEvar313_1_  343ff! -- +23111";
            var          inputStream = new AntlrInputStream(input);
            var          lexer       = new PascalLexer(inputStream);
            var          tokens      = new CommonTokenStream(lexer);

            tokens.Fill();
            var tokenList = tokens.GetTokens();

            Assert.AreEqual(15, tokenList.Count);
            Assert.AreEqual(PascalLexer.SINGLE_COMMENT, tokenList[0].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[1].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[2].Type);
            Assert.AreEqual(PascalLexer.IDENT, tokenList[3].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[4].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[5].Type);
            Assert.AreEqual(PascalLexer.UnsignedNumber, tokenList[6].Type);
            Assert.AreEqual(PascalLexer.IDENT, tokenList[7].Type);
            Assert.AreEqual(PascalLexer.BAD_CHARACTER, tokenList[8].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[9].Type);
            Assert.AreEqual(PascalLexer.BAD_CHARACTER, tokenList[10].Type);
            Assert.AreEqual(PascalLexer.BAD_CHARACTER, tokenList[11].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[12].Type);
            Assert.AreEqual(PascalLexer.SignedNumber, tokenList[13].Type);
            Assert.AreEqual(PascalLexer.Eof, tokenList.Last().Type);
            Assert.Pass();
        }
Example #10
        /// <summary>
        /// Parses a given property name returning an AST.
        /// </summary>
        /// <param name="propertyName">to parse</param>
        /// <returns>AST syntax tree</returns>
        public static EsperEPL2GrammarParser.StartEventPropertyRuleContext Parse(string propertyName)
        {
            ICharStream input;

            try
            {
                input = new NoCaseSensitiveStream(propertyName);
            }
            catch (IOException ex)
            {
                throw new PropertyAccessException("IOException parsing property name '" + propertyName + '\'', ex);
            }

            var lex    = ParseHelper.NewLexer(input);
            var tokens = new CommonTokenStream(lex);

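            // Fill() drives the lexer across the entire input, so any lexical error surfaces here rather than during parsing.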
            try
            {
                tokens.Fill();
            }
            catch (Exception e)
            {
                if (ParseHelper.HasControlCharacters(propertyName))
                {
                    throw new PropertyAccessException("Unrecognized control characters found in text");
                }
                throw new PropertyAccessException("Failed to parse text: " + e.Message);
            }

            var g = ParseHelper.NewParser(tokens);

            EsperEPL2GrammarParser.StartEventPropertyRuleContext r;

            try
            {
                r = g.startEventPropertyRule();
            }
            catch (RecognitionException e)
            {
                return(HandleRecognitionEx(e, tokens, propertyName, g));
            }
            catch (Exception e)
            {
                if (Log.IsDebugEnabled)
                {
                    Log.Debug("Error parsing property expression [" + propertyName + "]", e);
                }
                if (e.InnerException is RecognitionException)
                {
                    return(HandleRecognitionEx((RecognitionException)e.InnerException, tokens, propertyName, g));
                }
                else
                {
                    throw;
                }
            }

            return(r);
        }
Example #11
        private CommonTokenStream GetCommonTokenStream(AntlrInputStream inputStream)
        {
            CLexer            lexer             = new CLexer(inputStream);
            CommonTokenStream commonTokenStream = new CommonTokenStream(lexer);

            commonTokenStream.Fill();
            return(commonTokenStream);
        }
Example #12
        private IList <IToken> GetTokens(string code)
        {
            var lexer        = new SmalltalkLexer(new AntlrInputStream(code));
            var tokensStream = new CommonTokenStream(lexer);

            tokensStream.Fill();
            return(tokensStream.GetTokens());
        }
Example #13
        public static IEnumerable <IToken> Tokenise(string testInput)
        {
            AntlrInputStream  inputStream       = new AntlrInputStream(testInput);
            SiemensGCodeLexer siemensLexer      = new SiemensGCodeLexer(inputStream);
            CommonTokenStream commonTokenStream = new CommonTokenStream(siemensLexer);

            commonTokenStream.Fill();
            return(commonTokenStream.GetTokens());
        }
Example #14
 private static void PrintTokens(CommonTokenStream cts)
 {
     cts.Fill();
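     // Fill() buffers every token up to EOF; Reset() rewinds the stream so a parser can still consume it afterwards.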
     foreach (var token in cts.GetTokens())
     {
         Console.WriteLine(token);
     }
     cts.Reset();
 }
Example #15
        public static string GetTokenTypes(this CommonTokenStream commonTokenStream)
        {
            commonTokenStream.Fill();
            var tokens = commonTokenStream.GetTokens();

            commonTokenStream.Reset();

            return(string.Join(" ", tokens.Select(t => $"[{VCELLexer.DefaultVocabulary.GetSymbolicName(t.Type)}:{t.Text}]")));
        }
Example #16
        /// <summary>
        /// Builds a new parser for the specified grammar and input using the supplied rules.
        /// </summary>
        /// <param name="grammar">The grammar to use.</param>
        /// <param name="inputText">The input text to use.</param>
        /// <param name="option">The parsing options to use.</param>
        /// <returns>A new <see cref="Parser"/> instance.</returns>
        /// <exception cref="GrammarException">No parser found for specified grammar.</exception>
        /// <exception cref="ArgumentNullException"><paramref name="grammar"/> is <see langword="null" />.</exception>
        /// <exception cref="ArgumentNullException"><paramref name="inputText"/> is <see langword="null" />.</exception>
        public Parser BuildParserWithOptions([NotNull] GrammarReference grammar, [NotNull] string inputText, ParseOption option)
        {
            if (grammar is null)
            {
                throw new ArgumentNullException(nameof(grammar));
            }
            if (inputText is null)
            {
                throw new ArgumentNullException(nameof(inputText));
            }
            if (grammar.Parser == null)
            {
                throw new GrammarException($"No parser found for grammar \"{grammar.GrammarName}\"");
            }

            var loader            = new Grammar.Loader();
            var inputStream       = new AntlrInputStream(inputText);
            var lexer             = loader.LoadLexer(grammar, inputStream);
            var commonTokenStream = new CommonTokenStream(lexer);

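            // Buffer all tokens up front so Tokens and SyntaxTokens are populated before the parser runs.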
            commonTokenStream.Fill();
            Tokens       = commonTokenStream.GetTokens();
            SyntaxTokens = ConvertTokensToSyntaxTokens(lexer, Tokens);

            if (option.HasFlag(ParseOption.Tokens))
            {
                foreach (var token in Tokens)
                {
                    Console.WriteLine(token.ToString());
                }
            }

            var parser = loader.LoadParser(grammar, commonTokenStream);

            // Handle Tree parsing option
            parser.BuildParseTree = option.HasFlag(ParseOption.Tree);

            // Handle Diagnostics parsing option
            if (option.HasFlag(ParseOption.Diagnostics))
            {
                parser.AddErrorListener(new DiagnosticErrorListener());
                parser.Interpreter.PredictionMode = Antlr4.Runtime.Atn.PredictionMode.LlExactAmbigDetection;
            }

            // Handle Sll parsing option
            if (option.HasFlag(ParseOption.Sll))
            {
                parser.Interpreter.PredictionMode = Antlr4.Runtime.Atn.PredictionMode.Sll;
            }

            // Handle Trace parsing option
            parser.Trace = option.HasFlag(ParseOption.Trace);

            return(parser);
        }
Example #17
        public static IList <CommonToken> copy(CommonTokenStream tokens)
        {
            IList <CommonToken> copy = new List <CommonToken>();

            tokens.Fill();
            foreach (Token t in tokens.GetTokens())
            {
                copy.Add(new CommonToken(t));
            }
            return(copy);
        }
Example #18
        private static void Lex(string codePath, TextWriter output)
        {
            var lexer  = new AdamantLexer(codePath);
            var tokens = new CommonTokenStream(lexer);

            tokens.Fill();
            foreach (var token in tokens.GetTokens())
            {
                output.WriteLine(Format(token));
            }
        }
Example #19
        public void Test4()
        {
            const string input1      = "{ comment 1 // Comment 2 }";
            var          inputStream = new AntlrInputStream(input1);
            var          lexer       = new PascalLexer(inputStream);
            var          tokens      = new CommonTokenStream(lexer);

            tokens.Fill();
            var tokenList = tokens.GetTokens();

            Assert.AreEqual(2, tokenList.Count);
            Assert.AreEqual(PascalLexer.MultiComment2, tokenList[0].Type);
            Assert.AreEqual(PascalLexer.Eof, tokenList.Last().Type);

            const string input2 = "// comment 1 { comment 2 }  ";

            inputStream = new AntlrInputStream(input2);
            lexer       = new PascalLexer(inputStream);
            tokens      = new CommonTokenStream(lexer);
            tokens.Fill();
            tokenList = tokens.GetTokens();
            Assert.AreEqual(2, tokenList.Count);
            Assert.AreEqual(PascalLexer.SINGLE_COMMENT, tokenList[0].Type);
            Assert.AreEqual(PascalLexer.Eof, tokenList.Last().Type);

            const string input3 = " (* Comment 1 { comment 2 } *) ";

            inputStream = new AntlrInputStream(input3);
            lexer       = new PascalLexer(inputStream);
            tokens      = new CommonTokenStream(lexer);
            tokens.Fill();
            tokenList = tokens.GetTokens();

            Assert.AreEqual(4, tokenList.Count);
            Assert.AreEqual(PascalLexer.WS, tokenList[0].Type);
            Assert.AreEqual(PascalLexer.MultiComment1, tokenList[1].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[2].Type);
            Assert.AreEqual(PascalLexer.Eof, tokenList.Last().Type);

            const string input4 = "{ Comment 1 (* comment 2 *) } ";

            inputStream = new AntlrInputStream(input4);
            lexer       = new PascalLexer(inputStream);
            tokens      = new CommonTokenStream(lexer);
            tokens.Fill();
            tokenList = tokens.GetTokens();
            Assert.AreEqual(3, tokenList.Count);
            Assert.AreEqual(PascalLexer.MultiComment2, tokenList[0].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[1].Type);
            Assert.AreEqual(PascalLexer.Eof, tokenList.Last().Type);

            Assert.Pass();
        }
Example #20
        TimeSpan Iterate(string text, int count)
        {
            DateTime start = DateTime.Now;

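            // Re-lex the input on each iteration; Fill() forces the whole token stream to be produced, so the loop measures lexing cost.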
            for (int i = 0; i < count; i++)
            {
                SimpleExpressionLexer lexer  = new SimpleExpressionLexer(new ANTLRStringStream(text));
                CommonTokenStream     tokens = new CommonTokenStream(lexer);
                tokens.Fill();
            }

            return(DateTime.Now - start);
        }
Example #21
 private static TokenInfo[] GetTokens(CommonTokenStream commonTokenStream)
 {
     commonTokenStream.Fill();
     return((from t in commonTokenStream.GetTokens()
             select new TokenInfo
     {
         Type = $"{t.TypeEnum()} ({t.Type})",
         Text = t.Text,
         Length = t.StopIndex - t.StartIndex,
         Line = t.Line,
         Column = t.Column
     }).ToArray());
 }
Example #22
        public static Entity Parse(string source)
        {
            var lexer       = new AngouriMathLexer(new AntlrInputStream(source), null, new AngouriMathTextWriter());
            var tokenStream = new CommonTokenStream(lexer);

            tokenStream.Fill();
            var tokenList = tokenStream.GetTokens();

            const string NUMBER            = nameof(NUMBER);
            const string VARIABLE          = nameof(VARIABLE);
            const string PARENTHESIS_OPEN  = "'('";
            const string PARENTHESIS_CLOSE = "')'";
            const string FUNCTION_OPEN     = "\x1"; // Fake display name for all function tokens e.g. "'sin('"
Example #23
        static void Main(string[] args)
        {
            try {
                if (args.Length > 0)
                {
                    string fn;
                    if (args[0] == "-q")
                    {
                        fn = args[1];
                    }
                    else
                    {
                        fn = args[0];
                    }
                    ais = new AntlrInputStream(new StreamReader(fn));

                    Console.Error.WriteLine("lex...");
                    lexer = new SVLexer(ais);
                    cts   = new CommonTokenStream(lexer);
                    cts.Fill();

                    Console.Error.WriteLine("parse...");
                    parser = new SVParser(cts);
                    prc    = parser.source_text();

                    if (args[0] != "-q")
                    {
                        Console.Error.WriteLine("output...");
                        PrintTokens(cts);
                        PrintSyntaxTree(prc, 0);
                    }
                    else
                    {
                        CheckSyntaxTree(prc, 0);
                    }
                }
                else
                {
                    Console.WriteLine("Usage:");
                    Console.WriteLine("  SVTest <inputfile>");
                }
            } catch (Exception ex) {
                Console.Error.WriteLine("Error: " + ex.Message);
            }

            if (errCount > 0)
            {
                Console.Error.WriteLine("Error: " + errCount + " syntax error found.");
                Environment.Exit(-1);
            }
        }
Example #24
        private AclScriptParser PrepareParser(string code, out AclErrorListener errorListener, out CommonTokenStream tokens)
        {
            errorListener = new AclErrorListener();
            AntlrInputStream input = new AntlrInputStream(code);
            AclScriptLexer   lexer = new AclScriptLexer(input);

            lexer.AddErrorListener(errorListener);
            tokens = new CommonTokenStream(lexer);
            AclScriptParser parser = new AclScriptParser(tokens);

            parser.AddErrorListener(errorListener);
            tokens.Fill();
            return(parser);
        }
Example #25
        public void Test3()
        {
            const string input1      = "// 2131 +faf324v __??.f \n//dsaffa ";
            var          inputStream = new AntlrInputStream(input1);
            var          lexer       = new PascalLexer(inputStream);
            var          tokens      = new CommonTokenStream(lexer);

            tokens.Fill();
            var tokenList = tokens.GetTokens();

            // 3 + 1 -- EOF
            Assert.AreEqual(4, tokenList.Count);
            Assert.AreEqual(PascalLexer.SINGLE_COMMENT, tokenList[0].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[1].Type);
            Assert.AreEqual(PascalLexer.SINGLE_COMMENT, tokenList[2].Type);
            Assert.AreEqual(PascalLexer.Eof, tokenList.Last().Type);


            const string input2 = "{ My beautiful function returns an interesting result }  ";

            inputStream = new AntlrInputStream(input2);
            lexer       = new PascalLexer(inputStream);
            tokens      = new CommonTokenStream(lexer);
            tokens.Fill();
            tokenList = tokens.GetTokens();
            Assert.AreEqual(4, tokenList.Count);
            Assert.AreEqual(PascalLexer.MultiComment2, tokenList[0].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[1].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[2].Type);
            Assert.AreEqual(PascalLexer.Eof, tokenList.Last().Type);

            const string input3 = "(* This is an old style comment *)\n{  This is a Turbo Pascal comment }\n// This is a Delphi comment. All is ignored till the end of the line. ";

            inputStream = new AntlrInputStream(input3);
            lexer       = new PascalLexer(inputStream);
            tokens      = new CommonTokenStream(lexer);
            tokens.Fill();
            tokenList = tokens.GetTokens();
            Assert.AreEqual(6, tokenList.Count);
            Assert.AreEqual(PascalLexer.MultiComment1, tokenList[0].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[1].Type);
            Assert.AreEqual(PascalLexer.MultiComment2, tokenList[2].Type);
            Assert.AreEqual(PascalLexer.WS, tokenList[3].Type);
            Assert.AreEqual(PascalLexer.SINGLE_COMMENT, tokenList[4].Type);
            Assert.AreEqual(PascalLexer.Eof, tokenList.Last().Type);

            Assert.Pass();
        }
Example #26
        public virtual ITokenSource Edit(ITokenSource tokens)
        {
            CommonTokenStream s      = new CommonTokenStream(tokens);
            JsParser          parser = new JsParser(s);

            s.Fill();

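            // Parse the buffered tokens, let the visitor rewrite them, then hand the edited list back as a fresh token source.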
            var program = parser.program();

            visitor.Visit(program, s.GetTokens());

            var source = new ListTokenSource(visitor.GetResult());

            source.TokenFactory = tokens.TokenFactory;
            return(source);
        }
Example #27
        public ITokenSource Edit(ITokenSource tokens)
        {
            CommonTokenStream s      = new CommonTokenStream(tokens);
            HtmlParser        parser = new HtmlParser(s);

            s.Fill();

            var program = parser.htmlDocument();

            visitor.Visit(program, s.GetTokens());
            var list   = visitor.GetResult();
            var result = new ListTokenSource(list);

            result.TokenFactory = tokens.TokenFactory;
            return(result);
        }
Example #28
        public static IEnumerable <FanucGCodeTextSpan> Tokenise(string testInput)
        {
            AntlrInputStream  inputStream       = new AntlrInputStream(testInput);
            FanucGCodeLexer   fanucLexer        = new FanucGCodeLexer(inputStream);
            CommonTokenStream commonTokenStream = new CommonTokenStream(fanucLexer);

            commonTokenStream.Fill();
            var tokens = commonTokenStream.GetTokens();

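            // Project every non-EOF token onto a text span the editor can classify.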
            return(tokens.Where(t => t.Type != FanucGCodeLexer.Eof).Select(t => new FanucGCodeTextSpan
            {
                StartPos = t.StartIndex,
                Length = (t.StopIndex - t.StartIndex) + 1,
                TokenType = (FanucGCodeTokenTypes)t.Type
            }));
        }
Example #29
        public void TestEmptyComment()
        {
            string inputText   = "/**/ ";
            var    input       = new ANTLRStringStream(inputText);
            var    lexer       = new PreprocessorLexer(input);
            var    tokenStream = new CommonTokenStream(lexer);

            tokenStream.Fill();

            List <IToken> tokens = tokenStream.GetTokens();

            Assert.AreEqual(2, tokens.Count);
            Assert.AreEqual(PreprocessorLexer.DELIMITED_COMMENT, tokens[0].Type);
            Assert.AreEqual("/**/", tokens[0].Text);
            Assert.AreEqual(PreprocessorLexer.EOF, tokens[1].Type);
        }
Example #30
        static void Main(string[] args)
        {
            AntlrInputStream inputStream = new AntlrInputStream(Console.In);

            GramaticaLexer lexer = new GramaticaLexer(inputStream);

            CommonTokenStream bts = new CommonTokenStream(lexer);

            bts.Fill();
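            // Fill() has already exhausted the lexer, so this direct NextToken() call only yields an EOF token.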
            IToken tk = lexer.NextToken();
            //tk.GetType();

            GramaticaParser p = new GramaticaParser(bts);

            p.start();
            Console.ReadKey();
        }