/// <summary>
/// Verifies that the lexer recognizes element and attribute names
/// case-insensitively: an all-lowercase document must yield the same token
/// stream as the mixed-case document used by TestLexer.
/// </summary>
public void TestIgnorCase()
 {
     string xml = @"
      <mapreduce>
          <map>
              <maprule type = 'mapruleont1iftrue' />
              <foreach>
                <rule type = 'ininvalueont3'/>
              </foreach>
          </map>
       <reduce>
              <reducerule type = 'reduceruleont1' />
              <reducerule type = 'assignruleont1' />
          </reduce>
      </mapreduce>";
     // Was: XDocument _xDoc = _xDoc = XDocument.Parse(xml); -- the assignment
     // was redundantly duplicated.
     XDocument _xDoc = XDocument.Parse(xml);
     XElement source = _xDoc.Element("mapreduce");
     var lexer = new Lexer(source);
     var results = lexer.Lex().ToList();
     // Tokens are expected in document order; the EOF tokens close the
     // nested scopes (foreach/map and reduce/mapreduce).
     Assert.AreEqual(TokenType.MAPREDUCE, results[0].TokenType);
     Assert.AreEqual(TokenType.MAP, results[1].TokenType);
     Assert.AreEqual(TokenType.MAPRULE, results[2].TokenType);
     Assert.AreEqual(TokenType.FOREACH, results[3].TokenType);
     Assert.AreEqual(TokenType.RULE, results[4].TokenType);
     Assert.AreEqual(TokenType.EOF, results[5].TokenType);
     Assert.AreEqual(TokenType.EOF, results[6].TokenType);
     Assert.AreEqual(TokenType.REDUCE, results[7].TokenType);
     Assert.AreEqual(TokenType.REDUCERULE, results[8].TokenType);
     Assert.AreEqual(TokenType.REDUCERULE, results[9].TokenType);
     Assert.AreEqual(TokenType.EOF, results[10].TokenType);
     Assert.AreEqual(TokenType.EOF, results[11].TokenType);
 }
 public void TestLexer()
 {
     string xml = @"
     <MapReduce>
         <Map>
             <MapRule Type = 'MapRuleOnT1IfTrue' />
             <ForEach>
               <Rule Type = 'IninValueOnT3'/>
             </ForEach>
         </Map>
      <Reduce>
             <ReduceRule Type = 'ReduceRuleOnT1' />
             <ReduceRule Type = 'AssignRuleOnT1' />
         </Reduce>
     </MapReduce>";
     XDocument _xDoc = _xDoc = XDocument.Parse(xml);
     XElement source = _xDoc.Element("MapReduce");
     var lexer = new Lexer(source);
     var results = lexer.Lex().ToList();
     Assert.AreEqual(TokenType.MAPREDUCE, results[0].TokenType);
     Assert.AreEqual(TokenType.MAP, results[1].TokenType);
     Assert.AreEqual(TokenType.MAPRULE, results[2].TokenType);
     Assert.AreEqual(TokenType.FOREACH, results[3].TokenType);
     Assert.AreEqual(TokenType.RULE, results[4].TokenType);
     Assert.AreEqual(TokenType.EOF, results[5].TokenType);
     Assert.AreEqual(TokenType.EOF, results[6].TokenType);
     Assert.AreEqual(TokenType.REDUCE, results[7].TokenType);
     Assert.AreEqual(TokenType.REDUCERULE, results[8].TokenType);
     Assert.AreEqual(TokenType.REDUCERULE, results[9].TokenType);
     Assert.AreEqual(TokenType.EOF, results[10].TokenType);
     Assert.AreEqual(TokenType.EOF, results[11].TokenType);
 }
        /// <summary>
        /// Validates a node's attributes: checks attribute uniqueness, then
        /// reports "alt" and "href" as missing attributes when absent, flagging
        /// the missing alt as an accessibility problem.
        /// </summary>
        public virtual void Check(Lexer lexer, Node node)
        {
            bool altSeen = false;
            bool hrefSeen = false;

            node.CheckUniqueAttributes(lexer);

            // Walk the singly linked attribute list once, noting whether the
            // two attributes we care about are present.
            for (AttVal av = node.Attributes; av != null; av = av.Next)
            {
                Attribute attr = av.CheckAttribute(lexer, node);

                if (attr == AttributeTable.AttrAlt)
                    altSeen = true;
                else if (attr == AttributeTable.AttrHref)
                    hrefSeen = true;
            }

            if (!altSeen)
            {
                // Record the accessibility violation before emitting the report.
                lexer.BadAccess |= Report.MISSING_LINK_ALT;
                Report.AttrError(lexer, node, "alt", Report.MISSING_ATTRIBUTE);
            }

            if (!hrefSeen)
            {
                Report.AttrError(lexer, node, "href", Report.MISSING_ATTRIBUTE);
            }
        }
Exemple #4
0
 /// <summary>
 /// Scans every token from the lexer and renders one line per token: the
 /// tag value, followed by a tab and the token's text for Word and
 /// CharToken tokens. The terminating EndOfStream token is included.
 /// </summary>
 public static string Dump(Lexer aLexer)
 {
     using (var writer = new StringWriter())
     {
         for (; ; )
         {
             var token = aLexer.Scan();
             writer.Write(token.TagValue);

             if (token is Word word)
             {
                 writer.Write("\t");
                 writer.Write(word.Value);
             }

             if (token is CharToken unknown)
             {
                 writer.Write("\t");
                 writer.Write(unknown.Value);
             }

             writer.WriteLine();

             // EndOfStream is written out above before the loop terminates.
             if (token.TagValue == Tag.EndOfStream)
                 break;
         }

         return writer.ToString();
     }
 }
 /// <summary>
 /// Reads the source file line by line, tokenizes each line, and feeds the
 /// tokens through the parser, collecting each accepted Block into the
 /// Block array property.
 /// </summary>
 public void ReadFile()
 {
     Lexer lexer = new Lexer();
     Parser parser = new Parser();
     // NOTE(review): GetFileTextLinesAsync() is used without await here --
     // presumably it returns a synchronous sequence despite the name; confirm.
     var lines = GetFileTextLinesAsync().ToList();
     List<Block> block = new List<Block>();
     for (int i = 0; i < lines.Count; i++)
     {
         // The line index is passed so tokens can carry their position.
         var tokenList = lexer.ReadLine(lines[i], i);
         foreach (var item in tokenList)
         {
             // Re-push the same token while the parser is mid-reduction.
             do
             {
                 parser.Push(item);
             } while (parser.IsLoopingForReduce);
             if (parser.IsAccepted)
             {
                 block.Add(parser.Block);
                 parser.Reset();
                 // ACC will not push the current item, which would cause an
                 // error for SETTINGS blocks, so re-push it after the reset.
                 if (block[block.Count - 1].BlockType == BlockTypes.SETTINGS)
                 {
                     parser.Push(item);
                 }
             }
         }
     }
     Block = block.ToArray();
 }
Exemple #6
0
 /// <summary>
 /// Input that ends inside a string literal should still yield the partial
 /// string token, but record exactly one "not closed" error.
 /// </summary>
 public void ShouldReportErrorForUnclosedString()
 {
     var lexer = new Lexer("\"the cow said \\\"moo\\\"");
     AssertTokenIs("string", "the cow said \"moo\"", lexer.Next());
     // Was Assert.AreEqual(lexer.Errors.Count, 1): the framework expects the
     // expected value first, so the arguments were swapped.
     Assert.AreEqual(1, lexer.Errors.Count);
     Assert.AreEqual("String not closed - Unexpected end of file", lexer.Errors[0].Message);
 }
Exemple #7
0
 /// <summary>
 /// "=" followed by "==" must lex as two distinct operator tokens rather
 /// than merging; the stream ends with an EOF token.
 /// </summary>
 public void ShouldDistinguishTwoDigitOperatorsStartingWithSameChar()
 {
     var lexer = new Lexer("= ==");
     AssertTokenIs("=", lexer.Next());
     AssertTokenIs("==", lexer.Next());
     AssertTokenIs("EOF", lexer.Next());
 }
 /// <summary>
 /// Creates a loader that reads a module definition from the given text
 /// reader for the specified processor architecture.
 /// </summary>
 public ModuleDefinitionLoader(TextReader rdr, string filename, IProcessorArchitecture arch) : base(null, filename, null)
 {
     this.filename = filename;
     this.lexer = new Lexer(rdr);
     this.bufferedTok = null;
     this.arch = arch;
 }
        /// <summary>
        /// Attempts to parse this pattern at the lexer's current position,
        /// honouring the exclusion list and precedence rules, with trace output
        /// on entry and on the yes/no outcome. Returns ParseTree.No on failure.
        /// </summary>
        public override ParseTree Parse(Lexer lexer, ParserState state)
        {
            state.RuntimeState.Runtime.ParseTrace.Enter(this, lexer.CurrentSource(), "Pattern " + Type.Name);

            int startPosition = lexer.Position;

            // A pattern on the exclusion list must not be re-entered.
            if (state.Excluded.Contains(this))
            {
                state.RuntimeState.Runtime.ParseTrace.No(this, lexer.CurrentSource(), "Excluded");
                return ParseTree.No;
            }

            // Temporarily install this pattern's precedence when it overrides
            // the one currently in effect; always restore afterwards.
            Precedence savedPrecedence = state.CurrentPrecedence;

            if (Precedence.Overwrites(state.CurrentPrecedence))
                state.CurrentPrecedence = Precedence;

            ParseTree result = ParseGraph.Parse(lexer, state);

            state.CurrentPrecedence = savedPrecedence;

            if (result == ParseTree.No)
            {
                state.RuntimeState.Runtime.ParseTrace.No(this, lexer.SourceFrom(startPosition));
                return ParseTree.No;
            }

            state.RuntimeState.Runtime.ParseTrace.Yes(this, lexer.SourceFrom(startPosition));

            return result;
        }
        /// <summary>
        /// Parses one expression statement starting from the given token and
        /// links the resulting node into the statement chain of the main code
        /// block, returning the statement leaf's run result.
        /// </summary>
        protected override ResultOfProcess ProcessToken(Lexer.Token token)
        {
            // Hand the token back to the context so the statement leaf can
            // consume it from the beginning.
            this.Context.Recovery(token);

            var statementLeaf = new Syntaxer.StatementLeafs.ExpressionStatementLeaf(this.Context);
            var runResult = statementLeaf.Run();
            var statementNode = statementLeaf.Result;

            if (mCurrStatement != null)
            {
                // Append to the chain and advance the tail pointer.
                mCurrStatement.NextStatement = statementNode;
                mCurrStatement = statementNode;
            }
            else
            {
                // First statement: it becomes both the chain head and tail.
                mCurrStatement = statementNode;
                mMainCodeBlock.FirstStatement = mCurrStatement;
            }

            return runResult;
        }
Exemple #11
0
        /// <summary>
        /// Lexing a three-line input should yield tokens whose line and column
        /// numbers match their position in the original text.
        /// </summary>
        public void CorrectLineAndColumnNumbers()
        {
            var input = "Hello World!\nSecond line\r\nFoo";

            MemoryStream stream = new MemoryStream();
            var writer = new StreamWriter(stream);
            writer.Write(input);
            writer.Flush();
            stream.Position = 0;

            var word = new Terminal("Word", "\\S+");
            var whitespace = new Terminal("Whitespace", " |\n|\r");

            var lexer = new Lexer(stream, word, whitespace);
            Token[] tokens = lexer.ToArray();

            // Expected (terminal, line, column) for each token in order.
            // Note: the \r\n sequence produces two Whitespace tokens.
            var expected = new[]
            {
                ("Word", 1, 1),
                ("Whitespace", 1, 6),
                ("Word", 1, 7),
                ("Whitespace", 1, 13),
                ("Word", 2, 1),
                ("Whitespace", 2, 7),
                ("Word", 2, 8),
                ("Whitespace", 2, 12),
                ("Whitespace", 2, 13),
                ("Word", 3, 1),
            };

            Assert.AreEqual(expected.Length, tokens.Length);
            for (int i = 0; i < expected.Length; i++)
            {
                var (terminal, line, column) = expected[i];
                AssertToken(tokens[i], terminal, line, column);
            }
        }
Exemple #12
0
 /// <summary>
 /// Matches a single character from the lexer against this node's range.
 /// Consumes the character on success; on failure records the expected
 /// range at the error position and returns ParseTree.No.
 /// </summary>
 public override ParseTree Parse(Lexer lexer, ParserState state)
 {
     // Skip leading whitespace before inspecting the next character.
     lexer.Whitespace(state.RuntimeState);

     int startPosition = lexer.Position;

     state.RuntimeState.Runtime.ParseTrace.Enter(this, lexer.CurrentSource(), range.ToString());

     char next = lexer.Peek();

     if (!range.Contains(next))
     {
         // Remember what was expected here so error messages can report it.
         lexer.ErrorStrings[startPosition].Add(range.ToString());

         state.RuntimeState.Runtime.ParseTrace.No(this,
             lexer.SourceFrom(startPosition), TextEscape.Quote(next));

         return ParseTree.No;
     }

     lexer.Read();

     state.RuntimeState.Runtime.ParseTrace.Yes(this,
         lexer.SourceFrom(startPosition), TextEscape.Quote(next));

     // Only materialize a text node when the parser asked for them.
     return state.BuildTextNodes
         ? new ParseTree(Convert.ToString(next))
         : ParseTree.Yes;
 }
Exemple #13
0
        /// <summary>
        /// Parses the given chunk of code and verifies that it matches the
        /// expected pretty-printed result, updating the pass/fail counters and
        /// reporting mismatches and parse errors to the console.
        /// </summary>
        /// <param name="source">Source code to parse.</param>
        /// <param name="expected">Expected pretty-printed form of the parsed expression.</param>
        public void Test(string source, string expected)
        {
            var lexer = new Lexer(source);
            Parser parser = new BantamParser(lexer);

            try
            {
                var expression = parser.ParseExpression();
                var printed = new StringBuilder();
                expression.Print(printed);
                var actual = printed.ToString();

                if (!expected.Equals(actual))
                {
                    _failed++;
                    Console.Out.WriteLine("[FAIL] Expected: " + expected);
                    Console.Out.WriteLine("         Actual: " + actual);
                }
                else
                {
                    _passed++;
                }
            }
            catch (ParseException ex)
            {
                // A parse error counts as a failure, with the exception message
                // shown in place of the actual output.
                _failed++;
                Console.Out.WriteLine("[FAIL] Expected: " + expected);
                Console.Out.WriteLine("          Error: " + ex.Message);
            }
        }
 /// <summary>
 /// Reports any "src" attribute on this node as a proprietary attribute
 /// value.
 /// </summary>
 public virtual void Check(Lexer lexer, Node node)
 {
     if (node.GetAttrByName("src") != null)
     {
         Report.AttrError(lexer, node, "src", Report.PROPRIETARY_ATTR_VALUE);
     }
 }
Exemple #15
0
 /// <summary>
 /// Integer and decimal literals should all lex as "number" tokens carrying
 /// their numeric values.
 /// </summary>
 public void ShouldRecognizeNumbers()
 {
     var lexer = new Lexer("1 2.3 4.56789");
     AssertTokenIs("number", 1, lexer.Next());
     AssertTokenIs("number", 2.3, lexer.Next());
     AssertTokenIs("number", 4.56789, lexer.Next());
 }
 /// <summary>
 /// Creates a loader that reads a module definition from an in-memory byte
 /// buffer, defaulting the architecture to x86 flat 32-bit.
 /// </summary>
 public ModuleDefinitionLoader(IServiceProvider services, string filename, byte[] bytes) : base(services, filename, bytes)
 {
     this.filename = filename;
     this.lexer = new Lexer( new StreamReader(new MemoryStream(bytes)));
     this.bufferedTok = null;
     this.arch = new Reko.Arch.X86.X86ArchitectureFlat32();
 }
Exemple #17
0
 /// <summary>
 /// Each token should record the 1-based line number it was found on.
 /// </summary>
 public void ShouldRecordPositionOfTokens()
 {
     var lexer = new Lexer("one\ntwo\nthree");
     Assert.AreEqual(1, lexer.Next().LineNo);
     Assert.AreEqual(2, lexer.Next().LineNo);
     Assert.AreEqual(3, lexer.Next().LineNo);
 }
Exemple #18
0
 /// <summary>
 /// Finds the object that best matches the phrase held by the lexer by
 /// delegating to this edge's gate and (if passable) its sink.
 /// </summary>
 /// <param name="lexer">The words to match</param>
 /// <param name="score">The score for the best match: 0 if no match [ref]</param>
 /// <param name="numWordsMatch">The number of words that match [ref]</param>
 /// <param name="bestMatch">The object that best matches [ref]</param>
 /// <param name="lexerState">Lexer state associated with the best match [ref]</param>
 /// <param name="seen">Set of objects already examined, used to break cycles</param>
 public void match(Lexer lexer, ref double score,
            ref int numWordsMatch,
            ref object bestMatch,
            ref LexState lexerState, Dictionary<object, object> seen)
 {
     // Skip if we've already seen this object.
     if (seen.ContainsKey(this))
         return;

     // Add ourself to prevent cycles.
     seen[this] = this;

     // We only check whether the gate matches the phrase.
     if (null != edge.gate)
         edge.gate.match(lexer, ref score, ref numWordsMatch,
                         ref bestMatch, ref lexerState, seen);

     // The sink should only match if there is no door, or the door is open.
     if (null != edge.sink && (null == edge.gate || isOpen(edge.gate)))
     {
         // We only want match -- matchInContext checks the stuff from the parents.
         edge.sink.match(lexer, ref score, ref numWordsMatch,
                         ref bestMatch, ref lexerState, seen);
     }
 }
 /// <summary>
 /// Creates a loader that reads a module definition from an in-memory byte
 /// buffer.
 /// </summary>
 public ModuleDefinitionLoader(IServiceProvider services, string filename, byte[]  bytes) : base(services, filename, bytes)
 {
     this.filename = filename;
     var rdr = new StreamReader(new MemoryStream(bytes));
     this.lexer = new Lexer(rdr);
     this.bufferedTok = null;
     // NOTE(review): unlike the sibling overload, this.arch is not assigned
     // here -- confirm the base constructor or callers set it before use.
 }
Exemple #20
0
        /// <summary>
        /// Consumes one or more PA productions, optionally followed by a
        /// terminating BOI, and builds the resulting number. Returns false when
        /// no leading PA is present.
        /// </summary>
        public override bool CreateNonTerminal(Lexer lex, ProductionStorage ps)
        {
            // Don't have fraction yet.

            // At least one PA is required up front; afterwards PAs are
            // collected greedily until one fails to match.
            string pa;
            if (!ps.MatchProduction(Selmaho.PA, out pa))
                return false;

            do
            {
                PAs.Add(pa);
            } while (ps.MatchProduction(Selmaho.PA, out pa));

            // Eat the BOI terminator that might be there.
            ps.MatchProduction(Selmaho.BOI);

            ConstructNumber();

            return true;
        }
Exemple #21
0
        /// <summary>
        /// Entry point: reads the file "test", parses it, and pretty-prints the
        /// resulting syntax tree to the console.
        /// </summary>
        private static void Main(string[] args)
        {
            /*
            if (args.Length != 1)
                exit("Usage: Simplecalc.exe filename");
            */
            using (StreamReader sr = new StreamReader(File.Open("test", FileMode.Open)))
            {
                // Read source
                Lexer lexer = new Lexer(sr);

                // Parse source
                Parser parser = new Parser(lexer);
                Start ast = null;

                try
                {
                    ast = parser.Parse();
                }
                catch (Exception ex)
                {
                    // NOTE(review): if exit() does not terminate the process,
                    // ast stays null and ast.Apply below would throw -- confirm.
                    exit(ex.ToString());
                }

                // Print tree
                SimplePrinter printer = new SimplePrinter(true, ConsoleColor.White, ConsoleColor.Gray, ConsoleColor.Red, ConsoleColor.Blue);
                ast.Apply(printer);
            }

            exit("Done");
        }
Exemple #22
0
 /// <summary>
 /// Creates a loader that parses a Wine .spec file from an in-memory byte
 /// buffer.
 /// </summary>
 public WineSpecFileLoader(IServiceProvider services, string filename, byte[] bytes)
     : base(services, filename, bytes)
 {
     this.filename = filename;
     var rdr = new StreamReader(new MemoryStream(bytes));
     this.lexer = new Lexer(rdr);
 }
Exemple #23
0
        /// <summary>
        /// Parses an array-element assignment "name[index] = value" starting
        /// just after the opening bracket. Returns the assignment node, or null
        /// after rolling the lexer back to the variable when no '=' follows the
        /// index (i.e. this was not an assignment after all).
        /// </summary>
        private static Nodes.Assignment ArrayAssignment(Lexer.Lexem currLexem, Nodes.Node.Coords coords)
        {
            Nodes.Expression index = Expr();
            // The index expression must be followed by ']'.
            if (Lexer.LookAhead().LexType != Lexer.LexType.CloseSquadBracket)
                ErrorList.Add(new Error(ParserException.NeedCloseSquadBracket, Lexer.LookBack().EndCoords));
            else
                Lexer.NextLexem();

            if (Lexer.LookAhead().LexType == Lexer.LexType.EqualSign)
            {
                Lexer.NextLexem();
                Nodes.Expression value = Expr();
                /*if (Lexer.LookAhead().LexType != Lexer.LexType.Semicolon)
                    ErrorList.Add(new Error(ParserException.NeedSemicolon, Lexer.LookBack().EndCoords));
                else
                    Lexer.NextLexem();*/
                Nodes.Assignment result = new Nodes.Assignment(new Nodes.ElementOfArray(currLexem.VarName, index), value, coords);
                // A single assignment is its own head and tail in the chain.
                result.Head = result;
                result.Tail = result;
                return result;
            }
            else
            {
                // Not an assignment: rewind the lexer past everything consumed
                // since (and including) the variable so the caller can retry.
                while (Lexer.LookBack().LexType != Lexer.LexType.Variable)
                    Lexer.RollBack();
                Lexer.RollBack();
                return null;
            }
        }
        /// <summary>
        /// Parses a function definition: an optional name, a parenthesized
        /// parameter list, then either a braced body (function) or a
        /// "->" expression terminated by ";" (lambda).
        /// </summary>
        /// <exception cref="ParseException">Thrown when neither form follows the parameter list.</exception>
        public override Statement Parse(Parser parser, Lexer.Token current)
        {
            // NOTE(review): when no IDENTIFIER follows, name stays null and is
            // passed to IdentifierExpr below -- confirm that is intended.
            string name = null;

            if (parser.Peek().Type == "IDENTIFIER")
            {
                name = parser.Current.Lexeme;
                parser.Consume("IDENTIFIER");
            }

            parser.Consume("LEFTPAREN");
            var parameters = ParseParameterList(parser).ToArray();
            parser.Consume("RIGHTPAREN");

            var token = parser.Peek();

            if (token.Type == "LEFTBRACE")
            {
                // Braced body: a full function definition.
                var body = parser.ParseNext();
                return new FunctionDefinitionExpr(new IdentifierExpr(name), parameters, (Statement)body,
                    new IdentifierExpr("UNKNOWN"));
            }
            else if (token.Type == "RIGHTARROW")
            {
                // "->" expression ";": a lambda definition.
                parser.Consume("RIGHTARROW");
                var body = parser.ParseExpression(0);
                parser.Consume("SEMICOLON");
                return new LambdaDefinitionExpr(new IdentifierExpr(name), parameters, body,
                    new IdentifierExpr("UNKNOWN"));
            }

            // Was "Malformed function defintion" -- typo fixed.
            throw new ParseException("Malformed function definition");
        }
Exemple #25
0
 /// <summary>
 /// REPL-style entry point: reads a line of Lua source from stdin, compiles
 /// it, and writes the compiled chunk to "out.sluac", looping forever.
 /// Errors are printed and the loop continues.
 /// </summary>
 public static void Main(string[] args)
 {
     while (true)
     {
         try
         {
             string s = Console.ReadLine();
             Lexer l = new Lexer();
             Parser p = new Parser(l.Lex(s));
             Ast.Chunk c = p.Parse();
             Compiler.Compiler cplr = new SharpLua.Compiler.Compiler();
             LuaFile proto = cplr.Compile(c, "<stdin>");
             Console.WriteLine("compiled!");
             // Was a bare File.Open/fs.Close pair: the stream leaked when a
             // write threw. The using block guarantees disposal.
             using (FileStream fs = File.Open("out.sluac", FileMode.Create))
             {
                 foreach (char ch in proto.Compile())
                 {
                     // Each char of the compiled chunk is emitted as one byte.
                     fs.WriteByte((byte)ch);
                 }
             }
             Console.WriteLine("written to out.sluac!");
         }
         catch (Exception ex)
         {
             Console.WriteLine(ex.ToString());
         }
     }
     // NOTE(review): unreachable -- the loop above never exits.
     Console.Write("Press any key to continue . . . ");
     Console.ReadKey(true);
 }
Exemple #26
0
 /// <summary>
 /// Parametrized check: lexing a numeric input string should produce a
 /// Number token whose value equals the expected double.
 /// </summary>
 public void Lexer_CanIdentifyNumberTokens(string input, double expectedValue)
 {
     var lexer = new Lexer(input);
     // Next() returns true while a token is available.
     Assert.IsTrue(lexer.Next());
     Assert.AreEqual(TokenType.Number, lexer.CurrentToken.Type);
     Assert.AreEqual(expectedValue, (double)(Number)lexer.CurrentToken.Value);
 }
Exemple #27
0
    /// <summary>
    /// On first load, downloads a match-detail HTML page, parses it, and
    /// writes the first three cells of the first table row to the response.
    /// </summary>
    protected void Page_Load(object sender, EventArgs e)
    {
        if (!IsPostBack)
        {
            //GridView1.DataSource = bll.GetScrollMatchList();
            //GridView1.DataBind();
            WebClient web = WebClientBLL.getWebClient();
            string s = web.DownloadString("http://data.nowscore.com/detail/403052.html");
            Lexer lexer = new Lexer(s);
            Parser parser = new Parser(lexer);
            // Navigates HTML -> BODY -> first child, which is assumed to be
            // the stats table -- TODO confirm the page layout still matches.
            INode tableNode = parser.Parse(new TagNameFilter("HTML"))[0].Children.ExtractAllNodesThatMatch(new TagNameFilter("BODY"))[0].Children[0];
            TableTag tt = tableNode as TableTag;

            // NOTE(review): tt is not null-checked; a layout change would make
            // the calls below throw NullReferenceException.
            Response.Write(tt.GetRow(0).Columns[0].Children[0].Children[0].ToPlainTextString());
            Response.Write(tt.GetRow(0).Columns[1].Children[0].Children[0].ToPlainTextString());
            Response.Write(tt.GetRow(0).Columns[2].Children[0].Children[0].ToPlainTextString());

            //ITag divNode = bodyNodes.ExtractAllNodesThatMatch(new TagNameFilter("FORM"))[0].Children.ExtractAllNodesThatMatch(new TagNameFilter("DIV"))[0] as ITag;
            //if (divNode.Attributes["ID"].Equals("PageBody"))
            //{
            //    NodeList dataDivList = divNode.Children.SearchFor(typeof(Winista.Text.HtmlParser.Tags.Div));

            //}
        }
    }
 /// <summary>
 /// Gets the lexer configuration for the given lexer by delegating to the
 /// int-based indexer.
 /// </summary>
 public ILexerConfig this[Lexer lexer] => this[(int)lexer];
        /// <summary>
        /// Decides whether this handler owns the current lexer text: takes
        /// ownership of preprocessor directives (except #import/#include) and,
        /// once owning, accumulates text until an unescaped newline, then emits
        /// a DirectNode and releases ownership.
        /// </summary>
        internal override void Detect(Lexer l)
        {
            // Claim text starting with '#' followed by a space or newline,
            // excluding #import/#include, which belong elsewhere.
            if (l.Text.TrimStart().StartsWith("#") &&
                (l.MatchNext(" ") || l.MatchNext("\n")) &&
                l.Text != "#import" &&
                l.Text != "#include")
                l.TakeOwnership();
            else if (!l.Text.TrimStart().StartsWith("#") || l.Text == "#import" || l.Text == "#include")
                l.ForceExclude();

            if (l.HasOwnership())
            {
                if (l.Char == '\\' && l.MatchNext("\n"))
                {
                    // Backslash-newline continues the directive on the next line.
                    this.m_KeepOwnership = true;
                }
                else if (l.Char == '\n' && !this.m_KeepOwnership)
                {
                    // An unescaped newline terminates the directive.
                    l.AddNode(new DirectNode(l.Text));
                    l.EndOwnership();
                }
                else if (this.m_KeepOwnership)
                    this.m_KeepOwnership = false;
            }
        }
    /// <summary>
    /// Count the number of words in this list that matches in the phrase
    /// </summary>
    /// <param name="lexer">The words to match</param>
    /// <param name="count">The number of words that match [out]</param>
    /// <returns>Score for the match: 0 if no match</returns>
    internal double matchMembers(Lexer lexer, out int count)
    {
        int matched = 0;     // number of words matched so far
        double total = 0.0;  // accumulated match score

        // Consume the remaining words while they keep matching members.
        while (!lexer.EOF)
        {
            lexer.Preprocess();
            var word = lexer.Symbol();

            // Score this word against the member list.
            double wordScore = matchMembers(word);

            // Stop at the first word that fails to meet the minimum score.
            if (wordScore <= 0.0)
                break;

            matched++;
            total += wordScore;
        }

        count = matched;
        return total;
    }
Exemple #31
0
        /// <summary>
        /// Tokenizes the input with the configured token mappings and runs the
        /// parse tree over it, returning the resulting syntax node.
        /// </summary>
        public SyntaxNode Parse(string input)
        {
            var lexer = new Lexer(input, _tokenMappings);
            var instance = new ParserInstance(lexer);
            return _parseTree.Parse(instance);
        }
Exemple #32
0
        /// <summary>
        /// Tries to parse an if-block (with optional elif/else branches) from
        /// the enumerator into an IfNode. Returns false when the current node
        /// is not an if-block start; throws NotImplementedException when the
        /// block is malformed (missing expression or end tag).
        /// </summary>
        internal static bool TryParseIf(JinjaEnvironment environment, Lexer lexer, ILookaroundEnumerator <ParsingNode> enumerator, [NotNullWhen(true)] out ASTNode?parsedNode)
        {
            WhiteSpaceMode thisConditionEnd;
            WhiteSpaceMode nextConditionStart;
            var            conditions = new Queue <ConditionalNode>();

            parsedNode = default;


            // Not an if-block start: report failure without consuming input.
            if (IfParser.StartBlock.TryParse(enumerator.Current, out var outsideStart, out var thisConditionStart) == false)
            {
                return(false);
            }
            var startParsingNode = enumerator.Current;

            if (IfParser.StartBlock.TryGetAccumulation(IfParser.IfState.Expression, 0, out var previousBlockExpression) == false)
            {
                throw new NotImplementedException();
            }
            enumerator.MoveNext();
            // Children of the current branch: everything up to the next tag.
            var blockChildren = ASTGenerator.ParseUntilFailure(environment, lexer, enumerator).ToArray();


            // Each elif closes the previous branch (queued as a ConditionalNode)
            // and starts accumulating the next one.
            while (IfParser.ElseIfBlock.TryParse(enumerator.Current, out thisConditionEnd, out nextConditionStart))
            {
                conditions.Enqueue(new ConditionalNode(startParsingNode, ExpressionNode.FromString(environment, previousBlockExpression), blockChildren, null,
                                                       new WhiteSpaceControlSet(thisConditionStart, thisConditionEnd)
                                                       ));
                if (IfParser.ElseIfBlock.TryGetAccumulation(IfParser.IfState.Expression, 0, out previousBlockExpression) == false)
                {
                    throw new NotImplementedException();
                }
                enumerator.MoveNext();
                blockChildren      = ASTGenerator.ParseUntilFailure(environment, lexer, enumerator).ToArray();
                thisConditionStart = nextConditionStart;
            }

            // An else branch is modelled as a condition that is always true.
            if (IfParser.ElseBlock.TryParse(enumerator.Current, out thisConditionEnd, out nextConditionStart))
            {
                startParsingNode = enumerator.Current;
                conditions.Enqueue(new ConditionalNode(startParsingNode, ExpressionNode.FromString(environment, previousBlockExpression), blockChildren, null,
                                                       new WhiteSpaceControlSet(thisConditionStart, thisConditionEnd)
                                                       ));
                previousBlockExpression = JinjaEnvironment._TRUE;
                enumerator.MoveNext();
                blockChildren      = ASTGenerator.ParseUntilFailure(environment, lexer, enumerator).ToArray();
                thisConditionStart = nextConditionStart;
            }

            // The end tag is mandatory; its absence is a malformed block.
            if (IfParser.EndBlock.TryParse(enumerator.Current, out thisConditionEnd, out var outsideEnd) == false)
            {
                throw new NotImplementedException();
            }
            // Close the final pending branch.
            conditions.Enqueue(new ConditionalNode(startParsingNode, ExpressionNode.FromString(environment, previousBlockExpression), blockChildren, null,
                                                   new WhiteSpaceControlSet(thisConditionStart, thisConditionEnd)
                                                   ));

            parsedNode = new IfNode(null, conditions, enumerator.Current,
                                    new WhiteSpaceControlSet(outsideStart, outsideEnd)
                                    );
            return(true);
        }
        /// <summary>
        /// Drives the Earley recognizer: builds the initial chart, then scans
        /// one lexeme at a time, closing each new chart, until end of stream or
        /// an unrecoverable error. Returns true when the chart list was built
        /// (even if recognition ultimately failed and error charts were added).
        /// </summary>
        private bool BuildChartList()
        {
            // build initial set
            bool result = false;

            ail.net.parser.EarleyParser.Chart chart = BuildInitialSet();

            if (chart != (object)null)
            {
                Closure(chart);

                result = true;

                // build list of sets (fixed point)
                for (;;)
                {
                    // keep the previous chart for error recovery
                    ail.net.parser.EarleyParser.Chart prev_chart = chart;

                    if (LexerMode == ail.net.parser.EarleyParser.ELexerMode.eIgnoreWhitespace)
                    {
                        // skip whitespace
                        do
                        {
                            Lexer.NextLexeme();
                        }while(Lexer.Token.Type == (int)ail.net.parser.Token.EType.eWhiteSpace);
                    }
                    else
                    {
                        Lexer.NextLexeme();
                    }

                    if (Lexer.Token.Type == (int)ail.net.parser.Token.EType.eEndOfStream)
                    {
                        if (IsChartCompleted(chart))
                        {
                            Status = ail.net.parser.Parser.EStatus.eRecognized;
                        }
                        else
                        {
                            // input ended but the chart is incomplete: run an
                            // error scan from the previous chart
                            result = false;

                            Status = ail.net.parser.Parser.EStatus.eFailed;

                            chart = ErrorScan(prev_chart);

                            Closure(chart);
                        }

                        break;
                    }

                    chart = Scan(chart);

                    if (chart == null)
                    {
                        // scan failed: attempt error recovery, give up when
                        // even the error scan yields no chart
                        result = false;

                        Status = ail.net.parser.Parser.EStatus.eFailed;

                        chart = ErrorScan(prev_chart);

                        if (chart == null)
                        {
                            break;
                        }
                    }

                    Closure(chart);
                }
            }

#if PRINT_STATS
            // diagnostic dump: all charts when few, otherwise just the last one
            if (Charts.Count < 16)
            {
                Console.WriteLine(DecorateCharts());
            }
            else
            {
                Console.WriteLine(DecorateChart((ail.net.parser.EarleyParser.Chart)Charts[Charts.Count - 1]));
            }
#endif
            return(result);
        }
Exemple #34
0
        public void GetStringValue_InvalidStringLiteralToken_ShouldReturnNull(string literalText)
        {
            // Arrange: wrap the raw literal text in a StringComplete token with no
            // leading or trailing trivia, spanning the full text length.
            var noTrivia = Enumerable.Empty <SyntaxTrivia>();
            var span     = new TextSpan(0, literalText.Length);
            var token    = new Token(TokenType.StringComplete, span, literalText, noTrivia, noTrivia);

            // Act & Assert: an invalid string literal yields no extractable value.
            Lexer.TryGetStringValue(token).Should().BeNull();
        }
 /// <summary>xUnit fixture injection: builds the lexer under test from the shared fixture's tables.</summary>
 /// <param name="data">Shared fixture supplying the operation and number-transform operation tables.</param>
 public void SetFixture(OperationsSetupFixture data)
 {
     _lexer = new Lexer(data.Operations, data.NumberTransformOperations);
 }
Exemple #36
0
        /// <summary> Internal routine that actually does the parsing.  The caller
        /// can pass either an InputStream or file name.  If both are passed,
        /// the file name is preferred.
        /// </summary>
        /// <param name="input">Markup stream to read; ignored when <paramref name="file"/> is supplied. Falls back to standard input when both are null.</param>
        /// <param name="file">Optional file name; when non-null it is opened for reading and preferred over <paramref name="input"/>.</param>
        /// <param name="Output">Optional destination stream for the pretty-printed result; pass null to skip output.</param>
        /// <param name="messages">Collector that receives lexer warnings and errors.</param>
        /// <returns>The parsed document root, or null when the parsed tree fails an integrity check.</returns>
        internal Node ParseInternal(Stream input, string file, Stream Output, TidyMessageCollection messages)
        {
            Lexer  lexer;
            Node   document = null;
            Node   doctype;
            Out    o = new OutImpl();          /* normal output stream */
            PPrint pprint;

            /* ensure config is self-consistent */
            _options.Adjust();

            if (file != null)
            {
                input = new FileStream(file, FileMode.Open, FileAccess.Read);
            }
            else if (input == null)
            {
                input = Console.OpenStandardInput();
            }

            if (input != null)
            {
                lexer          = new Lexer(new ClsStreamInImpl(input, _options.CharEncoding, _options.TabSize), _options);
                lexer.messages = messages;

                /*
                 * store pointer to lexer in input stream
                 * to allow character encoding errors to be
                 * reported
                 */
                lexer.input.Lexer = lexer;

                /* Tidy doesn't alter the doctype for generic XML docs */
                if (_options.XmlTags)
                {
                    document = ParserImpl.parseXMLDocument(lexer);
                }
                else
                {
                    document = ParserImpl.parseDocument(lexer);

                    if (!document.CheckNodeIntegrity())
                    {
                        Report.BadTree(lexer);
                        return(null);
                    }

                    Clean cleaner = new Clean(_options.tt);

                    /* simplifies <b><b> ... </b> ...</b> etc. */
                    cleaner.NestedEmphasis(document);

                    /* cleans up <dir>indented text</dir> etc. */
                    cleaner.List2BQ(document);
                    cleaner.BQ2Div(document);

                    /* replaces i by em and b by strong */
                    if (_options.LogicalEmphasis)
                    {
                        cleaner.EmFromI(document);
                    }

                    if (_options.Word2000 && cleaner.IsWord2000(document, _options.tt))
                    {
                        /* prune Word2000's <![if ...]> ... <![endif]> */
                        cleaner.DropSections(lexer, document);

                        /* drop style & class attributes and empty p, span elements */
                        cleaner.CleanWord2000(lexer, document);
                    }

                    /* replaces presentational markup by style rules */
                    if (_options.MakeClean || _options.DropFontTags)
                    {
                        cleaner.CleanTree(lexer, document);
                    }

                    if (!document.CheckNodeIntegrity())
                    {
                        Report.BadTree(lexer);
                        return(null);
                    }
                    doctype = document.FindDocType();
                    if (document.Content != null)
                    {
                        if (_options.Xhtml)
                        {
                            lexer.SetXhtmlDocType(document);
                        }
                        else
                        {
                            lexer.FixDocType(document);
                        }

                        if (_options.TidyMark)
                        {
                            lexer.AddGenerator(document);
                        }
                    }

                    /* ensure presence of initial <?XML version="1.0"?> */
                    if (_options.XmlOut && _options.XmlPi)
                    {
                        lexer.FixXmlPI(document);
                    }

                    if (document.Content != null)
                    {
                        Report.ReportVersion(lexer, doctype);
                        Report.ReportNumWarnings(lexer);
                    }
                }

                /*
                 * Close the input stream, but only if we created it ourselves by
                 * opening the named file above.  A caller-supplied stream (or the
                 * console's standard input) is the caller's to manage.
                 *
                 * Fix: the previous guard also compared against
                 * Console.OpenStandardOutput(), which allocates a fresh Stream on
                 * every call and therefore can never equal the input stream -- the
                 * extra check was always true (and referenced the wrong direction
                 * of the console anyway).  "file != null" alone is the real test.
                 */
                if (file != null)
                {
                    try
                    {
                        input.Close();
                    }
                    catch (IOException)
                    {
                        /* best effort -- a failed close must not abort the parse */
                    }
                }

                if (lexer.messages.Errors > 0)
                {
                    Report.NeedsAuthorIntervention(lexer);
                }

                o.State    = StreamIn.FSM_ASCII;
                o.Encoding = _options.CharEncoding;

                if (lexer.messages.Errors == 0)
                {
                    if (_options.BurstSlides)
                    {
                        Node body;

                        body = null;

                        /*
                         * remove doctype to avoid potential clash with
                         * markup introduced when bursting into slides
                         */
                        /* discard the document type */
                        doctype = document.FindDocType();

                        if (doctype != null)
                        {
                            Node.DiscardElement(doctype);
                        }

                        /* slides use transitional features */
                        lexer.versions |= HtmlVersion.Html40Loose;

                        /* and patch up doctype to match */
                        if (_options.Xhtml)
                        {
                            lexer.SetXhtmlDocType(document);
                        }
                        else
                        {
                            lexer.FixDocType(document);
                        }

                        /* find the body element which may be implicit */
                        body = document.FindBody(_options.tt);

                        if (body != null)
                        {
                            pprint = new PPrint(_options);
                            Report.ReportNumberOfSlides(lexer, pprint.CountSlides(body));
                            pprint.CreateSlides(lexer, document);
                        }
                        else
                        {
                            Report.MissingBody(lexer);
                        }
                    }
                    else if (Output != null)
                    {
                        pprint   = new PPrint(_options);
                        o.Output = Output;

                        if (_options.XmlTags)
                        {
                            pprint.PrintXmlTree(o, (short)0, 0, lexer, document);
                        }
                        else
                        {
                            pprint.PrintTree(o, (short)0, 0, lexer, document);
                        }

                        pprint.FlushLine(o, 0);
                    }
                }

                Report.ErrorSummary(lexer);
            }

            return(document);
        }
Exemple #37
0
 /// <summary>Wires the given lexer into the SQL editor control via the Scintilla helper.</summary>
 /// <param name="lex">Lexer to attach to the SQL text box.</param>
 public void InitLexer(Lexer lex)
 {
     ScintillaHelper.Init(sqlTextBox, lex);
 }
Exemple #38
0
 /// <summary>
 /// Constructs a
 /// <see cref="ParseTreePatternMatcher"/>
 /// from a
 /// <see cref="Antlr4.Runtime.Lexer"/>
 /// and
 /// <see cref="Antlr4.Runtime.Parser"/>
 /// object. The lexer input stream is altered for tokenizing
 /// the tree patterns. The parser is used as a convenient mechanism to get
 /// the grammar name, plus token, rule names.
 /// </summary>
 /// <param name="lexer">Lexer used to tokenize tree patterns.</param>
 /// <param name="parser">Parser supplying the grammar name plus token and rule names.</param>
 public ParseTreePatternMatcher(Lexer lexer, Parser parser)
 {
     // Note: when the pattern delimiters are changed, both must be escapable,
     // e.g. \< and \> must escape BOTH!
     this.lexer  = lexer;
     this.parser = parser;
 }
Exemple #39
0
        /// <summary>
        /// Reparses the code of the current scope and returns the object (either IExpression or ITypeDeclaration derivative)
        /// that is beneath the caret location.
        ///
        /// Used for code completion/symbol resolution.
        /// Mind the extra options that might be passed via the Options parameter.
        /// </summary>
        /// <param name="editor">Editor state: module code plus caret location and offset.</param>
        /// <param name="Options">Flags controlling how far to parse and what to return.</param>
        /// <param name="ctxt">Can be null</param>
        /// <returns>The expression or type declaration under the caret, or null (caret in comment/string, or out of range).</returns>
        public static ISyntaxRegion GetScopedCodeObject(IEditorData editor,
                                                        AstReparseOptions Options = AstReparseOptions.AlsoParseBeyondCaret,
                                                        ResolutionContext ctxt    = null)
        {
            if (ctxt == null)
            {
                ctxt = ResolutionContext.Create(editor);
            }

            var code = editor.ModuleCode;

            int  start         = 0;
            var  startLocation = CodeLocation.Empty;
            bool IsExpression  = false;

            // If the scoped statement contains sub-expressions, try to locate the
            // innermost expression that spans the caret.
            if (ctxt.CurrentContext.ScopedStatement is IExpressionContainingStatement)
            {
                var         exprs      = ((IExpressionContainingStatement)ctxt.CurrentContext.ScopedStatement).SubExpressions;
                IExpression targetExpr = null;

                if (exprs != null)
                {
                    foreach (var ex in exprs)
                    {
                        // SearchExpressionDeeply returns ex itself when the caret
                        // isn't inside it; a different result means we found a hit.
                        if ((targetExpr = ExpressionHelper.SearchExpressionDeeply(ex, editor.CaretLocation))
                            != ex)
                        {
                            break;
                        }
                    }
                }

                if (targetExpr != null && editor.CaretLocation >= targetExpr.Location && editor.CaretLocation <= targetExpr.EndLocation)
                {
                    startLocation = targetExpr.Location;
                    start         = DocumentHelper.GetOffsetByRelativeLocation(editor.ModuleCode, editor.CaretLocation, editor.CaretOffset, startLocation);
                    IsExpression  = true;
                }
            }

            // Caret not inside a known expression: scan the raw code around the caret instead.
            if (!IsExpression)
            {
                // First check if caret is inside a comment/string etc.
                int lastStart = 0;
                int lastEnd   = 0;
                if ((Options & AstReparseOptions.DontCheckForCommentsOrStringSurrounding) == 0)
                {
                    var caretContext = CaretContextAnalyzer.GetTokenContext(code, editor.CaretOffset, out lastStart, out lastEnd);

                    // Return if comment etc. found
                    if (caretContext != TokenContext.None)
                    {
                        return(null);
                    }
                }

                // Could be somewhere in an ITypeDeclaration..

                if (editor.CaretOffset < 0 || editor.CaretOffset >= code.Length)
                {
                    return(null);
                }

                // Anchor on the identifier character at (or just before) the caret.
                if (Lexer.IsIdentifierPart(code[editor.CaretOffset]))
                {
                    start = editor.CaretOffset;
                }
                else if (editor.CaretOffset > 0 && Lexer.IsIdentifierPart(code[editor.CaretOffset - 1]))
                {
                    start = editor.CaretOffset - 1;
                }


                start = CaretContextAnalyzer.SearchExpressionStart(code, start,
                                                                   (lastEnd > 0 && lastEnd < editor.CaretOffset) ? lastEnd : 0);
                startLocation = DocumentHelper.OffsetToLocation(editor.ModuleCode, start);
            }

            if (start < 0 || editor.CaretOffset < start)
            {
                return(null);
            }

            // Reparse only the relevant slice of the module, beginning at the expression start.
            var sv     = new StringView(code, start, Options.HasFlag(AstReparseOptions.AlsoParseBeyondCaret) ? code.Length - start : editor.CaretOffset - start);
            var parser = DParser.Create(sv);

            parser.Lexer.SetInitialLocation(startLocation);
            parser.Step();

            ITypeDeclaration td;

            if (!IsExpression && Options.HasFlag(AstReparseOptions.OnlyAssumeIdentifierList) && parser.Lexer.LookAhead.Kind == DTokens.Identifier)
            {
                td = parser.IdentifierList();
            }
            else if (IsExpression || parser.IsAssignExpression())
            {
                if (Options.HasFlag(AstReparseOptions.ReturnRawParsedExpression))
                {
                    return(parser.AssignExpression());
                }
                else
                {
                    return(ExpressionHelper.SearchExpressionDeeply(parser.AssignExpression(), editor.CaretLocation));
                }
            }
            else
            {
                td = parser.Type();
            }

            if (Options.HasFlag(AstReparseOptions.ReturnRawParsedExpression))
            {
                return(td);
            }

            // Walk down to the innermost type declaration still containing the caret.
            while (td != null && td.InnerDeclaration != null && editor.CaretLocation <= td.InnerDeclaration.EndLocation)
            {
                td = td.InnerDeclaration;
            }

            return(td);
        }
Exemple #40
0
 /// <summary>BDD step: wraps the raw scenario text in a StringContent and creates the lexer under test.</summary>
 /// <param name="p0">Raw input text supplied by the scenario step.</param>
 public void GivenIHaveAnInputOf(string p0)
 {
     lexer = new Lexer(new StringContent(p0));
 }
Exemple #41
0
 /// <summary>Test setup: creates a fresh lexer (reporting back to this fixture) and clears the received-token log.</summary>
 public void SetUp()
 {
     _lexer         = new Lexer(this);
     tokensReceived = new List <string>();
 }
Exemple #42
0
        public void Test_Small_Program()
        {
            // Source program exercised token-by-token below.
            string input = @"let five = 5;
let ten = 10;

let add = fn(x, y) {
    x + y;
};

let result = add(five, ten);
";
            // Expected (type, literal) pairs, listed in lexing order.
            var expectedTokens = new[]
            {
                (TokenType.LET, "let"), (TokenType.IDENT, "five"), (TokenType.ASSIGN, "="),
                (TokenType.INT, "5"), (TokenType.SEMICOLON, ";"),

                (TokenType.LET, "let"), (TokenType.IDENT, "ten"), (TokenType.ASSIGN, "="),
                (TokenType.INT, "10"), (TokenType.SEMICOLON, ";"),

                (TokenType.LET, "let"), (TokenType.IDENT, "add"), (TokenType.ASSIGN, "="),
                (TokenType.FUNCTION, "fn"), (TokenType.LPAREN, "("), (TokenType.IDENT, "x"),
                (TokenType.COMMA, ","), (TokenType.IDENT, "y"), (TokenType.RPAREN, ")"),
                (TokenType.LBRACE, "{"), (TokenType.IDENT, "x"), (TokenType.PLUS, "+"),
                (TokenType.IDENT, "y"), (TokenType.SEMICOLON, ";"), (TokenType.RBRACE, "}"),
                (TokenType.SEMICOLON, ";"),

                (TokenType.LET, "let"), (TokenType.IDENT, "result"), (TokenType.ASSIGN, "="),
                (TokenType.IDENT, "add"), (TokenType.LPAREN, "("), (TokenType.IDENT, "five"),
                (TokenType.COMMA, ","), (TokenType.IDENT, "ten"), (TokenType.RPAREN, ")"),
                (TokenType.SEMICOLON, ";"),

                (TokenType.EOF, "")
            };

            var lexer = new Lexer(input);

            // Drain the lexer, comparing each produced token against its expectation.
            foreach (var (type, literal) in expectedTokens)
            {
                var actual = lexer.NextToken();
                Assert.Equal(type, actual.Type);
                Assert.Equal(literal, actual.Literal);
            }
        }
        /// <summary>Parses one lexer style definition and stores it in the style table.</summary>
        /// <param name="lexerType">Lexer the style belongs to; used to resolve named style indices via its style enum.</param>
        /// <param name="styles">Style table keyed by style index; the parsed style is written (or overwritten) here.</param>
        /// <param name="keyQueue">Remaining key segments; a single segment names the style index (numeric or enum name).</param>
        /// <param name="var">Raw property value, evaluated into "key:value" style data pairs.</param>
        private static void ApplyStyle(Lexer lexerType, SortedDictionary <int, ILexerStyle> styles, Queue <string> keyQueue, string var)
        {
            int styleIndex;

            // Resolve the style index: a lone key segment is either a number or a
            // style-enum member name for this lexer; anything else defaults to 0.
            if (keyQueue.Count == 1)
            {
                string strIndex = keyQueue.Dequeue();
                if (!int.TryParse(strIndex, out styleIndex))
                {
                    if (lexerType != Lexer.Null)
                    {
                        // Case-insensitive lookup against the lexer's style enum.
                        object lexStyle = Enum.Parse(Utilities.GetLexerEnumFromLexerType(lexerType), strIndex, true);
                        styleIndex = (int)lexStyle;
                    }
                    else
                    {
                        styleIndex = 0;
                    }
                }
            }
            else
            {
                styleIndex = 0;
            }

            ILexerStyle style                = new LexerStyle(styleIndex);
            string      styleData            = Evaluate(var);
            Dictionary <string, string> dict = PropertiesReader.GetKeyValuePairs(styleData);

            // Apply each recognized attribute; unknown keys are silently ignored.
            foreach (string styleKey in dict.Keys)
            {
                styleData = dict[styleKey];
                switch (styleKey)
                {
                case "font":
                    style.FontName = styleData;
                    break;

                case "size":
                    style.FontSize = Convert.ToInt32(styleData);
                    break;

                case "fore":
                    style.ForeColor = ColorTranslator.FromHtml(styleData);
                    break;

                case "back":
                    style.BackColor = ColorTranslator.FromHtml(styleData);
                    break;

                case "italics":
                    style.Italics = true;
                    break;

                case "notitalics":
                    style.Italics = false;
                    break;

                case "bold":
                    style.Bold = true;
                    break;

                case "notbold":
                    style.Bold = false;
                    break;

                case "eolfilled":
                    style.EOLFilled = true;
                    break;

                case "noteolfilled":
                    style.EOLFilled = false;
                    break;

                case "underlined":
                    style.Underline = true;
                    break;

                case "notunderlined":
                    style.Underline = false;
                    break;

                case "case":
                    // "m" = mixed, "u" = upper, anything else = lower.
                    style.CaseVisibility = ((styleData == "m") ? CaseVisible.Mixed : ((styleData == "u") ? CaseVisible.Upper : CaseVisible.Lower));
                    break;
                }
            }
            styles[styleIndex] = style;
        }
Exemple #44
0
 /// <summary>Creates a lexer ATN simulator that skips DFA caching; all work is delegated to the base simulator.</summary>
 /// <param name="recog">Owning lexer.</param>
 /// <param name="atn">Lexer ATN to simulate.</param>
 public NonCachingLexerATNSimulator(Lexer recog, ATN atn)
     : base(recog, atn)
 {
 }
Exemple #45
0
        public void TryGetStringValue_WrongTokenType_ShouldReturnNull()
        {
            // Arrange: a two-character numeric token, i.e. not a string-literal token kind.
            var trivia      = Enumerable.Empty <SyntaxTrivia>();
            var numberToken = new Token(TokenType.Number, new TextSpan(0, 2), "12", trivia, trivia);

            // Act & Assert: non-string tokens carry no string value.
            Lexer.TryGetStringValue(numberToken).Should().BeNull();
        }
Exemple #46
0
        /// <summary>
        /// Lexes/parses every input stream (in parallel on .NET 4.0, sequentially otherwise),
        /// reports total parse time, and optionally prints lexer/parser DFA statistics.
        /// </summary>
        /// <param name="factory">Parser factory handed to each per-input Callable_1 work item.</param>
        /// <param name="sources">Character streams to process; each is rewound to offset 0 first.</param>
        protected void parseSources(ParserFactory factory, IEnumerable <ICharStream> sources)
        {
            Stopwatch startTime = Stopwatch.StartNew();

            Thread.VolatileWrite(ref tokenCount, 0);
            int sourceCount = 0;
            int inputSize   = 0;

#if NET_4_0
            // Pool of thread ids the work items draw from; bounds concurrency to NUMBER_OF_THREADS.
            BlockingCollection <int> threadIdentifiers = new BlockingCollection <int>();
            for (int i = 0; i < NUMBER_OF_THREADS; i++)
            {
                threadIdentifiers.Add(i);
            }

            ICollection <Task <int> > results             = new List <Task <int> >();
            QueuedTaskScheduler       executorServiceHost = new QueuedTaskScheduler(NUMBER_OF_THREADS);
            TaskScheduler             executorService     = executorServiceHost.ActivateNewQueue();
#else
            // Without TPL support, each work item is just a deferred delegate invoked inline below.
            ICollection <Func <int> > results = new List <Func <int> >();
#endif
            foreach (ICharStream input in sources)
            {
                sourceCount++;
                input.Seek(0);
                inputSize += input.Size;
#if NET_4_0
                Task <int> futureChecksum = Task.Factory.StartNew <int>(new Callable_1(input, factory, threadIdentifiers).call, CancellationToken.None, TaskCreationOptions.None, executorService);
#else
                Func <int> futureChecksum = new Callable_1(input, factory).call;
#endif
                results.Add(futureChecksum);
            }

            // Fold every per-input checksum into one CRC32 (awaiting completion as a side effect).
            Checksum checksum = new CRC32();
            foreach (var future in results)
            {
#if NET_4_0
                int value = future.Result;
#else
                int value = future();
#endif
                if (COMPUTE_CHECKSUM)
                {
                    updateChecksum(checksum, value);
                }
            }

#if NET_4_0
            executorServiceHost.Dispose();
#endif

            Console.Out.WriteLine("Total parse time for {0} files ({1} KB, {2} tokens, checksum 0x{3:X8}): {4}ms",
                                  sourceCount,
                                  inputSize / 1024,
                                  Thread.VolatileRead(ref tokenCount),
                                  COMPUTE_CHECKSUM ? checksum.Value : 0,
                                  startTime.ElapsedMilliseconds);

            // Lexer DFA statistics, taken from the first shared lexer instance.
            if (sharedLexers.Length > 0)
            {
                Lexer             lexer            = sharedLexers[0];
                LexerATNSimulator lexerInterpreter = lexer.Interpreter;
                DFA[]             modeToDFA        = lexerInterpreter.atn.modeToDFA;
                if (SHOW_DFA_STATE_STATS)
                {
                    int states  = 0;
                    int configs = 0;
                    HashSet <ATNConfig> uniqueConfigs = new HashSet <ATNConfig>();

                    for (int i = 0; i < modeToDFA.Length; i++)
                    {
                        DFA dfa = modeToDFA[i];
                        if (dfa == null || dfa.states == null)
                        {
                            continue;
                        }

                        states += dfa.states.Count;
                        foreach (DFAState state in dfa.states.Values)
                        {
                            configs += state.configs.Count;
                            uniqueConfigs.UnionWith(state.configs);
                        }
                    }

                    Console.Out.WriteLine("There are {0} lexer DFAState instances, {1} configs ({2} unique), {3} prediction contexts.", states, configs, uniqueConfigs.Count, lexerInterpreter.atn.GetContextCacheSize());
                }
            }

            // Parser DFA statistics, taken from the first shared parser instance.
            if (RUN_PARSER && sharedParsers.Length > 0)
            {
                Parser parser = sharedParsers[0];
                // make sure the individual DFAState objects actually have unique ATNConfig arrays
                ParserATNSimulator interpreter = parser.Interpreter;
                DFA[] decisionToDFA            = interpreter.atn.decisionToDFA;

                if (SHOW_DFA_STATE_STATS)
                {
                    int states  = 0;
                    int configs = 0;
                    HashSet <ATNConfig> uniqueConfigs = new HashSet <ATNConfig>();

                    for (int i = 0; i < decisionToDFA.Length; i++)
                    {
                        DFA dfa = decisionToDFA[i];
                        if (dfa == null || dfa.states == null)
                        {
                            continue;
                        }

                        states += dfa.states.Count;
                        foreach (DFAState state in dfa.states.Values)
                        {
                            configs += state.configs.Count;
                            uniqueConfigs.UnionWith(state.configs);
                        }
                    }

                    Console.Out.WriteLine("There are {0} parser DFAState instances, {1} configs ({2} unique), {3} prediction contexts.", states, configs, uniqueConfigs.Count, interpreter.atn.GetContextCacheSize());
                }

                int   localDfaCount      = 0;
                int   globalDfaCount     = 0;
                int   localConfigCount   = 0;
                int   globalConfigCount  = 0;
                int[] contextsInDFAState = new int[0];

                for (int i = 0; i < decisionToDFA.Length; i++)
                {
                    DFA dfa = decisionToDFA[i];
                    if (dfa == null || dfa.states == null)
                    {
                        continue;
                    }

                    if (SHOW_CONFIG_STATS)
                    {
                        // NOTE(review): this loop iterates dfa.states.Keys while the stats
                        // loops above use dfa.states.Values -- confirm the map's key type
                        // is DFAState (self-keyed map) so both views are equivalent.
                        foreach (DFAState state in dfa.states.Keys)
                        {
                            if (state.configs.Count >= contextsInDFAState.Length)
                            {
                                Array.Resize(ref contextsInDFAState, state.configs.Count + 1);
                            }

                            if (state.isAcceptState)
                            {
                                bool hasGlobal = false;
                                foreach (ATNConfig config in state.configs)
                                {
                                    if (config.ReachesIntoOuterContext)
                                    {
                                        globalConfigCount++;
                                        hasGlobal = true;
                                    }
                                    else
                                    {
                                        localConfigCount++;
                                    }
                                }

                                if (hasGlobal)
                                {
                                    globalDfaCount++;
                                }
                                else
                                {
                                    localDfaCount++;
                                }
                            }

                            contextsInDFAState[state.configs.Count]++;
                        }
                    }

                    if (EXPORT_LARGEST_CONFIG_CONTEXTS)
                    {
                        // Dump only configs whose dot output is larger than anything seen so far.
                        foreach (DFAState state in dfa.states.Keys)
                        {
                            foreach (ATNConfig config in state.configs)
                            {
                                string configOutput = config.ToDotString();
                                if (configOutput.Length <= configOutputSize)
                                {
                                    continue;
                                }

                                configOutputSize = configOutput.Length;
                                writeFile(tmpdir, "d" + dfa.decision + ".s" + state.stateNumber + ".a" + config.Alt + ".config.dot", configOutput);
                            }
                        }
                    }
                }

                if (SHOW_CONFIG_STATS && currentPass == 0)
                {
                    Console.Out.WriteLine("  DFA accept states: {0} total, {1} with only local context, {2} with a global context", localDfaCount + globalDfaCount, localDfaCount, globalDfaCount);
                    Console.Out.WriteLine("  Config stats: {0} total, {1} local, {2} global", localConfigCount + globalConfigCount, localConfigCount, globalConfigCount);
                    if (SHOW_DFA_STATE_STATS)
                    {
                        for (int i = 0; i < contextsInDFAState.Length; i++)
                        {
                            if (contextsInDFAState[i] != 0)
                            {
                                Console.Out.WriteLine("  {0} configs = {1}", i, contextsInDFAState[i]);
                            }
                        }
                    }
                }
            }
        }
Exemple #47
0
 /// <summary>
 /// <inheritDoc></inheritDoc>
 /// <p>This action is implemented by calling
 /// <see cref="Antlr4.Runtime.Lexer.Type(int)">Antlr4.Runtime.Lexer.Type(int)</see>
 /// with the
 /// value provided by
 /// <see cref="GetType()">GetType()</see>
 /// .</p>
 /// </summary>
 /// <param name="lexer">Lexer whose current token type is overwritten.</param>
 public virtual void Execute(Lexer lexer)
 {
     // Overwrite the type of the token currently being matched.
     lexer.Type = type;
 }
Exemple #48
0
 /// <summary>Test teardown: drops the parser and lexer references so each test starts from a clean slate.</summary>
 public void KillApp()
 {
     _parser = null;
     _lexer  = null;
 }
Exemple #49
0
        /// <summary>Normalise and parse the given condition values.</summary>
        /// <param name="raw">The raw condition values to normalise.</param>
        /// <param name="tokenContext">The tokens available for this content pack.</param>
        /// <param name="migrator">The migrator which validates and migrates content pack data.</param>
        /// <param name="conditions">The normalised conditions.</param>
        /// <param name="error">An error message indicating why normalisation failed.</param>
        /// <returns>Whether parsing succeeded; on failure <paramref name="conditions"/> is null and <paramref name="error"/> explains why.</returns>
        private bool TryParseConditions(InvariantDictionary <string> raw, IContext tokenContext, IMigration migrator, out IList <Condition> conditions, out string error)
        {
            conditions = new List <Condition>();

            // no conditions
            if (raw == null || !raw.Any())
            {
                error = null;
                return(true);
            }

            // parse conditions
            Lexer lexer = new Lexer();

            foreach (KeyValuePair <string, string> pair in raw)
            {
                // get lexical tokens
                ILexToken[] lexTokens = lexer.ParseBits(pair.Key, impliedBraces: true).ToArray();
                for (int i = 0; i < lexTokens.Length; i++)
                {
                    // fix: migrate the token at the current index (the old code always
                    // passed lexTokens[0], so tokens after the first were never migrated)
                    if (!migrator.TryMigrate(ref lexTokens[i], out error))
                    {
                        conditions = null;
                        return(false);
                    }
                }

                // parse condition key: exactly one token, and it must be a real token reference
                if (lexTokens.Length != 1 || !(lexTokens[0] is LexTokenToken lexToken))
                {
                    error      = $"'{pair.Key}' isn't a valid token name";
                    conditions = null;
                    return(false);
                }
                ITokenString input = new TokenString(lexToken.InputArg, tokenContext);

                // get token
                IToken token = tokenContext.GetToken(lexToken.Name, enforceContext: false);
                if (token == null)
                {
                    error      = $"'{pair.Key}' isn't a valid condition; must be one of {string.Join(", ", tokenContext.GetTokens(enforceContext: false).Select(p => p.Name).OrderBy(p => p))}";
                    conditions = null;
                    return(false);
                }

                // validate input
                if (!token.TryValidateInput(input, out error))
                {
                    conditions = null;
                    return(false);
                }

                // parse values
                if (string.IsNullOrWhiteSpace(pair.Value))
                {
                    error      = $"can't parse condition {pair.Key}: value can't be empty";
                    conditions = null;
                    return(false);
                }
                if (!this.TryParseStringTokens(pair.Value, tokenContext, migrator, out error, out ITokenString values))
                {
                    error = $"can't parse condition {pair.Key}: {error}";
                    return(false);
                }

                // validate token keys & values (only possible up front for immutable values)
                if (!values.IsMutable && !token.TryValidateValues(input, values.SplitValues(), tokenContext, out string customError))
                {
                    error      = $"invalid {lexToken.Name} condition: {customError}";
                    conditions = null;
                    return(false);
                }

                // create condition
                conditions.Add(new Condition(name: token.Name, input: input, values: values));
            }

            // return parsed conditions
            error = null;
            return(true);
        }
Exemple #50
0
        /// <summary>Parses a constant expression from the lexer stream.</summary>
        /// <param name="lex">Lexer positioned at the start of the expression; currently unused.</param>
        /// <returns>A new, empty <see cref="ConstExprNode"/>.</returns>
        /// <remarks>NOTE(review): stub — the lexer is never consumed, so the node carries no data; confirm this is intentional.</remarks>
        public static ConstExprNode Parse(Lexer lex)
        {
            ConstExprNode n = new ConstExprNode();

            return(n);
        }
Exemple #51
0
 /// <summary>Test setup: constructs a fresh lexer and a parser that consumes it.</summary>
 public void Init()
 {
     _lexer  = new Lexer();
     _parser = new Parser(_lexer);
 }
Exemple #52
0
 /// <summary>Always reports this construct as present, regardless of lexer state.</summary>
 /// <param name="lex">Lexer (unused).</param>
 /// <returns>Always true.</returns>
 public static bool IsPresent(Lexer lex)
 {
     return(true);
 }
Exemple #53
0
/// <summary>Syntax node for an identifier token; all construction is delegated to the base node type.</summary>
/// <param name='yyl'>The lexer that produced this token.</param>
        public Ident(Lexer yyl) : base(yyl)
        {
        }
Exemple #54
0
/// <summary>Factory callback used by the parser tables to construct an <c>Ident</c> node.</summary>
/// <param name='yyl'>Lexer forwarded to the <c>Ident</c> constructor.</param>
        public static object Ident_factory(Lexer yyl) => new Ident(yyl);
Exemple #55
0
            /// <summary>
            /// Tokenizes (and, when RUN_PARSER is set, parses) <paramref name="input"/> using the
            /// per-thread shared lexer/parser/listener slots, configured by the suite's many
            /// REUSE_*/ENABLE_* flags, and returns a CRC32 checksum of the processed tokens and
            /// parse events (0 when COMPUTE_CHECKSUM is disabled).
            /// </summary>
            /// <param name="input">Character stream to process.</param>
            /// <param name="thread">Worker slot index; selects which cached lexer/parser/listener to reuse.</param>
            public int parseFile(ICharStream input, int thread)
            {
                Checksum checksum = new CRC32();

                Debug.Assert(thread >= 0 && thread < NUMBER_OF_THREADS);

                try
                {
                    // Lazily create and cache the parse-tree listener for this slot.
                    IParseTreeListener listener = sharedListeners[thread];
                    if (listener == null)
                    {
                        listener = (IParseTreeListener)Activator.CreateInstance(listenerClass);
                        sharedListeners[thread] = listener;
                    }

                    // Reuse the cached lexer when allowed; otherwise build one via reflection and
                    // install the ATN simulator variant dictated by the DFA flags.
                    Lexer lexer = sharedLexers[thread];
                    if (REUSE_LEXER && lexer != null)
                    {
                        lexer.SetInputStream(input);
                    }
                    else
                    {
                        lexer = (Lexer)lexerCtor.Invoke(new object[] { input });
                        sharedLexers[thread] = lexer;
                        if (!ENABLE_LEXER_DFA)
                        {
                            lexer.Interpreter = new NonCachingLexerATNSimulator(lexer, lexer.Atn);
                        }
                        else if (!REUSE_LEXER_DFA)
                        {
                            lexer.Interpreter = new LexerATNSimulator(lexer, sharedLexerATNs[thread]);
                        }
                    }

                    lexer.Interpreter.optimize_tail_calls = OPTIMIZE_TAIL_CALLS;
                    // DFA enabled but not reused across files: start each file from a clean DFA.
                    if (ENABLE_LEXER_DFA && !REUSE_LEXER_DFA)
                    {
                        lexer.Interpreter.atn.ClearDFA();
                    }

                    // Tokenize the whole input up front and tally the global token count.
                    CommonTokenStream tokens = new CommonTokenStream(lexer);
                    tokens.Fill();
                    Interlocked.Add(ref tokenCount, tokens.Size);

                    if (COMPUTE_CHECKSUM)
                    {
                        foreach (IToken token in tokens.GetTokens())
                        {
                            updateChecksum(checksum, token);
                        }
                    }

                    // Lex-only mode: the token checksum is the result.
                    if (!RUN_PARSER)
                    {
                        return((int)checksum.Value);
                    }

                    // Reuse or (re)build the parser for this slot, mirroring the lexer handling above.
                    Parser parser = sharedParsers[thread];
                    if (REUSE_PARSER && parser != null)
                    {
                        parser.SetInputStream(tokens);
                    }
                    else
                    {
                        Parser newParser = (Parser)parserCtor.Invoke(new object[] { tokens });
                        parser = newParser;
                        sharedParsers[thread] = parser;
                    }

                    parser.RemoveErrorListeners();
                    // In two-stage parsing, stage one (SLL) stays silent; errors surface on the retry.
                    if (!TWO_STAGE_PARSING)
                    {
                        parser.AddErrorListener(DescriptiveErrorListener.INSTANCE);
                        parser.AddErrorListener(new SummarizingDiagnosticErrorListener());
                    }

                    if (!ENABLE_PARSER_DFA)
                    {
                        parser.Interpreter = new NonCachingParserATNSimulator(parser, parser.Atn);
                    }
                    else if (!REUSE_PARSER_DFA)
                    {
                        parser.Interpreter = new ParserATNSimulator(parser, sharedParserATNs[thread]);
                    }

                    if (ENABLE_PARSER_DFA && !REUSE_PARSER_DFA)
                    {
                        parser.Interpreter.atn.ClearDFA();
                    }

                    // Stage one of two-stage parsing forces fast SLL prediction; the flags below
                    // propagate the suite's simulator tuning options.
                    parser.Interpreter.PredictionMode                     = TWO_STAGE_PARSING ? PredictionMode.Sll : PREDICTION_MODE;
                    parser.Interpreter.force_global_context               = FORCE_GLOBAL_CONTEXT && !TWO_STAGE_PARSING;
                    parser.Interpreter.always_try_local_context           = TRY_LOCAL_CONTEXT_FIRST || TWO_STAGE_PARSING;
                    parser.Interpreter.optimize_ll1                       = OPTIMIZE_LL1;
                    parser.Interpreter.optimize_unique_closure            = OPTIMIZE_UNIQUE_CLOSURE;
                    parser.Interpreter.optimize_hidden_conflicted_configs = OPTIMIZE_HIDDEN_CONFLICTED_CONFIGS;
                    parser.Interpreter.optimize_tail_calls                = OPTIMIZE_TAIL_CALLS;
                    parser.Interpreter.tail_call_preserves_sll            = TAIL_CALL_PRESERVES_SLL;
                    parser.Interpreter.treat_sllk1_conflict_as_ambiguity  = TREAT_SLLK1_CONFLICT_AS_AMBIGUITY;
                    parser.BuildParseTree = BUILD_PARSE_TREES;
                    if (!BUILD_PARSE_TREES && BLANK_LISTENER)
                    {
                        parser.AddParseListener(listener);
                    }
                    // Bail strategy aborts on the first syntax error (always in stage one).
                    if (BAIL_ON_ERROR || TWO_STAGE_PARSING)
                    {
                        parser.ErrorHandler = new BailErrorStrategy();
                    }

                    // The grammar's entry rule is invoked reflectively by name.
                    MethodInfo parseMethod = parserClass.GetMethod(entryPoint);
                    object     parseResult;

                    IParseTreeListener checksumParserListener = null;

                    try
                    {
                        if (COMPUTE_CHECKSUM)
                        {
                            checksumParserListener = new ChecksumParseTreeListener(checksum);
                            parser.AddParseListener(checksumParserListener);
                        }
                        parseResult = parseMethod.Invoke(parser, null);
                    }
                    catch (TargetInvocationException ex)
                    {
                        // Stage-two fallback: only a ParseCanceledException from the bail strategy
                        // (and only in two-stage mode) triggers a full-context retry.
                        if (!TWO_STAGE_PARSING)
                        {
                            throw;
                        }

                        string sourceName = tokens.SourceName;
                        sourceName = !string.IsNullOrEmpty(sourceName) ? sourceName + ": " : "";
                        Console.Error.WriteLine(sourceName + "Forced to retry with full context.");

                        if (!(ex.InnerException is ParseCanceledException))
                        {
                            throw;
                        }

                        // Rewind the tokens and reconfigure the parser for the accurate (LL) pass.
                        tokens.Reset();
                        if (REUSE_PARSER && sharedParsers[thread] != null)
                        {
                            parser.SetInputStream(tokens);
                        }
                        else
                        {
                            Parser newParser = (Parser)parserCtor.Invoke(new object[] { tokens });
                            parser = newParser;
                            sharedParsers[thread] = parser;
                        }

                        // Stage two reports errors and uses the configured (non-forced-SLL) settings.
                        parser.RemoveErrorListeners();
                        parser.AddErrorListener(DescriptiveErrorListener.INSTANCE);
                        parser.AddErrorListener(new SummarizingDiagnosticErrorListener());
                        if (!ENABLE_PARSER_DFA)
                        {
                            parser.Interpreter = new NonCachingParserATNSimulator(parser, parser.Atn);
                        }
                        parser.Interpreter.PredictionMode                     = PREDICTION_MODE;
                        parser.Interpreter.force_global_context               = FORCE_GLOBAL_CONTEXT;
                        parser.Interpreter.always_try_local_context           = TRY_LOCAL_CONTEXT_FIRST;
                        parser.Interpreter.optimize_ll1                       = OPTIMIZE_LL1;
                        parser.Interpreter.optimize_unique_closure            = OPTIMIZE_UNIQUE_CLOSURE;
                        parser.Interpreter.optimize_hidden_conflicted_configs = OPTIMIZE_HIDDEN_CONFLICTED_CONFIGS;
                        parser.Interpreter.optimize_tail_calls                = OPTIMIZE_TAIL_CALLS;
                        parser.Interpreter.tail_call_preserves_sll            = TAIL_CALL_PRESERVES_SLL;
                        parser.Interpreter.treat_sllk1_conflict_as_ambiguity  = TREAT_SLLK1_CONFLICT_AS_AMBIGUITY;
                        parser.BuildParseTree = BUILD_PARSE_TREES;
                        if (!BUILD_PARSE_TREES && BLANK_LISTENER)
                        {
                            parser.AddParseListener(listener);
                        }
                        if (BAIL_ON_ERROR)
                        {
                            parser.ErrorHandler = new BailErrorStrategy();
                        }

                        parseResult = parseMethod.Invoke(parser, null);
                    }
                    finally
                    {
                        // Never leave the checksum listener attached to a shared, reused parser.
                        if (checksumParserListener != null)
                        {
                            parser.RemoveParseListener(checksumParserListener);
                        }
                    }

                    Assert.IsInstanceOfType(parseResult, typeof(IParseTree));
                    // When trees are built, the blank listener is driven by a post-parse walk instead.
                    if (BUILD_PARSE_TREES && BLANK_LISTENER)
                    {
                        ParseTreeWalker.Default.Walk(listener, (ParserRuleContext)parseResult);
                    }
                }
                catch (Exception e)
                {
                    // With syntax-error reporting off, a cancellation is an expected outcome,
                    // not a failure: return whatever checksum was accumulated.
                    if (!REPORT_SYNTAX_ERRORS && e is ParseCanceledException)
                    {
                        return((int)checksum.Value);
                    }

                    throw;
                }

                return((int)checksum.Value);
            }
Exemple #56
0
        /// <summary>
        /// End-to-end exercise of <c>Lexer</c>: builds a small rule set (null, integer, whitespace,
        /// boolean, float, string), registers it, and lexes several sample inputs, including text
        /// no rule matches.
        /// </summary>
        public void LexerTest()
        {
            #region INIT LEX RULES
            // initialize lex rules
            // - null: custom functional rule that matches only the empty/null string
            ILexRule nullRule = new FunctionalLexRule(
                LexMatchType.Null,
                target => {
                if (string.IsNullOrEmpty(target))
                {
                    return(new LexNode(LexMatchType.Null, 0, ""));
                }
                else
                {
                    return(LexNode.NoMatch);
                }
            }
                );
            // - literal integer: one or more digits on word boundaries
            ILexRule intRule = new SimpleLexRule(LexMatchType.LitInt, "\\b[0-9]+\\b");
            // - whitespace
            ILexRule whitespaceRule = new SimpleLexRule(LexMatchType.Whitespace, "\\s+");
            // - literal boolean: union of the 'true' and 'false' patterns
            ILexRule boolRule = new UnionLexRule(LexMatchType.LitBool,
                                                 new ILexRule[] {
                // true pattern
                new SimpleLexRule(LexMatchType.LitBool, "\\btrue\\b"),
                // false pattern
                new SimpleLexRule(LexMatchType.LitBool, "\\bfalse\\b")
            }
                                                 );
            // - literal floating-point number: int '.' int as a sequence
            SequenceLexRule floatRule = new SequenceLexRule(LexMatchType.LitFloat);

            floatRule.Add(intRule);
            floatRule.Add(new SimpleLexRule(LexMatchType.LitFloatSep, "\\."));
            floatRule.Add(intRule);

            // - literal string: container delimited by unescaped double quotes
            ILexRule         stringQuotRule = new SimpleLexRule(LexMatchType.LitStringQuot, "(?<!\\\\)\"");
            ILexRule         escapeQuotRule = new SimpleLexRule(LexMatchType.LitStringEscape, "\\\"");
            ContainerLexRule stringRule     = new ContainerLexRule(LexMatchType.LitString, stringQuotRule, stringQuotRule);
            #endregion

            #region INIT LEXER
            // initialize lexer
            // NOTE(review): registration order looks precedence-significant (string/bool/float
            // before int) — confirm how Lexer.AddMultiple prioritizes rules before reordering.
            Lexer myLexer = new Lexer();

            myLexer.AddMultiple(
                stringRule,
                boolRule,
                floatRule,
                intRule,
                whitespaceRule,
                nullRule
                );
            #endregion

            // prepare general variables
            string          source;
            string          expected;
            ILexResultGroup lexedSource;

            // test vigorously
            // 1) null: empty input yields a single empty-text result
            source      = "";
            expected    = "";
            lexedSource = myLexer.Lex(source);

            Assert.AreEqual(expected, lexedSource[0].Text);

            // 2) whitespace, integers, and booleans alternate through the result group
            source      = "  672    true   786  ";
            lexedSource = myLexer.Lex(source);

            Assert.AreEqual("  ", lexedSource[0].Text);
            Assert.AreEqual("672", lexedSource[1].Text);
            Assert.AreEqual("    ", lexedSource[2].Text);
            Assert.AreEqual("true", lexedSource[3].Text);
            Assert.AreEqual("   ", lexedSource[4].Text);
            Assert.AreEqual("786", lexedSource[5].Text);
            Assert.AreEqual("  ", lexedSource[6].Text);

            // 3) whitespace, floats, and strings; the quoted body is kept intact, not re-lexed
            source      = "  \"My value is 972.987.\"  972.987";
            lexedSource = myLexer.Lex(source);

            Assert.AreEqual("  ", lexedSource[0].Text);
            Assert.AreEqual("\"My value is 972.987.\"", lexedSource[1].Text);
            Assert.AreEqual("  ", lexedSource[2].Text);
            Assert.AreEqual("972.987", lexedSource[3].Text);

            // 4) unlexed: unmatched spans surface as LexMatchType.None rather than being dropped
            source      = "645true";
            lexedSource = myLexer.Lex(source);

            Assert.AreEqual("645true", lexedSource[0].Text);
            Assert.IsTrue(lexedSource[0].MatchType == LexMatchType.None);

            source      = "   true  samIAm   ";
            lexedSource = myLexer.Lex(source);

            Assert.AreEqual("samIAm", lexedSource[3].Text);
            Assert.IsTrue(lexedSource[3].MatchType == LexMatchType.None);

            // 5) ensure the lexer doesn't stop at first mismatch
            source      = "\"That\" 1 true 23.9  !";
            lexedSource = myLexer.Lex(source);

            Assert.AreEqual("23.9", lexedSource[6].Text);
            Assert.IsTrue(lexedSource[6].MatchType == LexMatchType.LitFloat);

            Assert.AreEqual("!", lexedSource[8].Text);
            Assert.IsTrue(lexedSource[8].MatchType == LexMatchType.None);
        }
Exemple #57
0
 /// <summary>Builds a parser whose lexer recognizes the given block delimiters.</summary>
 /// <param name="blockBegin">Token that opens a block.</param>
 /// <param name="blockContinue">Token that continues a block.</param>
 /// <param name="blockEnd">Token that closes a block.</param>
 /// <param name="escape">Character used to escape delimiter sequences.</param>
 public DefaultParser(string blockBegin, string blockContinue, string blockEnd, char escape)
     => this.lexer = new Lexer(blockBegin, blockContinue, blockEnd, escape);
Exemple #58
0
 /// <summary>Wraps a lexer and primes the one-token lookahead window.</summary>
 /// <param name="lexer">Token source for this parser instance.</param>
 public ParserInstance(Lexer lexer)
 {
     _previous = null;            // nothing consumed yet
     _lexer    = lexer;
     _current  = lexer.Next();    // pull the first token as lookahead
 }
        /// <summary>
        /// Verifies that the lexer extracts all 37 supported tokens, in order, from one expression
        /// covering operators, brackets, punctuation, literals (null/bool/int/float/bin/hex/string),
        /// a Unicode identifier, and the trailing EOF token.
        /// </summary>
        public void verify_valid_tokens_extraction()
        {
            const string expression =
                @"! || && == != < <= > >= + - * / % ~ & ^ | << >> () [] . ? :
                  null true false 123 0.3e-2 0b1010 0xFF '\'\na\n b\nc\n\'' メidメ";

            var lexer  = new Lexer();
            var tokens = lexer.Analyze(expression).ToArray();

            Assert.Equal(37, tokens.Length);
            Assert.Equal("!", tokens[0].Value);
            Assert.Equal(TokenType.L_NOT, tokens[0].Type);
            Assert.Equal("||", tokens[1].Value);
            Assert.Equal(TokenType.L_OR, tokens[1].Type);
            Assert.Equal("&&", tokens[2].Value);
            Assert.Equal(TokenType.L_AND, tokens[2].Type);
            Assert.Equal("==", tokens[3].Value);
            Assert.Equal(TokenType.EQ, tokens[3].Type);
            Assert.Equal("!=", tokens[4].Value);
            Assert.Equal(TokenType.NEQ, tokens[4].Type);
            Assert.Equal("<", tokens[5].Value);
            Assert.Equal(TokenType.LT, tokens[5].Type);
            Assert.Equal("<=", tokens[6].Value);
            Assert.Equal(TokenType.LE, tokens[6].Type);
            Assert.Equal(">", tokens[7].Value);
            Assert.Equal(TokenType.GT, tokens[7].Type);
            Assert.Equal(">=", tokens[8].Value);
            Assert.Equal(TokenType.GE, tokens[8].Type);
            Assert.Equal("+", tokens[9].Value);
            Assert.Equal(TokenType.ADD, tokens[9].Type);
            Assert.Equal("-", tokens[10].Value);
            Assert.Equal(TokenType.SUB, tokens[10].Type);
            Assert.Equal("*", tokens[11].Value);
            Assert.Equal(TokenType.MUL, tokens[11].Type);
            Assert.Equal("/", tokens[12].Value);
            Assert.Equal(TokenType.DIV, tokens[12].Type);
            Assert.Equal("%", tokens[13].Value);
            Assert.Equal(TokenType.MOD, tokens[13].Type);
            Assert.Equal("~", tokens[14].Value);
            Assert.Equal(TokenType.B_NOT, tokens[14].Type);
            Assert.Equal("&", tokens[15].Value);
            Assert.Equal(TokenType.B_AND, tokens[15].Type);
            Assert.Equal("^", tokens[16].Value);
            Assert.Equal(TokenType.XOR, tokens[16].Type);
            Assert.Equal("|", tokens[17].Value);
            Assert.Equal(TokenType.B_OR, tokens[17].Type);
            Assert.Equal("<<", tokens[18].Value);
            Assert.Equal(TokenType.L_SHIFT, tokens[18].Type);
            Assert.Equal(">>", tokens[19].Value);
            Assert.Equal(TokenType.R_SHIFT, tokens[19].Type);
            Assert.Equal("(", tokens[20].Value);
            Assert.Equal(TokenType.L_PAR, tokens[20].Type);
            Assert.Equal(")", tokens[21].Value);
            Assert.Equal(TokenType.R_PAR, tokens[21].Type);
            Assert.Equal("[", tokens[22].Value);
            Assert.Equal(TokenType.L_BRACKET, tokens[22].Type);
            Assert.Equal("]", tokens[23].Value);
            Assert.Equal(TokenType.R_BRACKET, tokens[23].Type);
            Assert.Equal(".", tokens[24].Value);
            Assert.Equal(TokenType.PERIOD, tokens[24].Type);
            Assert.Equal("?", tokens[25].Value);
            Assert.Equal(TokenType.QMARK, tokens[25].Type);
            Assert.Equal(":", tokens[26].Value);
            Assert.Equal(TokenType.COLON, tokens[26].Type);
            // Assert.Null instead of Assert.Equal(null, ...) — the latter trips xUnit analyzer
            // rule xUnit2003 and obscures the intent of the null check.
            Assert.Null(tokens[27].Value);
            Assert.Equal(TokenType.NULL, tokens[27].Type);
            Assert.Equal(true, tokens[28].Value);
            Assert.Equal(TokenType.BOOL, tokens[28].Type);
            Assert.Equal(false, tokens[29].Value);
            Assert.Equal(TokenType.BOOL, tokens[29].Type);
            Assert.Equal(123, tokens[30].Value);
            Assert.Equal(TokenType.INT, tokens[30].Type);
            Assert.Equal(0.3e-2, tokens[31].Value);
            Assert.Equal(TokenType.FLOAT, tokens[31].Type);
            Assert.Equal(10, tokens[32].Value);
            Assert.Equal(TokenType.BIN, tokens[32].Type);
            Assert.Equal(255, tokens[33].Value);
            Assert.Equal(TokenType.HEX, tokens[33].Type);
            Assert.Equal("'\r\na\r\n b\r\nc\r\n'", tokens[34].Value); // used alternatively to verbatim string (new line \n in expression string literal has been replaced by windows \r\n)
            Assert.Equal(TokenType.STRING, tokens[34].Type);
            Assert.Equal("メidメ", tokens[35].Value);
            Assert.Equal(TokenType.ID, tokens[35].Type);
            Assert.Equal(string.Empty, tokens[36].Value);
            Assert.Equal(TokenType.EOF, tokens[36].Type);
        }
Exemple #60
0
 /// <summary>Creates a parse context over the given lexer and advances to the first token.</summary>
 /// <param name="_lexer">Lexer providing the token stream.</param>
 public ParseContext(Lexer _lexer)
 {
     lexer = _lexer;
     NextToken();
 }