public AstNodeArgs(BnfTerm term, CompilerContext context, SourceSpan span, AstNodeList childNodes)
{
    Context = context;
    Term = term;
    Span = span;
    ChildNodes = childNodes;
}
public override Token TryMatch(CompilerContext context, ISourceStream source)
{
    bool ignoreCase = !Grammar.CaseSensitive;
    //Check starting symbol
    if (!source.MatchSymbol(StartSymbol, ignoreCase)) return null;
    //Find end symbol
    source.Position += StartSymbol.Length;

    while (!source.EOF()) {
        int firstCharPos;
        if (EndSymbols.Count == 1)
            firstCharPos = source.Text.IndexOf(EndSymbols[0], source.Position);
        else
            firstCharPos = source.Text.IndexOfAny(_endSymbolsFirsts, source.Position);
        if (firstCharPos < 0) {
            source.Position = source.Text.Length;
            if (_isLineComment) //if it is LineComment, it is ok to hit EOF without final line-break; just return all until end.
                return Token.Create(this, context, source.TokenStart, source.GetLexeme());
            else
                return Grammar.CreateSyntaxErrorToken(context, source.TokenStart, "Unclosed comment block");
        }
        //We found a character that might start an end symbol; let's see if it is true.
        source.Position = firstCharPos;
        foreach (string endSymbol in EndSymbols)
            if (source.MatchSymbol(endSymbol, ignoreCase)) {
                //We found end symbol
                source.Position += endSymbol.Length;
                return Token.Create(this, context, source.TokenStart, source.GetLexeme());
            }//if
        source.Position++; //move to the next char and try again
    }//while
    return null; //never happens
}
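The scan strategy here — jump to the next character that could begin an end symbol via IndexOf/IndexOfAny, then verify the full symbol — can be tried out in isolation. Below is a minimal standalone sketch of the same idea on a plain string; the helper name and signature are hypothetical, it is not part of Irony, and it only handles case-sensitive symbols.

using System;
using System.Linq;

static class CommentScanSketch
{
    // Scans text starting at 'start' for startSymbol ... one of endSymbols.
    // Returns the full comment lexeme, or null when a block comment is left unclosed.
    public static string TryScanComment(string text, int start, string startSymbol,
                                        string[] endSymbols, bool isLineComment)
    {
        if (!Matches(text, start, startSymbol)) return null;
        char[] endFirsts = endSymbols.Select(s => s[0]).ToArray();
        int pos = start + startSymbol.Length;
        while (pos < text.Length)
        {
            // Jump to the next character that could begin an end symbol.
            int candidate = text.IndexOfAny(endFirsts, pos);
            if (candidate < 0) break; // no end symbol before EOF
            foreach (string end in endSymbols)
                if (Matches(text, candidate, end))
                    return text.Substring(start, candidate + end.Length - start);
            pos = candidate + 1; // false alarm, keep scanning
        }
        // Hitting EOF is fine for line comments (no trailing line break required).
        return isLineComment ? text.Substring(start) : null;
    }

    static bool Matches(string text, int at, string symbol) =>
        at + symbol.Length <= text.Length &&
        string.CompareOrdinal(text, at, symbol, 0, symbol.Length) == 0;

    static void Main()
    {
        Console.WriteLine(TryScanComment("x /* hi */ y", 2, "/*", new[] { "*/" }, false));               // /* hi */
        Console.WriteLine(TryScanComment("x /* oops", 2, "/*", new[] { "*/" }, false) ?? "unclosed");    // unclosed
    }
}

An unclosed block returns null so the caller can report an error, mirroring the "Unclosed comment block" branch above.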
public void DoDefaultProcessing(AstNode astRoot, CompilerContext context)
{
    Scope rootScope = new Scope(astRoot, null);
    astRoot.Scope = rootScope;
    RunPhases(astRoot, context, AstProcessingPhase.CreatingScopes, AstProcessingPhase.Allocating,
              AstProcessingPhase.Linking, AstProcessingPhase.MarkTailCalls);
}
public void RunPhases(AstNode astRoot, CompilerContext context, params AstProcessingPhase[] phases)
{
    IEnumerable<AstNode> allNodes = astRoot.GetAll();
    foreach (AstProcessingPhase phase in phases) {
        foreach (AstNode node in allNodes)
            node.OnAstProcessing(context, phase);
    }//foreach phase
}
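Note the loop order in RunPhases: phases form the outer loop, so every node finishes one phase before any node starts the next. A tiny standalone sketch of that phase-major order, with hypothetical phase and node names:

using System;

enum Phase { CreatingScopes, Allocating, Linking, MarkTailCalls }

class PhaseOrderSketch
{
    static void Main()
    {
        string[] nodes = { "root", "child1", "child2" };
        // Phase-major order, mirroring RunPhases: outer loop over phases, inner loop over nodes.
        foreach (Phase phase in Enum.GetValues(typeof(Phase)))
            foreach (string node in nodes)
                Console.WriteLine($"{phase}: {node}");
    }
}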
public override Token TryMatch(CompilerContext context, ISourceStream source)
{
    Match m = _expression.Match(source.Text, source.Position);
    if (!m.Success)
        return null;
    source.Position += m.Length + 1;
    string text = source.GetLexeme();
    return Token.Create(this, context, source.TokenStart, text);
}
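One subtlety with Regex.Match(text, startat) is that the returned match may begin anywhere at or after startat, so a terminal that must match exactly at the scanner position typically anchors the pattern with \G or checks m.Index against source.Position; the extra "+ 1" above presumably accounts for a one-character delimiter that the pattern does not capture, which is an assumption about this particular grammar. A small standalone illustration of the startat behaviour (not Irony code):

using System;
using System.Text.RegularExpressions;

class RegexStartAtSketch
{
    static void Main()
    {
        string text = "abc123";
        // Unanchored: the search starts at 0, but the match actually begins at index 3.
        Match loose = new Regex(@"\d+").Match(text, 0);
        Console.WriteLine($"{loose.Index}: {loose.Value}");     // 3: 123
        // \G anchors the match to the starting position itself.
        Regex anchored = new Regex(@"\G\d+");
        Console.WriteLine(anchored.Match(text, 0).Success);     // False
        Console.WriteLine(anchored.Match(text, 3).Value);       // 123
    }
}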
 public override Token TryMatch(CompilerContext context, ISourceStream source)
 {
     Token token = base.TryMatch(context, source);
     if (token != null && (IdentifierPattern.IsMatch(token.Text) && !token.IsKeyword))
     {
         return token;
     }
     else
     {
         return null;
     }
 }
public override IEnumerable<Token> BeginFiltering(CompilerContext context, IEnumerable<Token> tokens)
{
    _prevLine = 0;
    _indents.Clear();
    foreach (Token token in tokens) {
        if (token.Terminal == Grammar.Eof) {
            yield return CreateSpecialToken(Grammar.NewLine, context, token.Location); //this is necessary because grammar rules reference the NewLine terminal
            //unindent all buffered indents
            if (_trackIndents)
                foreach (int i in _indents)
                    yield return CreateSpecialToken(Grammar.Dedent, context, token.Location);
            _indents.Clear();
            //return EOF token
            yield return token;
            yield break;
        }//if Eof

        //Now deal with normal, non-EOF tokens
        //We intercept only content tokens on new lines
        if (token.Terminal.Category != TokenCategory.Content || token.Location.Line == _prevLine) {
            yield return token;
            continue;
        }
        //If we are here, we have a content token on a new line; produce a NewLine token and possibly indents
        yield return CreateSpecialToken(Grammar.NewLine, context, token.Location);
        _prevLine = token.Location.Line;
        if (!_trackIndents) {
            yield return token;
            continue;
        }
        //Now take care of indents
        int currIndent = token.Location.Column;
        int prevIndent = _indents.Count == 0 ? 0 : _indents.Peek();
        if (currIndent > prevIndent) {
            _indents.Push(currIndent);
            yield return CreateSpecialToken(Grammar.Indent, context, token.Location);
        } else if (currIndent < prevIndent) {
            //produce one or more dedent tokens while popping indents from the stack
            while (_indents.Count > 0 && _indents.Peek() > currIndent) {
                _indents.Pop();
                yield return CreateSpecialToken(Grammar.Dedent, context, token.Location);
            }
            if (_indents.Count == 0 || _indents.Peek() != currIndent) {
                yield return Grammar.CreateSyntaxErrorToken(context, token.Location,
                    "Invalid dedent level, no previous matching indent found.");
                //TODO: add error recovery here
            }
        }//else if currIndent < prevIndent
        yield return token;
    }//foreach token
}
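The indent/dedent bookkeeping above can be reproduced in isolation: keep a stack of open indentation levels, push (and emit an indent token) when a line starts deeper than the top of the stack, and pop while emitting dedent tokens when it starts shallower. A standalone sketch of that logic over raw lines, with hypothetical token names and no dependency on Irony types:

using System;
using System.Collections.Generic;
using System.Linq;

class IndentSketch
{
    static IEnumerable<string> Tokenize(IEnumerable<string> lines)
    {
        var indents = new Stack<int>();
        foreach (string line in lines.Where(l => l.Trim().Length > 0))
        {
            int column = line.Length - line.TrimStart().Length;
            int prev = indents.Count == 0 ? 0 : indents.Peek();
            if (column > prev)
            {
                indents.Push(column);
                yield return "INDENT";
            }
            else if (column < prev)
            {
                while (indents.Count > 0 && indents.Peek() > column)
                {
                    indents.Pop();
                    yield return "DEDENT";
                }
                if (indents.Count == 0 ? column != 0 : indents.Peek() != column)
                    yield return "ERROR: no matching indent level";
            }
            yield return line.Trim();
        }
        // Close any indentation levels still open at end of input.
        while (indents.Count > 0) { indents.Pop(); yield return "DEDENT"; }
    }

    static void Main()
    {
        var src = new[] { "if x:", "    a", "    b", "c" };
        Console.WriteLine(string.Join(" | ", Tokenize(src)));
        // if x: | INDENT | a | b | DEDENT | c
    }
}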
        //Used in unit tests
        public static CompilerContext CreateDummy()
        {
            CompilerContext ctx = new CompilerContext(LanguageCompiler.CreateDummy());

            return(ctx);
        }
 protected virtual Token QuickParse(CompilerContext context, ISourceStream source)
 {
     return(null);
 }
Example #10
 public static Token Create(Terminal term, CompilerContext context, SourceLocation location, string text)
 {
     return(Create(term, context, location, text, text));
 }
 public virtual string GetSyntaxErrorMessage(CompilerContext context, StringList expectedList)
 {
     return(null); //Irony will then construct a default message
 }
Example #12
 // Override this method in language grammar if you want a custom node creation mechanism.
 public virtual AstNode CreateNode(CompilerContext context, ActionRecord reduceAction, 
                               SourceSpan sourceSpan, AstNodeList childNodes)
 {
     return null;
 }
Example #13
 //This method is called if Scanner failed to produce token
 public virtual Token TryMatch(CompilerContext context, ISourceStream source)
 {
     return null;
 }
Example #14
        public int Priority; //default is 0

        #endregion

        #region virtuals
        public virtual Token TryMatch(CompilerContext context, ISourceStream source)
        {
            return(null);
        }
Example #15
 public override Token TryMatch(CompilerContext context, ISourceStream source)
 {
     return _handler(this, context, source);
 }
public AstNode Parse(CompilerContext context, SourceFile source)
{
    _context = context;
    int start = Environment.TickCount;
    Scanner.Prepare(context, source);
    IEnumerable<Token> tokenStream = Scanner.BeginScan();
    //chain all token filters
    foreach (TokenFilter filter in Grammar.TokenFilters) {
        tokenStream = filter.BeginFiltering(context, tokenStream);
    }
    //finally, parser takes token stream and produces root Ast node
    AstNode rootNode = Parser.Parse(context, tokenStream);
    _compileTime = Environment.TickCount - start;
    return rootNode;
}
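The filter-chaining loop works because BeginFiltering implementations are C# iterators: each call merely wraps the previous stream, and tokens only flow once the parser enumerates the outermost one. A minimal standalone sketch of the same wrapping pattern, with hypothetical filters rather than Irony types:

using System;
using System.Collections.Generic;
using System.Linq;

class FilterChainSketch
{
    // Each "filter" is a lazy transformation over the token stream.
    static IEnumerable<string> DropComments(IEnumerable<string> tokens) =>
        tokens.Where(t => !t.StartsWith("//"));

    static IEnumerable<string> Upcase(IEnumerable<string> tokens) =>
        tokens.Select(t => t.ToUpperInvariant());

    static void Main()
    {
        IEnumerable<string> stream = new[] { "let", "// note", "x" };
        var filters = new Func<IEnumerable<string>, IEnumerable<string>>[] { DropComments, Upcase };
        // Chain the filters exactly like the loop above: each wraps the previous stream.
        foreach (var filter in filters)
            stream = filter(stream);
        Console.WriteLine(string.Join(" ", stream));   // LET X
    }
}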
Example #17

        public AstNode Parse(CompilerContext context, IEnumerable<Token> tokenStream)
        {
            _context       = context;
            _caseSensitive = _context.Compiler.Grammar.CaseSensitive;
            Reset();
            _input = tokenStream.GetEnumerator();
            NextToken();
            while (true)
            {
                if (_currentState == Data.FinalState)
                {
                    AstNode result = Stack[0].Node;
                    Stack.Reset();
                    return(result);
                }
                //check for scanner error
                if (_currentToken.Terminal.Category == TokenCategory.Error)
                {
                    ReportScannerError();
                    if (!Recover())
                    {
                        return(null);
                    }
                    continue;
                }
                //Get action
                ActionRecord action = GetCurrentAction();
                if (action == null)
                {
                    ReportParserError();
                    if (!Recover())
                    {
                        return(null); //did not recover
                    }
                    continue;
                }//action==null

                if (action.HasConflict())
                {
                    action = Data.Grammar.OnActionConflict(this, _currentToken, action);
                }
                this.OnActionSelected(_currentToken, action);
                switch (action.ActionType)
                {
                case ParserActionType.Operator:
                    if (GetActionTypeForOperation(_currentToken) == ParserActionType.Shift)
                    {
                        goto case ParserActionType.Shift;
                    }
                    else
                    {
                        goto case ParserActionType.Reduce;
                    }

                case ParserActionType.Shift:
                    ExecuteShiftAction(action);
                    break;

                case ParserActionType.Reduce:
                    ExecuteReduceAction(action);
                    break;
                } //switch
            }     //while
        }         //Parse
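For the Operator case, GetActionTypeForOperation presumably resolves the shift/reduce choice from operator precedence and associativity; its exact rule is not shown in this snippet. A simplified standalone illustration of that kind of decision, with a hypothetical precedence table and not Irony's implementation:

using System;
using System.Collections.Generic;

class PrecedenceSketch
{
    static readonly Dictionary<string, int> Precedence =
        new Dictionary<string, int> { ["+"] = 1, ["-"] = 1, ["*"] = 2, ["/"] = 2 };

    // Shift when the incoming operator binds tighter than the one already on the stack;
    // otherwise reduce first (left associativity on equal precedence).
    static string Decide(string onStack, string incoming) =>
        Precedence[incoming] > Precedence[onStack] ? "Shift" : "Reduce";

    static void Main()
    {
        Console.WriteLine(Decide("+", "*"));  // Shift  (in 1 + 2 * 3, keep scanning)
        Console.WriteLine(Decide("*", "+"));  // Reduce (in 1 * 2 + 3, reduce 1 * 2 first)
        Console.WriteLine(Decide("+", "+"));  // Reduce (left-associative)
    }
}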
//Most numbers in source programs are single-digit literals (0, 1, 2, and so on up to 9),
//so we try a quick parse for these without starting the whole general process
protected override Token QuickParse(CompilerContext context, ISourceStream source)
{
    char current = source.CurrentChar;
    if (char.IsDigit(current) && QuickParseTerminators.IndexOf(source.NextChar) >= 0) {
        int iValue = current - '0';
        object value = null;
        switch (DefaultIntTypes[0]) {
            case TypeCode.Int32: value = iValue; break;
            case TypeCode.UInt32: value = (UInt32)iValue; break;
            case TypeCode.Byte: value = (byte)iValue; break;
            case TypeCode.SByte: value = (sbyte)iValue; break;
            case TypeCode.Int16: value = (Int16)iValue; break;
            case TypeCode.UInt16: value = (UInt16)iValue; break;
            default: return null;
        }
        Token token = Token.Create(this, context, source.TokenStart, current.ToString(), value);
        source.Position++;
        return token;
    } else
        return null;
}
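The fast path relies on two small facts: subtracting '0' from a digit character yields its numeric value, and the cast in the switch determines which boxed CLR type the token value carries. A tiny standalone check (not Irony code):

using System;

class QuickDigitSketch
{
    static void Main()
    {
        char current = '7';
        int iValue = current - '0';       // 7: digit characters are contiguous in ASCII/Unicode
        object asInt32 = iValue;          // boxes an Int32
        object asByte  = (byte)iValue;    // boxes a Byte instead
        Console.WriteLine($"{iValue} {asInt32.GetType().Name} {asByte.GetType().Name}");
        // 7 Int32 Byte
    }
}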
Example #19
public void Prepare(CompilerContext context, ISourceStream source)
{
    _context = context;
    _caseSensitive = context.Compiler.Grammar.CaseSensitive;
    _source = source;
    _currentToken = null;
    _bufferedTokens.Clear();
    ResetSource();
}
 private Token CreateSpecialToken(Terminal term, CompilerContext context, SourceLocation location)
 {
     return Token.Create(term, context, location, string.Empty);
 }
Example #21
 public virtual IEnumerable<Token> BeginFiltering(CompilerContext context, IEnumerable<Token> tokens)
 {
     yield break;
 }
Example #22
 public virtual string GetSyntaxErrorMessage(CompilerContext context, StringList expectedList)
 {
     return null; //Irony will then construct a default message
 }
 //Used in unit tests
 public static CompilerContext CreateDummy()
 {
     CompilerContext ctx = new CompilerContext(LanguageCompiler.CreateDummy());
     return ctx;
 }
Example #24
public static Token CreateSyntaxErrorToken(CompilerContext context, SourceLocation location, string message, params object[] args)
{
    if (args != null && args.Length > 0)
        message = string.Format(message, args);
    return Token.Create(Grammar.SyntaxError, context, location, message);
}
Example #25
 public override Token TryMatch(CompilerContext context, ISourceStream source)
 {
     return(_handler(this, context, source));
 }
Example #26
public AstNode Parse(CompilerContext context, IEnumerable<Token> tokenStream)
{
    _context = context;
    _caseSensitive = _context.Compiler.Grammar.CaseSensitive;
    Reset();
    _input = tokenStream.GetEnumerator();
    NextToken();
    while (true) {
        if (_currentState == Data.FinalState) {
            AstNode result = Stack[0].Node;
            Stack.Reset();
            return result;
        }
        //check for scanner error
        if (_currentToken.Terminal.Category == TokenCategory.Error) {
            ReportScannerError();
            if (!Recover())
                return null;
            continue;
        }
        //Get action
        ActionRecord action = GetCurrentAction();
        if (action == null) {
            ReportParserError();
            if (!Recover())
                return null; //did not recover
            continue;
        }//action==null

        if (action.HasConflict())
            action = Data.Grammar.OnActionConflict(this, _currentToken, action);
        this.OnActionSelected(_currentToken, action);
        switch (action.ActionType) {
            case ParserActionType.Operator:
                if (GetActionTypeForOperation(_currentToken) == ParserActionType.Shift)
                    goto case ParserActionType.Shift;
                else
                    goto case ParserActionType.Reduce;

            case ParserActionType.Shift:
                ExecuteShiftAction(action);
                break;

            case ParserActionType.Reduce:
                ExecuteReduceAction(action);
                break;
        }//switch
    }//while
}
 // Override this method in language grammar if you want a custom node creation mechanism.
 public virtual AstNode CreateNode(CompilerContext context, ActionRecord reduceAction,
                                   SourceSpan sourceSpan, AstNodeList childNodes)
 {
     return(null);
 }