Example #1
    public override TokenAst TryMatch(CompilerContext context, ISourceStream source)
    {
      // Match the comment start symbol; if it is not present here, this terminal does not apply.
      if (!source.MatchSymbol(_startSymbol, false)) return null;
      source.Position += _startSymbol.Length;

      while (!source.EOF())
      {
        int firstCharPos = source.Text.IndexOf(_endSymbol, source.Position);

        // No end symbol before the end of input.
        if (firstCharPos < 0)
        {
          source.Position = source.Text.Length;

          // A line comment may run to the end of input; an unterminated block comment is a syntax error.
          if (_isLineComment)
            return TokenAst.Create(this, context, source.TokenStart, source.GetLexeme());
          else
            return Grammar.CreateSyntaxErrorToken(context, source.TokenStart, "Unclosed comment block");
        }
      
        // Move to the candidate position and confirm the full end symbol matches there.
        source.Position = firstCharPos;
        if (source.MatchSymbol(_endSymbol, false))
        {
          source.Position += _endSymbol.Length;
          return TokenAst.Create(this, context, source.TokenStart, source.GetLexeme());
        }
        
        source.Position++; // End symbol not confirmed here; advance one character and keep scanning.
      }

      // Not expected to be reached: every iteration either returns or advances toward a located end symbol.
      throw new NotSupportedException();
    }
Example #2
 public void Prepare(CompilerContext context, ISourceStream source)
 {
   // Reset all scanner state so the same scanner instance can process a new source.
   _context = context;
   _source = source;
   _currentToken = null;
   _bufferedTokens.Clear();
   ResetSource();
 }
Example #3
 public AstNode Parse(string source)
 {
   // Create a fresh compilation context, scan the source into a token stream,
   // and hand the tokens to the parser to build the AST.
   Context = new CompilerContext(this);
   Scanner.Prepare(Context, new SourceFile(source, "Source"));
   var tokenStream = Scanner.BeginScan();
   var rootNode = Parser.Parse(Context, tokenStream);
   return rootNode;
 }
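
The Parse method above wires the whole pipeline together: it creates a fresh context, prepares the scanner over the source text, and feeds the resulting token stream to the parser. A minimal sketch of driving it from client code follows; the LanguageCompiler and ExpressionGrammar names are assumptions chosen for illustration and do not appear in the examples on this page.

 // Hypothetical driver for the Parse(string) entry point shown above.
 // LanguageCompiler and ExpressionGrammar are assumed names, not part of these examples.
 var compiler = new LanguageCompiler(new ExpressionGrammar());
 AstNode root = compiler.Parse("2 + 3 * x");
 if (root != null)
   Console.WriteLine("Parse succeeded, root node: " + root);
 else
   Console.WriteLine("Parse failed; check the compiler context for reported errors.");
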
Example #4
    protected override TokenAst CreateToken(CompilerContext context, ISourceStream source)
    {
      TokenAst token = base.CreateToken(context, source);

      // Mark identifiers that collide with reserved words.
      if (Keywords.Contains(token.Text))
        token.IsKeyword = true;

      return token;
    }
Example #5
    public override TokenAst TryMatch(CompilerContext context, ISourceStream source)
    {
      bool isVerbatim = false;
      int start = source.Position;

      // A leading '@' marks a verbatim string: escape sequences are not processed.
      if (source.CurrentChar == '@')
      {
        isVerbatim = true;
        source.Position++;
        start++;
      }

      // The literal must start with a quote character; otherwise this terminal does not apply.
      if (IsCurrentQuote(source))
      {
        source.Position++;
        start++;
      }
      else 
        return null;

      while (!source.EOF())
      {
        if (!isVerbatim)
        {
          if (source.CurrentChar == '\\')
          {
            //TODO: Escape processing
            source.Position += 2;
            continue;
          }
          else
            //A single-line string that runs into a line terminator is unterminated, so it is not matched
            if (ParserData.LineTerminators.IndexOf(source.CurrentChar) >= 0)
              return null;
        }

        // Stop at the closing quote.
        if (IsCurrentQuote(source)) break;
        
        source.Position++;
      }

      // Consume the closing quote; if input ended without one, the literal is not matched.
      if (IsCurrentQuote(source))
        source.Position++;
      else
        return null;
      
      // The lexeme is the full matched text; the body excludes the quotes (and the '@' prefix).
      string lexeme = source.GetLexeme();
      string body = source.Text.Substring(start, source.Position - start - 1);
      //TODO: handle this in escape processing
      if (!isVerbatim)
        body = body.Replace("\\'", "'").Replace("\\\"", "\"").Replace("\\\\", "\\");

      TokenAst token = TokenAst.Create(this, context, source.TokenStart, lexeme, body);     
      return token;

      //return Grammar.CreateSyntaxErrorToken(context, source.TokenStart, "Failed to convert the value");
    }
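
The TODO comments above leave escape processing incomplete: only \', \" and \\ are rewritten in the non-verbatim case. A sketch of a fuller unescape helper is shown below; the Unescape name and the set of supported escapes are assumptions rather than part of the example.

    // Hypothetical helper for the non-verbatim case; the name and the escape set are assumptions.
    private static string Unescape(string body)
    {
      var sb = new System.Text.StringBuilder(body.Length);
      for (int i = 0; i < body.Length; i++)
      {
        if (body[i] == '\\' && i + 1 < body.Length)
        {
          char next = body[++i];
          switch (next)
          {
            case 'n': sb.Append('\n'); break;
            case 'r': sb.Append('\r'); break;
            case 't': sb.Append('\t'); break;
            case '0': sb.Append('\0'); break;
            default: sb.Append(next); break; // covers \' \" \\ and leaves unknown escapes as the raw character
          }
        }
        else
          sb.Append(body[i]);
      }
      return sb.ToString();
    }
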
Example #6
 public override TokenAst TryMatch(CompilerContext context, ISourceStream source)
 {
   // Run the terminal's regular expression starting at the current position.
   Match result = Expression.Match(source.Text, source.Position);
   // Regex.Match(text, startat) may find a match further ahead in the input, so accept it
   // only if it begins exactly at the current position.
   if (!result.Success || result.Index != source.Position)
     return null;
   source.Position += result.Length;
   
   return CreateToken(context, source);
 }
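
Because Regex.Match(string, startat) is free to report a match that begins later in the input, a terminal built this way usually anchors its pattern at the match position. A sketch of such a pattern follows, assuming Expression is an assignable Regex member (only its use in Match is shown above, so that is an assumption):

 // Hypothetical pattern setup: the \G anchor forces the match to begin exactly at the
 // position passed to Regex.Match(string, startat). The identifier pattern is only an example.
 Expression = new Regex(@"\G[A-Za-z_][A-Za-z0-9_]*", RegexOptions.Compiled);
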
Example #7
    public override TokenAst TryMatch(CompilerContext context, ISourceStream source)
    {
      // A symbol terminal matches only its exact literal text at the current position.
      if (!source.MatchSymbol(Symbol, false))
        return null;
    
      source.Position += Symbol.Length;
      TokenAst tokenAst = TokenAst.Create(this, context, source.TokenStart, Symbol);

      return tokenAst;
    }
Example #8
    public AstNode Parse(CompilerContext context, IEnumerable<TokenAst> tokenStream)
    {
      // Store the context, reset the parser stack, and pull the first token from the stream.
      _context = context;
      Reset();
      _input = tokenStream.GetEnumerator();
      NextToken();
      
      while (true)
      {
        // Reaching the final state means the root node has been reduced; return it.
        if (_currentState == Data.FinalState)
        {
          AstNode result = _stack[0].Node;
          _stack.Reset();
          return result;
        }
        
        // A token in the Error category means the scanner already failed; report and stop.
        if (_currentToken.Terminal.Category == TokenCategory.Error)
        {
          ReportScannerError();
          return null;
        }

        // Look up the action for the current state and lookahead token.
        ActionRecord action = GetCurrentAction();
        if (action == null)
        {
          ReportParserError();          
          return null;
        }

        //TODO: perform conflict resolving
        //if (action.HasConflict())

        switch (action.ActionType)
        {
          // An Operator action is a shift/reduce conflict resolved by operator precedence and associativity.
          case ParserActionType.Operator:
            if (GetActionTypeForOperation(_currentToken) == ParserActionType.Shift)
              goto case ParserActionType.Shift;
            else
              goto case ParserActionType.Reduce;

          case ParserActionType.Shift:
            ExecuteShiftAction(action);
            break;

          case ParserActionType.Reduce:
            ExecuteReduceAction(action);
            break;
        }
      }
    }
Example #9
 protected override TokenAst CreateToken(CompilerContext context, ISourceStream source)
 {
   TokenAst token = base.CreateToken(context, source);
   token.Value = ConvertNumber(token.Text); // Attach the numeric value converted from the lexeme text.
   return token;
 }
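
ConvertNumber itself is not shown in these examples. A minimal sketch of what such a helper might do, assuming only plain decimal integers and floating-point literals need to be handled:

 // Hypothetical helper; the real ConvertNumber is not shown in the examples above.
 // Assumes only decimal integer and floating-point literals.
 private static object ConvertNumber(string text)
 {
   if (text.Contains(".") || text.Contains("e") || text.Contains("E"))
     return double.Parse(text, System.Globalization.CultureInfo.InvariantCulture);
   return long.Parse(text, System.Globalization.CultureInfo.InvariantCulture);
 }
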
Example #10
 public static TokenAst CreateSyntaxErrorToken(CompilerContext context, SourceLocation location, string message, params object[] args)
 {
   // Format the message only when arguments are supplied, so plain messages with braces are left untouched.
   if (args != null && args.Length > 0)
     message = string.Format(message, args);
   return TokenAst.Create(Grammar.SyntaxError, context, location, message);
 }
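
The optional params array lets a caller build a formatted error message. A short usage sketch follows, where the surrounding terminal, lexemeLength, and _maxLength are hypothetical:

 // Hypothetical call site inside a terminal's TryMatch; lexemeLength and _maxLength are illustrative only.
 if (lexemeLength > _maxLength)
   return Grammar.CreateSyntaxErrorToken(context, source.TokenStart,
     "Identifier exceeds the maximum length of {0} characters", _maxLength);
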
Example #11
        public override TokenAst TryMatch(CompilerContext context, ISourceStream source)
        {
            bool isVerbatim = false;
            int  start      = source.Position;

            if (source.CurrentChar == '@')
            {
                isVerbatim = true;
                source.Position++;
                start++;
            }

            if (IsCurrentQuote(source))
            {
                source.Position++;
                start++;
            }
            else
            {
                return null;
            }

            while (!source.EOF())
            {
                if (!isVerbatim)
                {
                    if (source.CurrentChar == '\\')
                    {
                        //TODO: Escape processing
                        source.Position += 2;
                        continue;
                    }
                    else
                    //A single-line string that runs into a line terminator is unterminated, so it is not matched
                    if (ParserData.LineTerminators.IndexOf(source.CurrentChar) >= 0)
                    {
                        return null;
                    }
                }

                if (IsCurrentQuote(source))
                {
                    break;
                }

                source.Position++;
            }

            if (IsCurrentQuote(source))
            {
                source.Position++;
            }
            else
            {
                return null;
            }

            string lexeme = source.GetLexeme();
            string body   = source.Text.Substring(start, source.Position - start - 1);

            //TODO: handle this in escape processing
            if (!isVerbatim)
            {
                body = body.Replace("\\'", "'").Replace("\\\"", "\"").Replace("\\\\", "\\");
            }

            TokenAst token = TokenAst.Create(this, context, source.TokenStart, lexeme, body);

            return token;

            //return Grammar.CreateSyntaxErrorToken(context, source.TokenStart, "Failed to convert the value");
        }
Example #12
 public virtual TokenAst TryMatch(CompilerContext context, ISourceStream source)
 {
   return null;
 }
Example #13
 public virtual TokenAst TryMatch(CompilerContext context, ISourceStream source)
 {
      return null;
 }
Example #14
 protected virtual TokenAst CreateToken(CompilerContext context, ISourceStream source)
 {
   // Default token creation: the lexeme text is used as both the token text and its value.
   string lexeme = source.GetLexeme();
   TokenAst token = TokenAst.Create(this, context, source.TokenStart, lexeme, lexeme);
   return token;
 }