public Token NextToken()
        {
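            // Drive the lexer state machine one character at a time until a
            // non-skip token is accepted or the input is exhausted.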
            State = LexerState.Ready;
            char c;
            while (StringEnd != Input.Length)
            {
                c = read();
                switchState(c);
                if (State == LexerState.Accepted)
                {
                    Token T = Accept(AcceptType);
                    if (T.Type != TokenType.Token_SKIP)
                        return T;
                }
            }

            // All characters consumed: if the lexer is between tokens, report end of file;
            // otherwise the input ended in the middle of a token.
            if (State == LexerState.Ready)
            {
                Token T = new Token();
                T.Lexeme = "";
                T.Type = TokenType.Token_EOF;
                return T;
            }
            else
                throw new LexerException("Unexpected end of input while reading a token.");
        }
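        // Walks the token list pairwise and flags adjacent token combinations that
        // can never appear in a valid program; throws ParseException on the first violation.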
        public void Check()
        {
            for (int i = 0; i != TokensList.Count - 1; i++)
            {
                CurrentToken = TokensList[i];
                NextToken = TokensList[i + 1];

                // Two adjacent tokens of the same type are only allowed for braces and parentheses.
                if (CurrentToken.Type == NextToken.Type)
                {
                    if (CurrentToken.Lexeme != "{" && CurrentToken.Lexeme != "}" && CurrentToken.Lexeme != "(" && CurrentToken.Lexeme != ")")
                    {
                        Error = true;
                        break;
                    }
                }
                // An integer literal may not be followed directly by an identifier or a brace.
                else if (CurrentToken.Type == TokenType.Token_INT_LITERAL
                         && (NextToken.Type == TokenType.Token_IDENTIFIER || NextToken.Lexeme == "}" || NextToken.Lexeme == "{"))
                {
                    Error = true;
                    break;
                }
                // An integer literal may not appear on the left-hand side of an assignment.
                else if (CurrentToken.Type == TokenType.Token_INT_LITERAL && NextToken.Lexeme == "=")
                {
                    Error = true;
                    break;
                }
                // A reserved word must be preceded by a semicolon, a brace or a parenthesis.
                else if (CurrentToken.Lexeme != ";" && NextToken.Type == TokenType.Token_RESERVEDWORD)
                {
                    if (CurrentToken.Lexeme != "{" && CurrentToken.Lexeme != "}"
                        && CurrentToken.Lexeme != "(" && CurrentToken.Lexeme != ")")
                    {
                        Error = true;
                        break;
                    }
                }
                // The last token before end-of-file must be a semicolon or a closing brace.
                else if (CurrentToken.Lexeme != ";" && NextToken.Type == TokenType.Token_EOF)
                {
                    if (CurrentToken.Lexeme == "}")
                        continue;
                    else
                    {
                        Error = true;
                        break;
                    }
                }
            }

            if (Error)
            {
                throw new ParseException("Syntax Error.");
            }
        }
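        // Builds a token from the characters consumed since the last accept,
        // records it in TokensList and resets the lexer to the Ready state.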
        protected Token Accept(TokenType Type)
        {
            string Lexeme = Input.Substring(StringBegin, StringEnd - StringBegin);
            StringBegin = StringEnd;
            Token Temp = new Token();
            Temp.Lexeme = Lexeme;
            Temp.Type = Type;
            State = LexerState.Ready;
            // The lexeme may still carry a leading line break, so both the plain and
            // the "\r\n"-prefixed spellings are treated as reserved words.
            if (Temp.Lexeme == "if" || Temp.Lexeme == "while" || Temp.Lexeme == "\r\nif" || Temp.Lexeme == "\r\nwhile")
                Temp.Type = TokenType.Token_RESERVEDWORD;
            TokensList.Add(Temp);
            return Temp;
        }
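        // Consumes the lookahead token if its type matches the expected type;
        // otherwise reports a syntax error.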
        protected bool Match(TokenType Type)
        {
            if (Lookahead.Type == Type)
            {
                if (MoreTokens())
                {
                    Lookahead = NextToken();
                }
                else
                {
                    // Mirror the string overload: with no tokens left, expose an explicit EOF lookahead.
                    Lookahead = new Token();
                    Lookahead.Type = TokenType.Token_EOF;
                }
                return true;
            }
            else
            {
                throw new ParseException("Expected " + Type.ToString());
            }
        }
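        // Consumes the lookahead token if its lexeme matches the expected text;
        // otherwise reports a syntax error.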
        protected bool Match(string Lexeme)
        {
            if (Lookahead.Lexeme == Lexeme)
            {
                if (MoreTokens())
                {
                    Lookahead = NextToken();
                }
                else
                {
                    Lookahead = new Token();
                    Lookahead.Type = TokenType.Token_EOF;
                }
                return true;
            }
            else
            {
                throw new ParseException("Expected " + Lexeme);
            }
        }
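        // Appends an explicit end-of-file marker so the checker can inspect the last real token.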
        public void appendEOF()
        {
            Token temp = new Token(TokenType.Token_EOF, "");
            TokensList.Add(temp);
        }
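        // Wraps an already-lexed token list for pairwise syntax checking.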
        public SyntaxChecker(List<Token> TokensList)
        {
            this.TokensList = TokensList;
            CurrentToken = new Token();
            NextToken = new Token();
        }
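        // A minimal usage sketch, assuming a Lexer class that owns the NextToken/appendEOF
        // members above and exposes its TokensList; the class and constructor names here
        // are illustrative, not defined in this file:
        //
        //     var lexer = new Lexer(source);
        //     while (lexer.MoreTokens())
        //         lexer.NextToken();
        //     lexer.appendEOF();
        //
        //     var checker = new SyntaxChecker(lexer.TokensList);
        //     checker.Check();   // throws ParseException on an invalid token sequence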