// Scans a decimal integer literal starting at the current input symbol.
// On success sets Val to the parsed value, sets current_token_ to kNumber,
// and leaves input_sym_ on the first non-digit character.
// Returns true iff at least one digit was consumed; consumes nothing otherwise.
private bool ScanNumber()
{
    // BUG FIX: the original used Char.IsNumber, which also matches
    // non-decimal numeric characters (e.g. '½', '²') and non-ASCII decimal
    // digits, for which (input_sym_ - '0') computes a garbage value.
    // Restrict the scan to ASCII '0'..'9'.
    if (input_sym_ < '0' || input_sym_ > '9')
    {
        return false;
    }
    this.Val = 0;
    while (input_sym_ >= '0' && input_sym_ <= '9')
    {
        // NOTE(review): no overflow check — a sufficiently long literal
        // silently wraps; confirm whether the language should reject it.
        this.Val = 10 * this.Val + (input_sym_ - '0');
        Next();
    }
    current_token_ = TokenHelper.Tokens.kNumber;
    return true;
}
// Opens <file_name> and primes the scanner: loads the first input
// character and scans the first token into current_token_.
public Scanner(string file_name)
{
    this.identifier_table = new List<string>();
    this.file_reader_ = new FileReader(file_name);
    this.current_token_ = TokenHelper.Tokens.kNullToken;

    Next();    // load the first character into input_sym_
    GetSym();  // scan the first token
}
// Advances to the next token and verifies that it is <token_enum>.
// On a mismatch, reports an error naming the expected token and
// returns false; returns true when the token matches.
public bool EatToken(TokenHelper.Tokens token_enum)
{
    Next();
    if (scanner_sym_ == token_enum)
    {
        return true;
    }
    Error("Expected " + Enum.GetName(typeof(TokenHelper.Tokens), token_enum));
    return false;
}
// Scans an identifier or keyword starting at the current input symbol.
// A keyword match sets current_token_ to the keyword's token. Otherwise the
// identifier is interned in identifier_table (reusing an existing slot when
// present), Id is set to its table index, and current_token_ to kIdent.
// Returns false (consuming nothing) when input_sym_ is not a letter.
private bool ScanIdentifier()
{
    if (!Char.IsLetter(input_sym_))
    {
        return false;
    }

    StringBuilder sb = new StringBuilder();
    sb.Append(input_sym_);
    Next();
    // NOTE(review): the first character is any Unicode letter but the
    // continuation characters are restricted to ASCII letters
    // (input_sym_ < 128) — confirm this asymmetry is intended.
    while (Char.IsNumber(input_sym_) || (Char.IsLetter(input_sym_) && input_sym_ < (char)128))
    {
        sb.Append(input_sym_);
        Next();
    }

    // FIX: build the string once and reuse it (the original called
    // sb.ToString() three times).
    string token_str = sb.ToString();

    // Keywords win over identifiers.
    if (TokenHelper.MatchKeyword(token_str, out current_token_))
    {
        return true;
    }

    // Intern the identifier: reuse the existing slot or append a new one.
    int pos = identifier_table.IndexOf(token_str);
    if (pos < 0)
    {
        pos = identifier_table.Count;
        identifier_table.Add(token_str);
    }
    Id = pos;
    current_token_ = TokenHelper.Tokens.kIdent;
    return true;
}
// Maps a token value back to its spelling by reverse lookup through
// TokenHelper.TOKENSET. Returns null when no entry carries the token
// (same as the default-Key result of a failed FirstOrDefault).
public string Token2String(TokenHelper.Tokens token)
{
    foreach (var entry in TokenHelper.TOKENSET)
    {
        if (entry.Value == token)
        {
            return entry.Key;
        }
    }
    return null;
}
// One-token-lookahead scanner step: captures current_token_ on entry,
// scans the NEXT token from the input into current_token_, and returns the
// CAPTURED (previous) token. Callers therefore receive the token produced
// by the preceding call; the freshly scanned one is handed out next time.
public TokenHelper.Tokens GetSym()
// public int sym; // the current token on the input, 0 = error token, 255 = end-of-file token
{
    TokenHelper.Tokens r = current_token_;  // token handed back to the caller

    // Once the end marker has been produced, keep returning it.
    // NOTE(review): Next() is still called here even though input is done —
    // presumably harmless; confirm against FileReader's end behavior.
    if (r == TokenHelper.Tokens.kEndofToken)
    {
        Next();
        return(r);
    }
    // Whitespace skipping failed or the reader's end symbol was reached:
    // the input is exhausted, so the NEXT call will report kEndofToken.
    if (!SkipSpace())
    {
        current_token_ = TokenHelper.Tokens.kEndofToken;
        return(r);
    }
    if (input_sym_ == FileReader.kEndSymbol)
    {
        current_token_ = TokenHelper.Tokens.kEndofToken;
        return(r);
    }
    // Identifiers/keywords and numbers set current_token_ themselves.
    if (ScanIdentifier() != false)
    {
        return(r);
    }
    if (ScanNumber() != false)
    {
        return(r);
    }

    // Operators and punctuation (one- and two-character tokens).
    switch (input_sym_)
    {
        case '/':
        {
            Next();
            if (input_sym_ == '/')
            {
                // "//" line comment: skip the rest of the line, then scan
                // the token after it (the recursive call refills
                // current_token_; its return value is intentionally dropped).
                //Next();
                file_reader_.SkipLine();
                Next();
                GetSym(); // OR: SkipCommentAndSpace
                return(r);
            }
            else
            {
                current_token_ = TokenHelper.Tokens.kDivToken;
                return(r);
            }
            break;  // unreachable: both branches return
        }
        case '=':
        {
            // Only "==" is legal; a lone '=' is an error.
            Next();
            if (input_sym_ == '=')
            {
                Next();
                current_token_ = TokenHelper.Tokens.kEqlToken;
                return(r);
            }
            else
            {
                Error(@"'=' should be followed by '='");
                current_token_ = TokenHelper.Tokens.kErrorToken;
                return(r);
            }
            break;  // unreachable
        }
        case '!':
        {
            // Only "!=" is legal; a lone '!' is an error.
            Next();
            if (input_sym_ == '=')
            {
                Next();
                current_token_ = TokenHelper.Tokens.kNeqToken;
                return(r);
            }
            else
            {
                // NOTE(review): this verbatim string literal contains an
                // embedded newline — probably unintended; confirm the
                // intended error message before changing it.
                Error(@"'!' 
should be followed by '='");
                current_token_ = TokenHelper.Tokens.kErrorToken;
                return(r);
            }
            break;  // unreachable
        }
        case '>':
        {
            // ">=" or ">".
            Next();
            if (input_sym_ == '=')
            {
                Next();
                current_token_ = TokenHelper.Tokens.kGeqToken;
                return(r);
            }
            else
            {
                current_token_ = TokenHelper.Tokens.kGtrToken;
                return(r);
            }
            break;  // unreachable
        }
        case '<':
        {
            // "<=", "<-" (assignment/becomes), or bare '<'
            // (kIssToken — presumably less-than; confirm the enum naming).
            Next();
            if (input_sym_ == '=')
            {
                Next();
                current_token_ = TokenHelper.Tokens.kLeqToken;
                return(r);
            }
            else if (input_sym_ == '-')
            {
                Next();
                current_token_ = TokenHelper.Tokens.kBecomesToken;
                return(r);
            }
            else
            {
                current_token_ = TokenHelper.Tokens.kIssToken;
                return(r);
            }
            break;  // unreachable
        }
        // Single-character tokens: consume the character, record the token.
        case '+': { Next(); current_token_ = TokenHelper.Tokens.kPlusToken; return(r); }
        case '-': { Next(); current_token_ = TokenHelper.Tokens.kMinusToken; return(r); }
        case '*': { Next(); current_token_ = TokenHelper.Tokens.kTimesTocken; return(r); }
        case '.': { Next(); current_token_ = TokenHelper.Tokens.kPeriodToken; return(r); }
        case ',': { Next(); current_token_ = TokenHelper.Tokens.kCommaToken; return(r); }
        case ';': { Next(); current_token_ = TokenHelper.Tokens.kSemiToken; return(r); }
        case '(': { Next(); current_token_ = TokenHelper.Tokens.kOpenparenToken; return(r); }
        case ')': { Next(); current_token_ = TokenHelper.Tokens.kCloseparenToken; return(r); }
        case '[': { Next(); current_token_ = TokenHelper.Tokens.kOpenbracketToken; return(r); }
        case ']': { Next(); current_token_ = TokenHelper.Tokens.kClosebracketToken; return(r); }
        case '{': { Next(); current_token_ = TokenHelper.Tokens.kBeginToken; return(r); }
        case '}': { Next(); current_token_ = TokenHelper.Tokens.kEndToken; return(r); }
    }
    // Unrecognized character: produce the error token.
    // NOTE(review): the offending character is NOT consumed (no Next()),
    // so the next call may hit it again — confirm this is intended.
    {
        current_token_ = TokenHelper.Tokens.kErrorToken;
    }
    return(r);
}
// Advance the one-token lookahead: pull the next token from the scanner
// into scanner_sym_.
// NOTE(review): this reads whole tokens via scanner_.GetSym(), unlike the
// character-level Next() the scanner methods above rely on — it appears to
// belong to the parser side; confirm the enclosing class.
private void Next() => scanner_sym_ = scanner_.GetSym();