public void analyze(string outputFileName, Token[] tokens)
{
    this.tokens = tokens;
    this.tokenIndex = 0;
    start();
    Filling.Write(outputFileName, intermediateCode.ToArray());
}
private void handleOperatorPrecedence(Token token)
{
    // Pop operators that bind at least as tightly as the incoming one into the
    // postfix output before pushing the new operator.
    while (operatorStack.Count() > 0
        && InfixOperators.GetOpInfo.ContainsKey(operatorStack.First().TokenString)
        && stackOperatorPrecedence(operatorStack.First().TokenString, token.TokenString))
    {
        postFixedTokens.Add(operatorStack.Pop());
    }
    operatorStack.Push(token);
}
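// Illustrative sketch, not part of the project: handleOperatorPrecedence above is the core
// pop-then-push step of a shunting-yard conversion to postfix. The stackOperatorPrecedence
// test it calls is not shown in this section; the self-contained stand-in below reproduces
// the usual pop condition with a plain precedence table. All names here are hypothetical,
// not the project's API (requires: using System.Collections.Generic;).
internal static class ShuntingYardSketch
{
    private static readonly Dictionary<string, int> Precedence = new Dictionary<string, int>
    {
        { "+", 1 }, { "-", 1 }, { "*", 2 }, { "/", 2 }
    };

    // Pop while the stacked operator binds at least as tightly as the incoming one
    // (the correct test for left-associative operators).
    internal static bool ShouldPop(string stackedOp, string incomingOp)
    {
        return Precedence[stackedOp] >= Precedence[incomingOp];
    }
}
// Worked example: for "3 + 4 * 2", "+" stays stacked when "*" arrives
// (ShouldPop("+", "*") is false), so the postfix output is 3 4 2 * +.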
internal static Token TestForLastValueToken(int listCount, Token token)
{
    // If an expression starts with a +/- sign or an infix operator, substitute the
    // previously computed value (from SystemLog) as the implicit left operand.
    if (listCount == 0
        && TokenizerRules.CharacterInfo.ContainsKey(token.TokenString)
        && (TokenizerRules.CharacterInfo[token.TokenString].Type == TokenizerRules.CharType.plusMinus
            || TokenizerRules.CharacterInfo[token.TokenString].Type == TokenizerRules.CharType.infixOp))
    {
        return new Token(SystemLog.GetLastNumericalValue().ToString(), TokenType.numberLiteral);
    }
    return null;
}
private Parser(Token[] tokenSet)
{
    this.tokenSet = tokenSet;
    List<string> tokens = new List<string>();
    foreach (Token token in tokenSet)
        tokens.Add(token.classpart.name);
    this.tokens = tokens.ToArray();
    tokenIndex = 0;
}
public static Token TestToChangeLastToken(Token lastToken, Token tokenToAdd)
{
    // Fold a dangling +/- sign into the number literal that follows it,
    // e.g. the pair ("-", "5") becomes the single literal "-5".
    if (lastToken != null
        && TokenizerRules.CharacterInfo.ContainsKey(lastToken.TokenString)
        && TokenizerRules.CharacterInfo[lastToken.TokenString].Type == TokenizerRules.CharType.plusMinus
        && tokenToAdd.TokenType == TokenType.numberLiteral)
    {
        return new Token(lastToken.TokenString + tokenToAdd.TokenString, TokenType.numberLiteral);
    }
    return null;
}
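// Illustrative sketch, not part of the project: the folding rule above, restated over plain
// strings so it runs stand-alone. Hypothetical helper; the real rule consults
// TokenizerRules.CharacterInfo rather than parsing the strings directly.
internal static class SignFoldingSketch
{
    // Returns "-5" for ("-", "5") and "+5" for ("+", "5"); otherwise null.
    internal static string TryFold(string lastToken, string next)
    {
        bool lastIsSign = lastToken == "+" || lastToken == "-";
        bool nextIsNumber = double.TryParse(next, out _);
        return (lastIsSign && nextIsNumber) ? lastToken + next : null;
    }
}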
public Scaner(System.IO.StreamReader istream)
{
    buf = new Buffer(istream.BaseStream);
    try
    {
        next_token = Next();
    }
    catch (Scaner.Exception e)
    {
        error = e;
    }
}
public void Pass()
{
    // Skip ahead to the next whitespace, ';', or end of input, then attempt to read
    // the next token; a scan error is stored rather than rethrown.
    while (!IsWhite(buf.Peek()) && buf.Peek() != -1 && buf.Peek() != ';')
    {
        buf.Read();
    }
    try
    {
        next_token = Next();
    }
    catch (Scaner.Exception e)
    {
        error = e;
    }
}
static void Main(string[] args)
{
    const string sourcecode = "source-code.txt";
    const string tokenset = "token-set.txt";
    const string intermediatecode = "intermediate-code.txt";

    string[] code = Filling.Read(sourcecode);
    LexicalAnalyzer LA = new LexicalAnalyzer(code);
    LA.analyze(tokenset);

    string[] tokens = Filling.Read(tokenset);
    SyntaxAnalyzer SA = new SyntaxAnalyzer(tokens);
    int tokenIndex = SA.analyze();
    if (tokenIndex == -1)
    {
        // Syntax analysis succeeded: run semantic analysis, then generate
        // intermediate code if no semantic errors were recorded.
        List<Token> _tokens = new List<Token>();
        foreach (string token in tokens)
            _tokens.Add(new Token(token));
        TestSemanticTree.MainSemanticTree.parse(_tokens.ToArray());
        List<ErrorRecord> errors = TestSemanticTree.MainSemanticTree.errors;
        if (errors.Count == 0)
        {
            ICGTree.MainSyntaxTree.analyze(intermediatecode, _tokens.ToArray());
        }
        else
        {
            foreach (ErrorRecord error in errors)
                Console.WriteLine(error.identifier + " on line #" + error._token.line + " (" + error.type + ")");
        }
    }
    else
    {
        // Syntax error: print the offending line with a caret under the bad token.
        Token token = new Token(tokens[(tokenIndex < tokens.Length) ? tokenIndex : tokenIndex - 1]);
        string line = code[token.line - 1];
        int index = line.IndexOf(token.valuepart);
        string error = string.Empty;
        while (index > 0)
        {
            index--;
            error += " ";
        }
        error += "^";
        Console.WriteLine(line);
        Console.WriteLine(error);
    }
}
private int GetOperatorPriority(Token t)
{
    // Higher value binds tighter; the table mirrors C-style operator precedence.
    switch (t.type)
    {
        case Token.Type.OP_STAR:
        case Token.Type.OP_DIV:
        case Token.Type.OP_MOD:
            return 13;
        case Token.Type.OP_PLUS:
        case Token.Type.OP_SUB:
            return 12;
        case Token.Type.OP_L_SHIFT:
        case Token.Type.OP_R_SHIFT:
            return 11;
        case Token.Type.OP_MORE:
        case Token.Type.OP_MORE_OR_EQUAL:
        case Token.Type.OP_LESS:
        case Token.Type.OP_LESS_OR_EQUAL:
            return 10;
        case Token.Type.OP_EQUAL:
        case Token.Type.OP_NOT_EQUAL:
            return 9;
        case Token.Type.OP_BIT_AND:
            return 8;
        case Token.Type.OP_XOR:
            return 7;
        case Token.Type.OP_BIT_OR:
            return 6;
        case Token.Type.OP_AND:
            return 5;
        case Token.Type.OP_OR:
            return 4;
        case Token.Type.OP_ASSIGN:
        case Token.Type.OP_BIT_AND_ASSIGN:
        case Token.Type.OP_BIT_OR_ASSIGN:
        case Token.Type.OP_DIV_ASSIGN:
        case Token.Type.OP_L_SHIFT_ASSIGN:
        case Token.Type.OP_MOD_ASSIGN:
        case Token.Type.OP_MUL_ASSIGN:
        case Token.Type.OP_PLUS_ASSIGN:
        case Token.Type.OP_R_SHIFT_ASSIGN:
        case Token.Type.OP_SUB_ASSIGN:
        case Token.Type.OP_XOR_ASSIGN:
            return 2;
        case Token.Type.COMMA:
            return 1;
        default:
            return -1;
    }
}
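// Illustrative sketch, not part of the project: a priority table like GetOperatorPriority is
// typically consumed by precedence climbing. The stand-alone version below works over plain
// strings with the same relative priorities for + - * / (hypothetical names; requires:
// using System.Collections.Generic;).
internal static class PrecedenceClimbSketch
{
    private static readonly Dictionary<string, int> Prio = new Dictionary<string, int>
    {
        { "+", 12 }, { "-", 12 }, { "*", 13 }, { "/", 13 }
    };

    // Parses tokens[pos..] into a fully parenthesized string,
    // e.g. ["1", "+", "2", "*", "3"] yields "(1 + (2 * 3))".
    internal static string Parse(string[] tokens, ref int pos, int minPrio = 0)
    {
        string left = tokens[pos++]; // operand
        while (pos < tokens.Length && Prio.TryGetValue(tokens[pos], out int p) && p >= minPrio)
        {
            string op = tokens[pos++];
            // A left-associative operator of priority p lets its right-hand side
            // absorb only operators that bind strictly tighter (p + 1).
            string right = Parse(tokens, ref pos, p + 1);
            left = "(" + left + " " + op + " " + right + ")";
        }
        return left;
    }
}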
public Token Read()
{
    CheckError();
    Token res = next_token;
    try
    {
        next_token = Next();
    }
    catch (Scaner.Exception e)
    {
        error = e;
    }
    return res;
}
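// Note on the pattern (an observation, not project documentation): the scanner keeps
// one-token lookahead. next_token always holds the upcoming token; Read hands it out and
// immediately refills it, and a Scaner.Exception thrown by the refill is parked in `error`
// instead of propagating, so CheckError surfaces it only on the access that actually
// needs that token.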
//BUG: ans + 1 fails
/// <summary>Adds a (post-fixed) token to the ParseTree and returns a message specifying whether the addition was successful.</summary>
/// <param name="token">Token to add</param>
/// <returns>Message about the success of the addition</returns>
public ReturnMessage AddToken(Token token)
{
    Node nodeToAdd = null;
    if (!ParseTreeBuilder.NodeLookup.ContainsKey(token.TokenType))
    {
        throw new Exception("This token type is not possible here");
    }
    else
    {
        nodeToAdd = ParseTreeBuilder.NodeLookup[token.TokenType].BuildNode(token);
        if (nodeToAdd == null)
            return new ReturnMessage(false);
        if (nodeToAdd.AppendMeTo == AppendTokenTo.child)
        {
            root.AddChild(nodeToAdd);
        }
        else if (nodeToAdd.AppendMeTo == AppendTokenTo.parent)
        {
            root.AddParent(nodeToAdd);
        }
    }
    return new ReturnMessage(true);
}
public int parse(Token[] tokens)
{
    this.tokens = tokens;
    this.tokenIndex = 0;
    int index = this.parse();

    // Dump the variable table for inspection.
    int i = 1;
    foreach (VariableRecord record in variableTable.ToArray())
    {
        Console.WriteLine(i++);
        Console.WriteLine(record.identifier);
        Console.WriteLine(record.type);
        Console.WriteLine(record.scope);
        Console.WriteLine(record.constant);
    }
    return index;
}
private void addToList(Token token)
{
    if (list.Count > 0)
    {
        // First try to fold the token into the previous one (e.g. a sign merging with
        // a number), then check whether an operator should be inferred between the
        // previous token and this one.
        Token tokenToChangeTo = OperatorInferenceRules.TestToChangeLastToken(AsList().Last(), token);
        if (tokenToChangeTo != null)
        {
            list.RemoveAt(list.Count - 1);
            token = tokenToChangeTo;
        }
        Token inferredToken = OperatorInferenceRules.TestForTokenInference(AsList(), token);
        if (inferredToken != null)
            list.Add(inferredToken);
    }
    else
    {
        // Empty list: an expression starting with an operator pulls in the last answer.
        Token inferredToken = OperatorInferenceRules.TestForLastValueToken(AsList().Count, token);
        if (inferredToken != null)
            list.Add(inferredToken);
    }
    list.Add(token);
}
public static Token TestForTokenInference(List<Token> tokens, Token tokenToAdd)
{
    if (tokens.Count() > 0)
    {
        TokenizerRules.CharInfo lastTknInfo = TokenizerRules.CharacterInfo.GetCharInfo(tokens.Last().TokenString);
        // Character info for the first character of the incoming token.
        TokenizerRules.CharInfo lastCharInfo = TokenizerRules.CharacterInfo.GetCharInfo(tokenToAdd.TokenString[0].ToString());

        // When a minus or plus sign has been read as part of a signed number, add a plus
        // sign before the number, e.g. "4 -5" becomes "4 + -5".
        if ((tokenToAdd.TokenType == TokenType.numberLiteral)
                && (tokens.Last().TokenType == TokenType.numberLiteral
                    || (lastTknInfo != null && lastTknInfo.Type == TokenizerRules.CharType.closedBrace))
            || ((Keywords.KeywordLookup.ContainsKey(tokens.Last().TokenString.ToLower())
                    && Keywords.KeywordLookup[tokens.Last().TokenString.ToLower()].TokenType == TokenType.numberLiteral)
                && (lastCharInfo != null && lastCharInfo.Type == TokenizerRules.CharType.plusMinus)))
        {
            return new Token("+", TokenType.operatorOrPunctuation);
        }

        // Infer a multiplication sign between two sets of parentheses,
        // e.g. "(3)(5)" becomes "(3)*(5)".
        if ((TokenizerRules.CharacterInfo.ContainsKey(tokenToAdd.TokenString)
                && TokenizerRules.CharacterInfo.ContainsKey(tokens.Last().TokenString))
            && TokenizerRules.CharacterInfo[tokenToAdd.TokenString].Type == TokenizerRules.CharType.openBrace
            && TokenizerRules.CharacterInfo[tokens.Last().TokenString].Type == TokenizerRules.CharType.closedBrace)
        {
            return new Token("*", TokenType.operatorOrPunctuation);
        }

        // Infer a multiplication sign between a number and an opening parenthesis,
        // e.g. "3(4)" becomes "3*(4)".
        if (TokenizerRules.CharacterInfo.ContainsKey(tokenToAdd.TokenString)
            && TokenizerRules.CharacterInfo[tokenToAdd.TokenString].Type == TokenizerRules.CharType.openBrace
            && tokens.Last().TokenType == TokenType.numberLiteral)
        {
            return new Token("*", TokenType.operatorOrPunctuation);
        }

        // Likewise after a closing parenthesis, provided the number is not signed,
        // e.g. "(4)3" becomes "(4)*3".
        if (TokenizerRules.CharacterInfo.ContainsKey(tokens.Last().TokenString)
            && tokenToAdd.TokenType == TokenType.numberLiteral
            && TokenizerRules.CharacterInfo[tokens.Last().TokenString].Type == TokenizerRules.CharType.closedBrace
            && tokenToAdd.TokenString[0] != '-')
        {
            return new Token("*", TokenType.operatorOrPunctuation);
        }
    }
    return null;
}
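// Illustrative sketch, not part of the project: the three multiplication rules above,
// reduced to plain strings (hypothetical helper; the real rules consult
// TokenizerRules.CharacterInfo rather than comparing literals).
internal static class MultiplyInferenceSketch
{
    internal static string TryInferMultiply(string last, string next)
    {
        bool lastIsNumber = double.TryParse(last, out _);
        bool nextIsNumber = double.TryParse(next, out _);
        if ((last == ")" || lastIsNumber) && next == "(")
            return "*"; // "(3)(5)" => "(3)*(5)", "3(4)" => "3*(4)"
        if (last == ")" && nextIsNumber && next[0] != '-')
            return "*"; // "(4)3" => "(4)*3"
        return null;
    }
}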
internal ErrorRecord(string identifier, string type, Token token)
{
    this.identifier = identifier;
    this.type = type;
    this.token = token;
}
private static Node buildIdentifierNode(Token token)
{
    Node nodeToReturn = null;
    string key = token.TokenString.ToLower();
    if (Functions.FunctionLookup.ContainsKey(key))
    {
        nodeToReturn = new FunctionNode(token.TokenString);
        nodeToReturn.AppendMeTo = AppendTokenTo.parent;
    }
    else if (Keywords.KeywordLookup.ContainsKey(key))
    {
        nodeToReturn = Keywords.KeywordLookup[key].BuildMethod();
        nodeToReturn.AppendMeTo = Keywords.KeywordLookup[key].AppendTokenTo;
    }
    return nodeToReturn;
}
private static Node buildNumberNode(Token token)
{
    double parsedVal;
    if (double.TryParse(token.TokenString, out parsedVal))
    {
        Node nodeToReturn = new NumberNode(parsedVal);
        nodeToReturn.AppendMeTo = AppendTokenTo.child;
        return nodeToReturn;
    }
    return null;
}
private Token CheckToken(Token t, Token.Type type, bool get_next_token = false)
{
    Syntax.Object.CheckToken(t, type, "expected \"" + Token.Types[type].ToString() + '"');
    return get_next_token ? scan.Read() : t;
}
private void run(string[] line)
{
    foreach (string word in line)
    {
        Token token = new Token();
        token.Position = tokenList.Count + 1;
        token.Text = word;
        token.Level = this.level;
        token.LineNumber = this.lineNumber;
        if (!this.map.ContainsKey(word))
        {
            // Unmapped words are either numbers or identifiers.
            int wordNumeric;
            if (int.TryParse(word, out wordNumeric))
            {
                token.TokenType = TokenType.NUMBER;
            }
            else
            {
                token.TokenType = TokenType.IDENTIFIER;
            }
        }
        else
        {
            this.handleStack(this.map[word]);
            token.TokenType = this.map[word];
            this.handlePartner(token);
        }
        this.tokenList.AddLast(token);
    }
}
private void PassExpr(String stop_chars = ";")
{
    // Error recovery: consume tokens until a stopping point appropriate to the
    // current context (statement end, block end, or argument separator).
    Token t = new Token();
    int count_br = 0;
    do
    {
        try
        {
            t = scan.Read();
            if (t.type == Token.Type.LBRACE)
            {
                count_br++;
            }
            else if (t.type == Token.Type.RBRACE)
            {
                count_br--;
            }
        }
        catch (Scaner.Exception e)
        {
            scan.Pass();
            logger.Add(e);
        }
    } while (!(t.type == Token.Type.EOF
        || (t.GetStrVal().IndexOf(stop_chars) != -1 && !this.parse_args_declaraton)
        || (this.parse_block && (t.type == Token.Type.RBRACE || count_br <= 0))
        || (this.parse_args_declaraton && (t.type == Token.Type.COMMA || scan.Peek().type == Token.Type.RPAREN))));
}
private static Node buildOperatorOrPunctuationNode(Token token)
{
    Node nodeToReturn = null;
    if (InfixOperators.GetOpInfo.ContainsKey(token.TokenString))
    {
        nodeToReturn = new TwoParameterOperatorNode(token.TokenString);
        nodeToReturn.AppendMeTo = AppendTokenTo.parent;
    }
    return nodeToReturn;
}
internal Token PublishToken()
{
    // Emit the accumulated token and reset the builder state.
    Token tokenToReturn = new Token(tokenString, currentTokenType);
    tokenString = string.Empty;
    currentTokenType = TokenType.none;
    return tokenToReturn;
}
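// Illustrative sketch, not part of the project: PublishToken is the "flush" half of an
// accumulate-then-emit tokenizer. The self-contained loop below shows the shape of a
// typical caller with hypothetical names (requires: using System.Collections.Generic;).
internal static class PublishSketch
{
    internal static List<string> Split(string input)
    {
        var tokens = new List<string>();
        var buffer = string.Empty;
        foreach (char c in input)
        {
            if (char.IsWhiteSpace(c))
            {
                // Boundary reached: publish the accumulated token and reset the buffer.
                if (buffer.Length > 0) { tokens.Add(buffer); buffer = string.Empty; }
            }
            else
            {
                buffer += c; // keep accumulating the current token
            }
        }
        if (buffer.Length > 0) tokens.Add(buffer); // flush the trailing token
        return tokens;
    }
}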
static void Main(string[] args)
{
    StreamReader istr = new StreamReader(Console.OpenStandardInput(), Console.InputEncoding);
    StreamWriter ostr = new StreamWriter(System.Console.OpenStandardOutput(), Console.OutputEncoding);
    if (args.Count() < 2)
    {
        Help();
        return;
    }

    bool low_opt = false, hight_opt = false;
    int index_in = 2, index_out = 3;
    switch (args[1])
    {
        case "-low":
            low_opt = true;
            break;
        case "-hight":
            hight_opt = true;
            break;
        case "-low-hight":
            low_opt = hight_opt = true;
            break;
        default:
            // No optimization flag: input/output paths shift left by one.
            index_in = 1;
            index_out = 2;
            break;
    }

    istr = new StreamReader(args[index_in]);
    if (index_out < args.Count())
    {
        ostr = new StreamWriter(args[index_out]);
    }

    Scaner scaner = null;
    Parser parser = null;
    Fasm.CodeGen codegen = null;
    switch (args[0])
    {
        case "-l":
            // Lexer only: print every token until EOF.
            scaner = new Scaner(istr);
            Token t = null;
            while (t == null || t.type != Token.Type.EOF)
            {
                try
                {
                    t = scaner.Read();
                    Console.WriteLine(t.ToString());
                }
                catch (Scaner.Exception e)
                {
                    ostr.WriteLine(e.Message);
                }
            }
            break;
        case "-p":
            // Parse and print the syntax tree.
            parser = new Parser(new Scaner(istr));
            parser.Parse();
            parser.PrintTree(ostr);
            ostr.WriteLine(parser.logger.ToString());
            break;
        case "-c":
            // Compile to FASM assembly.
            parser = new Parser(new Scaner(istr));
            parser.Parse();
            if (!parser.logger.isEmpty())
            {
                ostr.WriteLine(parser.logger.ToString());
                break;
            }
            codegen = new Fasm.CodeGen(parser.tstack);
            codegen.Generate(ostr);
            break;
        case "-cexe":
            // Compile, then run fasm.exe on the emitted file.
            parser = new Parser(new Scaner(istr));
            parser.Parse();
            if (!parser.logger.isEmpty())
            {
                ostr.WriteLine(parser.logger.ToString());
                break;
            }
            codegen = new Fasm.CodeGen(parser.tstack);
            codegen.Generate(ostr);
            ostr.Flush();
            ostr.Close();
            if (index_out < args.Count())
            {
                Process.Start(new ProcessStartInfo
                {
                    FileName = "C:/fasm/fasm.exe",
                    WindowStyle = ProcessWindowStyle.Hidden,
                    Arguments = string.Format("{0} {1}", args[index_out], args[index_out])
                });
            }
            break;
        default:
            Help();
            return;
    }
    istr.Close();
    ostr.Close();
}
// Lexical pass: splits each source line into tokens, handling |- ... -| block
// comments, // line comments, string and char constants, and multi-character operators.
public void analyze(string outputFileName)
{
    bool MultiLineComment = false;
    List<Token> tokens = new List<Token>();
    for (int lineCount = 0; lineCount < code.Length; lineCount++)
    {
        bool String = false;
        string StringConstant = string.Empty;
        bool Char = false;
        string CharConstant = string.Empty;
        bool SingleLineComment = false;
        string line = code[lineCount];
        if (line == string.Empty) continue;

        // A block comment may have started on an earlier line; skip until "-|".
        if (MultiLineComment)
            if (line.Contains("-|"))
            {
                int index = line.IndexOf("-|");
                line = line.Substring(index + 2, (line.Length - index - 2));
                MultiLineComment = false;
            }
            else continue;

        string[] words = line.Split(' ');
        if (words.Length == 0) continue;
        for (int wordCount = 0; wordCount < words.Length; wordCount++)
        {
            string word = words[wordCount];
            if (word == string.Empty) continue;
            if (SingleLineComment) continue;
            if (MultiLineComment)
                if (word.Contains("-|"))
                {
                    int index = word.IndexOf("-|");
                    word = word.Substring(index + 2, (word.Length - index - 2));
                    MultiLineComment = false;
                }
                else continue;

            // Inside a string constant: keep appending words until the closing quote.
            if (String)
                if (word.Contains('"'))
                {
                    StringConstant += " ";
                    int index = word.IndexOf('"');
                    StringConstant += word.Substring(0, index + 1);
                    word = word.Substring(index + 1, (word.Length - 1 - index));
                    Token token = new Token(ClassPart.StringConstant, StringConstant, (lineCount + 1));
                    tokens.Add(token);
                    StringConstant = string.Empty;
                    String = false;
                    if (word == string.Empty) continue;
                }
                else
                {
                    StringConstant += " " + word;
                    continue;
                }

            // A char constant spanning a space.
            if (Char) { CharConstant += ' '; }

            if (RegularExpression.ValidateFloat(word))
            {
                // The whole word is a numeric literal.
                Token token = new Token(ClassPart.classPart(word), word, (lineCount + 1));
                tokens.Add(token);
                continue;
            }
            else if (Language.containsBreaker(word))
            {
                // The word contains punctuation: split it character by character.
                string _word = string.Empty;
                for (int letterCount = 0; letterCount < word.Length; letterCount++)
                {
                    char letter = word[letterCount];
                    if (MultiLineComment) { if (!(letter == '-')) continue; }
                    if (SingleLineComment) continue;
                    if (Language.isPunctuator(letter))
                    {
                        switch (letter)
                        {
                            case '.':
                                if (String) { StringConstant += letter; continue; }
                                // A dot after digits (or nothing) may form a float literal.
                                if ((_word == string.Empty) || Regex.IsMatch(_word, @"^[0-9]+$"))
                                {
                                    string _constant = _word + letter;
                                    string _LAD = string.Empty;
                                    int _i = 1;
                                    while (letterCount + _i < word.Length && Regex.IsMatch(word[letterCount + _i].ToString(), @"^[0-9]$"))
                                    {
                                        _LAD += word[letterCount + _i];
                                        _i++;
                                    }
                                    if (Regex.IsMatch(_LAD, @"^[0-9]+$"))
                                    {
                                        _constant = _constant + _LAD;
                                        Token t = new Token(ClassPart.classPart(_constant), _constant, (lineCount + 1));
                                        tokens.Add(t);
                                        letterCount += _i - 1;
                                        _word = string.Empty;
                                        continue;
                                    }
                                    else
                                    {
                                        if (!(_word == string.Empty))
                                        {
                                            Token _t = new Token(ClassPart.classPart(_word), _word, (lineCount + 1));
                                            tokens.Add(_t);
                                        }
                                        Token __t = new Token(letter, (lineCount + 1));
                                        tokens.Add(__t);
                                        _word = string.Empty;
                                    }
                                }
                                else
                                {
                                    if (!(_word == string.Empty))
                                    {
                                        Token _t = new Token(ClassPart.classPart(_word), _word, (lineCount + 1));
                                        tokens.Add(_t);
                                    }
                                    Token __t = new Token(letter, (lineCount + 1));
                                    tokens.Add(__t);
                                    _word = string.Empty;
                                }
                                break;
                            case '{': case '}': case '(': case ')': case ',': case ':': case '[': case ']': case ';':
                                if (String) { StringConstant += letter; continue; }
                                if (!(_word == string.Empty))
                                {
                                    Token _token = new Token(ClassPart.classPart(_word), _word, (lineCount + 1));
                                    tokens.Add(_token);
                                    _word = string.Empty;
                                }
                                Token token = new Token(letter, (lineCount + 1));
                                tokens.Add(token);
                                break;
                            case '<': case '>': case '!': case '=': case '*': case '%':
                                if (String) { StringConstant += letter; continue; }
                                if (!(_word == string.Empty))
                                {
                                    Token _token = new Token(ClassPart.classPart(_word), _word, (lineCount + 1));
                                    tokens.Add(_token);
                                    _word = string.Empty;
                                }
                                // Two-character operators such as <=, >=, !=, ==, *=, %=.
                                if (((letterCount + 1) < word.Length) && (word[letterCount + 1] == '='))
                                {
                                    string temporary = letter.ToString() + '=';
                                    Token _token = new Token(ClassPart.classPart(temporary), temporary, (lineCount + 1));
                                    tokens.Add(_token);
                                    letterCount++;
                                }
                                else
                                {
                                    Token _token = new Token(ClassPart.classPart(letter.ToString()), letter.ToString(), (lineCount + 1));
                                    tokens.Add(_token);
                                }
                                break;
                            case '/':
                                if (String) { StringConstant += letter; continue; }
                                if (!(_word == string.Empty))
                                {
                                    Token _token = new Token(ClassPart.classPart(_word), _word, (lineCount + 1));
                                    tokens.Add(_token);
                                    _word = string.Empty;
                                }
                                if (((letterCount + 1) < word.Length) && (word[letterCount + 1] == '='))
                                {
                                    string temporary = letter.ToString() + '=';
                                    Token _token = new Token(ClassPart.classPart(temporary), temporary, (lineCount + 1));
                                    tokens.Add(_token);
                                    letterCount++;
                                }
                                else if (((letterCount + 1) < word.Length) && (word[letterCount + 1] == '/'))
                                {
                                    // "//" starts a single-line comment.
                                    SingleLineComment = true;
                                    continue;
                                }
                                else
                                {
                                    string temporary = letter.ToString();
                                    Token _token = new Token(ClassPart.classPart(temporary), temporary, (lineCount + 1));
                                    tokens.Add(_token);
                                }
                                break;
                            case '+': case '-':
                                if (MultiLineComment)
                                {
                                    // "-|" closes a multi-line comment.
                                    if (letter == '-' && letterCount + 1 < word.Length && word[letterCount + 1] == '|')
                                    {
                                        MultiLineComment = false;
                                        letterCount += 1;
                                        continue;
                                    }
                                }
                                if (String) { StringConstant += letter; continue; }
                                if (!(_word == string.Empty))
                                {
                                    Token _token = new Token(ClassPart.classPart(_word), _word, (lineCount + 1));
                                    tokens.Add(_token);
                                    _word = string.Empty;
                                }
                                if (((letterCount + 1) < word.Length) && (word[letterCount + 1] == '='))
                                {
                                    // += or -=
                                    string temporary = letter.ToString() + '=';
                                    Token _token = new Token(ClassPart.classPart(temporary), temporary, (lineCount + 1));
                                    tokens.Add(_token);
                                    letterCount++;
                                }
                                else if (((letterCount + 1) < word.Length) && (word[letterCount + 1] == letter))
                                {
                                    // ++ or --
                                    string temporary = letter.ToString() + letter;
                                    Token _token = new Token(ClassPart.classPart(temporary), temporary, (lineCount + 1));
                                    tokens.Add(_token);
                                    letterCount++;
                                }
                                else
                                {
                                    string temporary = letter.ToString();
                                    Token _token = new Token(ClassPart.classPart(temporary), temporary, (lineCount + 1));
                                    tokens.Add(_token);
                                }
                                break;
                            case '&': case '|':
                                // "|-" opens a multi-line comment.
                                if (letter == '|' && letterCount + 1 < word.Length && word[letterCount + 1] == '-')
                                {
                                    MultiLineComment = true;
                                    letterCount += 1;
                                    continue;
                                }
                                if (String) { StringConstant += letter; continue; }
                                if (!(_word == string.Empty))
                                {
                                    Token _token = new Token(ClassPart.classPart(_word), _word, (lineCount + 1));
                                    tokens.Add(_token);
                                    _word = string.Empty;
                                }
                                if (((letterCount + 1) < word.Length) && (word[letterCount + 1] == letter))
                                {
                                    // && or ||
                                    string temporary = letter.ToString() + letter;
                                    Token _token = new Token(ClassPart.classPart(temporary), temporary, (lineCount + 1));
                                    tokens.Add(_token);
                                    letterCount++;
                                }
                                else
                                {
                                    string temporary = letter.ToString();
                                    Token _token = new Token(ClassPart.classPart(temporary), temporary, (lineCount + 1));
                                    tokens.Add(_token);
                                }
                                break;
                            case '"':
                                if (!(_word == string.Empty))
                                {
                                    Token _token = new Token(ClassPart.classPart(_word), _word, (lineCount + 1));
                                    tokens.Add(_token);
                                    _word = string.Empty;
                                }
                                if (String)
                                {
                                    // Closing quote: emit the completed string constant.
                                    StringConstant += letter;
                                    Token _token = new Token(ClassPart.StringConstant, StringConstant, (lineCount + 1));
                                    tokens.Add(_token);
                                    StringConstant = string.Empty;
                                    String = false;
                                    continue;
                                }
                                else
                                {
                                    StringConstant += letter;
                                    String = true;
                                }
                                break;
                            case '\'':
                                if (String) { StringConstant += letter; continue; }
                                if (!(_word == string.Empty))
                                {
                                    Token _token = new Token(ClassPart.classPart(_word), _word, (lineCount + 1));
                                    tokens.Add(_token);
                                    _word = string.Empty;
                                }
                                if (Char)
                                {
                                    // Closing quote: emit the completed character constant.
                                    CharConstant += letter;
                                    Char = false;
                                    Token _token = new Token(ClassPart.classPart(CharConstant), CharConstant, (lineCount + 1));
                                    tokens.Add(_token);
                                    CharConstant = string.Empty;
                                }
                                else
                                {
                                    if (!(letterCount + 1 < word.Length))
                                    {
                                        if (!(wordCount + 1 < words.Length))
                                        {
                                            // A lone quote at the end of the line.
                                            Token ___token = new Token(ClassPart.classPart("'"), "'", (lineCount + 1));
                                            tokens.Add(___token);
                                            Char = false;
                                            CharConstant = string.Empty;
                                            continue;
                                        }
                                        else
                                        {
                                            CharConstant += letter;
                                            Char = true;
                                            continue;
                                        }
                                    }
                                    // Collect up to three characters while looking for the closing quote.
                                    int _index = 1;
                                    string _temporary = letter.ToString();
                                    while ((letterCount + _index < word.Length) && word[letterCount + _index] != '\'' && _index < 3)
                                    {
                                        _temporary += word[letterCount + _index];
                                        _index++;
                                    }
                                    if (_temporary.Length == 1)
                                    {
                                        Token ___token = new Token(ClassPart.classPart(_temporary), _temporary, (lineCount + 1));
                                        tokens.Add(___token);
                                        CharConstant = string.Empty;
                                        Char = false;
                                        continue;
                                    }
                                    if ((!(_temporary[1] == '\\')) && (_temporary.Length > 3))
                                    {
                                        // Not an escape sequence and too long to be a char constant.
                                        Token ___token = new Token(ClassPart.classPart("'"), "'", (lineCount + 1));
                                        tokens.Add(___token);
                                        Char = false;
                                        CharConstant = string.Empty;
                                        continue;
                                    }
                                    if ((letterCount + _index < word.Length) && word[letterCount + _index] == '\'')
                                        _temporary += '\'';
                                    Token __token = new Token(ClassPart.classPart(_temporary), _temporary, (lineCount + 1));
                                    tokens.Add(__token);
                                    letterCount += ((_index < 3) || (_temporary[_temporary.Length - 1] == '\'')) ? _index : _index - 1;
                                }
                                break;
                            default:
                                if (String) { StringConstant += letter; break; }
                                _word += letter;
                                break;
                        }
                    }
                    else
                    {
                        if (String) { StringConstant += letter; continue; }
                        if (Char)
                        {
                            // A non-punctuator after a lone quote: emit the quote by itself.
                            Token __token = new Token(ClassPart.classPart("'"), "'", (lineCount + 1));
                            tokens.Add(__token);
                            CharConstant = string.Empty;
                            Char = false;
                        }
                        _word += letter;
                    }
                }
                if (!(_word == string.Empty))
                {
                    Token token = new Token(ClassPart.classPart(_word), _word, (lineCount + 1));
                    tokens.Add(token);
                    _word = string.Empty;
                }
            }
            else
            {
                // No punctuation in the word: emit it whole.
                if (Char)
                {
                    Token _token = new Token(ClassPart.classPart("'"), "'", (lineCount + 1));
                    tokens.Add(_token);
                    CharConstant = string.Empty;
                    Char = false;
                }
                Token token = new Token(ClassPart.classPart(word), word, (lineCount + 1));
                tokens.Add(token);
            }
        }
    }
    Token[] array = tokens.ToArray();
    string[] tokenset = new string[array.Length];
    for (int index = 0; index < array.Length; index++)
        tokenset[index] = array[index].token;
    Filling.Write(outputFileName, tokenset);
}
public static void SetUpMainParser(Token[] tokenSet)
{
    MainParser = new Parser(tokenSet);
}
private bool IsType(Token t)
{
    switch (t.type)
    {
        case Token.Type.KW_STRUCT:
        case Token.Type.KW_ENUM:
            return true;
        default:
            Symbols.Type tt = null;
            if (tstack.ContainsType(t.GetStrVal()))
            {
                tt = tstack.GetType(t.GetStrVal());
            }
            return tt != null && !(tt is Symbols.Func);
    }
}
// common: release (reset) the Token; required each time the run is restarted
public void rebuiltToken()
{
    cACharacter = Convert.ToChar(32); // 32 = ' ' (space)
    cAC_int = 32;
    this.myToken = new Token();
}
private void handlePartner(Token token)
{
    switch (token.TokenType)
    {
        case TokenType.STRINGOPENCLOSE:
            // String delimiters come in pairs: the second one found is linked back
            // to the first, and both record each other as partners.
            if (this.stringList.Count > 0)
            {
                token.Partner = this.stringList[0];
                this.tokenList.Find(this.stringList[0]).Value.Partner = token;
                this.stringList.RemoveAt(0);
            }
            else
            {
                this.stringList.Add(token);
            }
            break;
    }
}
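// Illustrative trace (hypothetical input, not project output): lexing a line containing
// "hi" produces two STRINGOPENCLOSE tokens. The first finds stringList empty and parks
// itself there; the second finds it non-empty, takes the first as its Partner, links the
// first back to itself via tokenList.Find, and removes the entry, leaving the quote pair
// cross-linked.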