//public Token() { }

public Token(int linenr, int posnr, TokenType token, string value, int level, Token partner)
{
    LineNr = linenr;
    PositionNr = posnr;
    Type = token;
    Value = value;
    Level = level;
    Partner = partner;
}
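The constructor above implies a token shape roughly like the following. This is a sketch reconstructed from the parameter list alone; the auto-property form is an assumption.

// sketch of the Token class the constructor belongs to; property names come from the
// constructor body above, everything else is assumed
public class Token
{
    public int LineNr { get; set; }
    public int PositionNr { get; set; }
    public TokenType Type { get; set; }
    public string Value { get; set; }
    public int Level { get; set; }
    public Token Partner { get; set; }   // matching token, e.g. an "if" paired with its "else"
}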
private List<Token> tokenize(string[] lines)
{
    int posNr = 0;
    int lineNr = 0;
    int level = 0;

    foreach (string line in lines)
    {
        string cleanLine = Regex.Replace(line, @"\t|\n|\r", "");
        lineNr++;
        if (cleanLine == "") continue; // skip empty lines

        string[] words = cleanLine.Split(' ');
        for (int i = 0; i < words.Length; i++)
        {
            switch (words[i])
            {
                // keywords
                case "print": tokens.Add(new Token(lineNr, posNr, TokenType.PRINT, "print", level, null)); break;
                case "for": tokens.Add(new Token(lineNr, posNr, TokenType.FOR, "for", level, null)); break;
                case "while": tokens.Add(new Token(lineNr, posNr, TokenType.WHILE, "while", level, null)); break;
                case "if":
                    tokens.Add(new Token(lineNr, posNr, TokenType.IF, "if", level, null));
                    ifTokens.Push(tokens.LastOrDefault());
                    break;
                case "else":
                    if (ifTokens.Count == 0)
                    {
                        Console.WriteLine("Error: IF statement not found");
                    }
                    else
                    {
                        // pair the else with the most recent unmatched if
                        Token partner = ifTokens.Pop();
                        tokens.Add(new Token(lineNr, posNr, TokenType.ELSE, "else", level, partner));
                        partner.Partner = tokens.LastOrDefault();
                    }
                    break;

                // brackets and parentheses; unmatched openers are tracked on needsClosure
                case "{":
                    level++;
                    Token token = new Token(lineNr, posNr, TokenType.BRACKETOPEN, "{", level, null);
                    needsClosure.Push(token);
                    tokens.Add(token);
                    break;
                case "}":
                    if (needsClosure.Peek().Type != TokenType.BRACKETOPEN)
                    {
                        Console.WriteLine("Error: } not found");
                    }
                    else
                    {
                        level--;
                        Token partner = needsClosure.Pop();
                        tokens.Add(new Token(lineNr, posNr, TokenType.BRACKETCLOSE, "}", level, partner));
                        partner.Partner = tokens.LastOrDefault();
                    }
                    break;
                case "(":
                    tokens.Add(new Token(lineNr, posNr, TokenType.ELIPSISOPEN, "(", level, null));
                    needsClosure.Push(tokens.LastOrDefault());
                    break;
                case ")":
                    if (needsClosure.Peek().Type != TokenType.ELIPSISOPEN)
                    {
                        Console.WriteLine("Error: ( not found");
                    }
                    else
                    {
                        Token partner = needsClosure.Pop();
                        tokens.Add(new Token(lineNr, posNr, TokenType.ELIPSISCLOSE, ")", level, partner));
                        partner.Partner = tokens.LastOrDefault();
                    }
                    break;

                // operators and punctuation
                case ";": tokens.Add(new Token(lineNr, posNr, TokenType.SEMICOLON, ";", level, null)); break;
                case "=": tokens.Add(new Token(lineNr, posNr, TokenType.EQUALS, "=", level, null)); break;
                case "==": tokens.Add(new Token(lineNr, posNr, TokenType.COMPARE, "==", level, null)); break;
                case "<=": tokens.Add(new Token(lineNr, posNr, TokenType.LESSEREQUALS, "<=", level, null)); break;
                case ">=": tokens.Add(new Token(lineNr, posNr, TokenType.GREATEREQUALS, ">=", level, null)); break;
                case "!=": tokens.Add(new Token(lineNr, posNr, TokenType.NOTEQUALS, "!=", level, null)); break;
                case "+": tokens.Add(new Token(lineNr, posNr, TokenType.PLUS, "+", level, null)); break;
                case "-": tokens.Add(new Token(lineNr, posNr, TokenType.MINUS, "-", level, null)); break;
                case "*": tokens.Add(new Token(lineNr, posNr, TokenType.MULTIPLY, "*", level, null)); break;
                case "/": tokens.Add(new Token(lineNr, posNr, TokenType.DIVIDE, "/", level, null)); break;
                case "++": tokens.Add(new Token(lineNr, posNr, TokenType.INCREMENT, "++", level, null)); break;
                case "--": tokens.Add(new Token(lineNr, posNr, TokenType.DECREMENT, "--", level, null)); break;

                // anything else is a number or an identifier
                default:
                    string temp = Regex.Replace(words[i], @";", "");
                    int value;
                    if (int.TryParse(temp, out value))
                    {
                        tokens.Add(new Token(lineNr, posNr, TokenType.NUMBER, value.ToString(), level, null));
                    }
                    else
                    {
                        tokens.Add(new Token(lineNr, posNr, TokenType.IDENTIFIER, temp, level, null));
                    }
                    break;
            }
            posNr = posNr + words[i].Length + 1;
        }
    }

    if (needsClosure.Count != 0)
    {
        Console.WriteLine("Compile error: \"} or ) \" missing!");
    }
    connectTokens();
    return tokens;
}
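Because the loop splits each cleaned line on single spaces, this tokenizer only recognizes tokens that are space-separated in the source. A minimal call is sketched below; tokenize is private in the snippet and the enclosing class name Tokenizer is an assumption, so the direct call is for illustration only.

// hypothetical usage of the tokenizer above; "Tokenizer" is an assumed class name
string[] source =
{
    "x = 1 ;",
    "if ( x == 1 ) {",
    "print x ;",
    "}"
};
var tokenizer = new Tokenizer();
List<Token> result = tokenizer.tokenize(source);   // tokens must be separated by spaces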
public static Color GetColor(Token token)
{
    if (!s_colors.ContainsKey(token.Type))
        return Color.Black;
    return s_colors[token.Type];
}
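GetColor falls back to Color.Black for any token type that has no entry in s_colors. The table itself is not shown; a plausible shape, purely as a sketch (the TokenType members are borrowed from the tokenizer above, and Color is System.Drawing.Color), would be:

// assumed definition of the lookup table consulted by GetColor; not part of the snippet
private static readonly Dictionary<TokenType, Color> s_colors = new Dictionary<TokenType, Color>
{
    { TokenType.IF,         Color.Blue },
    { TokenType.WHILE,      Color.Blue },
    { TokenType.NUMBER,     Color.DarkOrange },
    { TokenType.IDENTIFIER, Color.DarkGreen }
};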
public void AddAstStringLiteralValueNode(Token t)
{
    var node = new AstStringLiteralExpression(t.Attribute);
    PushNode(node);
}

public void AddAstIntegerValueNode(Token t)
{
    var node = new AstIntegerValueExpression(t.Attribute);
    PushNode(node);
}

public void AddAstIdNode(Token t)
{
    var node = new AstIdExpression(t.Attribute);
    PushNode(node);
}
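The three helpers above each wrap a token's Attribute in the matching AST expression node and push it. A builder consuming tokens might dispatch over them roughly as in the sketch below; the TokenType member names and the Type property on Token are guesses, since the types behind this snippet are not shown.

// hypothetical dispatcher over the helpers above; member names are assumptions
public void AddAstValueNode(Token t)
{
    switch (t.Type)
    {
        case TokenType.STRING_LITERAL: AddAstStringLiteralValueNode(t); break;
        case TokenType.INTEGER:        AddAstIntegerValueNode(t);       break;
        case TokenType.IDENTIFIER:     AddAstIdNode(t);                 break;
        default: throw new InvalidOperationException("Not a value token: " + t.Type);
    }
}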
private Token SendBlockClosingTokens(Token currToken)
{
    if (currIndentationLevel > 0)
    {
        var result = new Token(TokenType.BLOCK_END, currIndentationLevel.ToString());
        LeaveBlock();
        return result;
    }
    return currToken;
}
private Token ReadBlockStart(Token currToken)
{
    currToken = GetNextNotSpace();
    if (currToken.Type != TokenType.LINE_END)
    {
        return GetErrorToken("Expected line ending");
    }
    EnterBlock();
    try
    {
        ReadIndents();
    }
    catch (IndentationException e)
    {
        return GetErrorToken(e.Message);
    }
    return new Token(TokenType.BLOCK_START, currIndentationLevel.ToString());
}
private Token CheckIndentation(Token currToken)
{
    ushort counter = 0;
    Token t;
    do
    {
        t = baseScanner.GetForwardToken();
        if (t.Type == TokenType.SPACE)
        {
            baseScanner.GetNextToken();
            ++counter;
        }
        else if (t.Type == TokenType.LINE_END)
        {
            // empty line - skip it and start counting spaces again
            baseScanner.GetNextToken();
            counter = 0;
        }
    } while (t.Type == TokenType.SPACE || t.Type == TokenType.LINE_END);

    if (counter == GetSpacesCount())
    {
        // right level - save the next token
        nextTokens.Enqueue(baseScanner.GetNextToken());
        return currToken;
    }

    if ((counter < GetSpacesCount()) && ((counter % indentSize) == 0))
    {
        // level decreased - queue one BLOCK_END per level that was closed
        var diff = Convert.ToInt16((GetSpacesCount() - counter) / indentSize);
        for (var i = 0; i < diff; ++i)
        {
            counter -= indentSize;
            var result = new Token(TokenType.BLOCK_END, currIndentationLevel.ToString());
            LeaveBlock();
            nextTokens.Enqueue(result);
        }
        return currToken;
    }

    // wrong level - return an error token
    return GetErrorToken("Wrong indentation level.");
}
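Together, ReadBlockStart and CheckIndentation translate indentation changes into explicit BLOCK_START/BLOCK_END tokens, so a dedent across two levels produces two BLOCK_END tokens in a row. The trace below sketches that behaviour for an assumed 4-space indent; the surface syntax is illustrative, not taken from the snippets.

// assumed input (indentSize = 4):
//
//     if cond:
//         if other:
//             stmt
//     next
//
// sketch of the emitted stream around the dedent:
//     ... BLOCK_START("1") ... BLOCK_START("2") ... stmt ... BLOCK_END("2") BLOCK_END("1") ... next ...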
public Token scan()
{
    // skip whitespace, counting newlines
    for (; ; readch())
    {
        if (peek == ' ' || peek == '\t') continue;
        else if (peek == '\n') line++;
        else break;
    }

    // comments: "//" to end of line, "/* ... */" blocks
    if (peek == '/')
    {
        readch();
        if (peek == '/')
        {
            while (peek != '\n') readch();
            line++;
        }
        else if (peek == '*')
        {
            readch();
            while (true)
            {
                if (peek == '*')
                {
                    readch();
                    if (peek == '/') break;
                }
                else if (peek == 65535)  // end of input inside a block comment
                    return new Word("_THE_END_", Tag._THE_END_);
                else readch();
            }
        }
        else return new Token('/');
    }

    /*
    switch (peek)
    {
        case '&': if (readch('&')) return Word.and; else return new Token('&');
        case '|': if (readch('|')) return Word.or;  else return new Token('|');
        case '=': if (readch('=')) return Word.eq;  else return new Token('=');
        case '!': if (readch('=')) return Word.ne;  else return new Token('!');
        case '<': if (readch('=')) return Word.le;  else return new Token('<');
        case '>': if (readch('=')) return Word.ge;  else return new Token('>');
    }
    */

    // numeric literals: an integer part, then an optional fractional part
    if (Char.IsDigit(peek))
    {
        int v = 0;
        do
        {
            v = 10 * v + (peek - '0');  // convert the digit character to its numeric value
            readch();
        } while (Char.IsDigit(peek));
        if (peek != '.') return new Num(v);

        double x = v;
        double d = 10.0;
        for (; ; )
        {
            readch();
            if (!Char.IsDigit(peek)) break;
            x = x + (peek - '0') / d;
            d *= 10;
        }
        return new Real(x);
    }

    // identifiers and keywords; '.' is stored as '_'
    if (Char.IsLetter(peek) || peek == '.')
    {
        StringBuilder b = new StringBuilder();
        do
        {
            if (peek == '.') b.Append('_');
            else b.Append(peek);
            readch();
        } while (Char.IsLetter(peek) || peek == '_');

        string s = b.ToString();
        Word w = (Word)words[s];
        if (w != null) return w;
        w = new Word(s, Tag.ID);
        words.Add(s, w);
        return w;
    }

    // any other single character becomes its own token
    Token token = new Token(peek);
    peek = ' ';
    return token;
}
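A driver for this scanner would typically pull tokens until the "_THE_END_" word comes back. The sketch below assumes the method lives on a Lexer class and that Token exposes the tag it was constructed with as a Tag property; neither name is confirmed by the snippet.

// hypothetical driver loop; Lexer, sourceText and the Tag property are assumptions
var lex = new Lexer(sourceText);
for (Token t = lex.scan(); t.Tag != Tag._THE_END_; t = lex.scan())
{
    Console.WriteLine(t);
}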
public static bool IsCorrectToken(Token t) { return (t.Type != TokenType.EOF) && (t.Type != TokenType.ERROR); }
public virtual bool IsMatch(Token currentToken) { return false; }
public virtual void Compile(Token currentToken) { }
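IsMatch and Compile are virtual no-ops, which suggests that concrete compilers override them once per construct they handle. A hypothetical override could look like this; the base-class name CompilerBase, the PrintCompiler class, and TokenType.PRINT are illustrative assumptions.

// hypothetical subclass; all names here are assumptions made for illustration
public class PrintCompiler : CompilerBase
{
    public override bool IsMatch(Token currentToken)
    {
        return currentToken.Type == TokenType.PRINT;
    }

    public override void Compile(Token currentToken)
    {
        // emit whatever the target representation needs for a print statement
    }
}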