public override Token tokenize(Tokenizer t)
{
    // Tokenizes a hex color literal of the form "#rrggbb".
    // Short literals are zero-padded on the right to the full 7-char width.
    Token token = new Token();
    token.type = "Hex";
    token.value = "";
    token.position = t.currentPos;
    token.lineNumber = t.lineNumber;

    // BUG FIX: the allowed set previously was "abcdef123456789", which
    // omits '0' and uppercase A-F, so colors like "#FF0000" or "#00ff00"
    // were truncated at the first digit not in the set.
    string hexallow = "0123456789abcdefABCDEF";

    if (t.peek() == '#')
    {
        token.value += t.next(); // consume '#'
        // Read at most 6 hex digits (7 chars total including the '#').
        while (t.hasNext() && hexallow.Contains(t.peek().ToString()) && token.value.Length < 7)
        {
            token.value += t.next();
        }
    }

    // Pad with trailing zeros to the full "#rrggbb" width (matches the
    // original behavior, including when no '#' was present at all).
    while (token.value.Length < 7)
    {
        token.value += '0';
    }

    return token;
}
public override Token tokenize(Tokenizer t)
{
    // Tokenizes an XML tag: "<name>" (open tag) or "</name>" (close tag).
    // Returns null when a '/' appears anywhere other than right after '<'.
    Token token = new Token();
    token.value = "";
    token.type = "open tag xml";
    token.position = t.currentPos;
    token.lineNumber = t.lineNumber;
    token.value += t.next(); // consume '<'

    while (t.hasNext() && (Char.IsLetterOrDigit(t.peek()) || t.peek() == '>' || t.peek() == '/'))
    {
        if (t.peek() == '/')
        {
            // '/' is only legal immediately after '<' (closing tag form).
            if (token.value == "<")
            {
                token.type = "close tag xml";
            }
            else
            {
                return null;
            }
        }
        token.value += t.next();
        // BUG FIX: the lookahead after consuming a character was not
        // guarded by hasNext(), so a tag ending the input stream caused
        // a peek past the end of the buffer.
        if (t.hasNext() && t.peek() == '>')
        {
            token.value += t.next(); // consume the closing '>'
            break;
        }
    }
    return token;
}
public override Token tokenize(Tokenizer t)
{
    // Produces a "Hash" token: the introducing character plus the run of
    // letters and digits that immediately follows it.
    var hashToken = new Token();
    hashToken.type = "Hash";
    hashToken.position = t.currentPos;
    hashToken.lineNumber = t.lineNumber;
    hashToken.value = "" + t.next(); // leading character (e.g. '#')

    while (t.hasNext() && Char.IsLetterOrDigit(t.peek()))
    {
        hashToken.value += t.next();
    }

    return hashToken;
}
public override Token tokenize(Tokenizer t)
{
    // Emits a single-character "Bracts" (bracket) token for whatever
    // character is currently under the cursor.
    var bracket = new Token();
    bracket.type = "Bracts";
    bracket.position = t.currentPos;
    bracket.lineNumber = t.lineNumber;
    bracket.value = "" + t.next();
    return bracket;
}
public override Token tokenize(Tokenizer t)
{
    // Collects everything up to (but not including) the next newline as a
    // single one-line-comment token. The '\n' itself is left unconsumed.
    var comment = new Token();
    comment.type = "One Line Comments";
    comment.value = "";
    comment.position = t.currentPos;
    comment.lineNumber = t.lineNumber;

    for (; t.hasNext() && t.peek() != '\n'; )
    {
        comment.value += t.next();
    }

    return comment;
}
public override Token tokenize(Tokenizer tokenizer)
{
    // Gathers a run of consecutive whitespace characters into one
    // "space" token.
    // NOTE(review): position is recorded as currentPos + 1 here, unlike
    // most sibling tokenizers — confirm the off-by-one is intentional
    // (the string tokenizer uses the same +1 convention).
    var whitespace = new Token();
    whitespace.type = "space";
    whitespace.value = "";
    whitespace.position = tokenizer.currentPos + 1;
    whitespace.lineNumber = tokenizer.lineNumber;

    while (tokenizer.hasNext())
    {
        if (!Char.IsWhiteSpace(tokenizer.peek()))
        {
            break;
        }
        whitespace.value += tokenizer.next();
    }

    return whitespace;
}
public override Token tokenize(Tokenizer t)
{
    // Consumes OpLen characters from the stream as a single
    // "Relational operator" token (OpLen is presumably set by the
    // matcher that recognized the operator — TODO confirm).
    Token token = new Token();
    token.value = "";
    token.type = "Relational operator";
    token.position = t.currentPos;
    token.lineNumber = t.lineNumber;

    // BUG FIX: the loop used to decrement the OpLen field itself,
    // leaving it at 0 afterwards and silently emitting empty tokens on
    // any later call unless the field was re-set externally. Count down
    // on a local copy so the field is left untouched.
    int remaining = OpLen;
    while (t.hasNext() && remaining > 0)
    {
        token.value += t.next();
        remaining--;
    }

    return token;
}
public override Token tokenize(Tokenizer t)
{
    // Tokenizes a multi-line comment, up to and including the closing "*/".
    Token token = new Token();
    token.value = "";
    token.type = "Multi Line Comments";
    token.position = t.currentPos;
    token.lineNumber = t.lineNumber;

    // Copy characters until the stream is positioned at "*/".
    while (t.hasNext() && (t.peek() != '*' || t.peek(2) != '/'))
    {
        token.value += t.next();
    }

    // BUG FIX: the closing "*/" was appended to the token text but never
    // consumed from the stream, so the next tokenizer call saw a stray
    // "*/". Consume the two delimiter characters instead of fabricating
    // them. (An unterminated comment at end of input now simply ends
    // without a "*/" rather than getting a fake one appended.)
    if (t.hasNext())
    {
        token.value += t.next(); // '*'
    }
    if (t.hasNext())
    {
        token.value += t.next(); // '/'
    }

    return token;
}
public override Token tokenize(Tokenizer t)
{
    // Tokenizes a numeric literal: digits, optionally containing a
    // decimal point and/or a trailing 'f'; either marker switches the
    // token type to "float". Returns null on malformed input.
    Token token = new Token();
    int count = 0; // number of '.'/'f' marker characters seen so far
    token.value = "";
    token.type = "Integer";
    token.position = t.currentPos;
    token.lineNumber = t.lineNumber;

    while (t.hasNext() && (char.IsDigit(t.peek()) || t.peek() == '.' || t.peek() == 'f'))
    {
        if (t.peek() == '.' || t.peek() == 'f')
        {
            token.type = "float";
            count++;
            // Three or more markers (e.g. "1.2.3f") is not a number.
            if (count == 3)
            {
                return null;
            }
        }
        if (t.peek() == 'f')
        {
            // 'f' must terminate the literal; reject "1f2"-style input.
            // NOTE(review): peek(2) assumes lookahead past the 'f' is
            // always available — confirm Tokenizer's end-of-input behavior.
            if (t.peek(2) != ' ')
            {
                return null;
            }
        }
        token.value += t.next();
    }

    // BUG FIX: token.value[token.value.Length - 1] was evaluated
    // unconditionally and threw IndexOutOfRangeException whenever no
    // characters were consumed; guard the normalization behind a
    // non-empty check.
    if (token.value.Length > 0)
    {
        Char lastchar = token.value[token.value.Length - 1];
        if (lastchar != 'f' && token.type == "float")
        {
            token.value += 'f'; // normalize floats to end in 'f'
        }
    }

    return token;
}
public override Token tokenize(Tokenizer tokenizer)
{
    // Tokenizes a double-quoted string literal, including both quotes.
    // On failure, rewinds the tokenizer to where it started and
    // returns null.
    var token = new Token();
    token.value = "";
    token.type = "string";
    token.position = tokenizer.currentPos + 1;
    token.lineNumber = tokenizer.lineNumber;

    while (tokenizer.hasNext())
    {
        char ch = tokenizer.next();
        token.value += ch;
        // BUG FIX: the loop used to break at ANY '"', and the final check
        // then rejected the token whenever that quote was escaped — so
        // strings containing \" could never be tokenized at all. Only
        // stop at an unescaped closing quote.
        // NOTE(review): an escaped backslash before the quote ("a\\")
        // is still misread as an escaped quote — needs a full
        // backslash-run count to handle properly.
        if (token.value.Length > 1 && ch == '"'
            && token.value[token.value.Length - 2] != '\\')
        {
            break;
        }
    }

    // A string needs at least the two quote characters; otherwise rewind
    // (position was recorded as currentPos + 1, hence the -1).
    if (token.value.Length < 2)
    {
        tokenizer.currentPos = token.position - 1;
        return null;
    }

    char last = token.value[token.value.Length - 1];
    char beforeLast = token.value[token.value.Length - 2];
    if (last == '"' && beforeLast != '\\')
    {
        return token;
    }

    // Reached end of input without an unescaped closing quote.
    tokenizer.currentPos = token.position - 1;
    return null;
}