/// <summary>
/// Returns the next token without consuming it. Repeated calls yield the
/// same token until <c>Next()</c> is called.
/// </summary>
/// <returns>The upcoming token; once the end token has been produced it is returned forever.</returns>
public virtual TsonToken PeekNext()
{
    // After end-of-input, the end token is sticky.
    if (endToken != null)
        return endToken;

    // Pull one token ahead and park it so Next() can hand it back later.
    if (nextToken == null)
        nextToken = Next();

    return nextToken;
}
/// <summary>
/// Creates a parse exception for the given token with an empty message;
/// delegates to the (token, message) constructor, which records the token's location.
/// </summary>
/// <param name="token">The token at which parsing failed.</param>
public TsonParseException(TsonToken token) : this(token, "") { }
/// <summary>
/// Produces the next token from the input stream. Emits whitespace, comment
/// (# to end of line), quoted string (with backslash escapes including \uXXXX),
/// punctuation, and bare-string tokens; returns a sticky end token at end of
/// input and an error token on malformed input.
/// </summary>
/// <returns>The next <see cref="TsonToken"/>; never null.</returns>
public virtual TsonToken Next()
{
    // Are we all done?
    if (endToken != null)
        return endToken;

    TsonToken token = null;

    // If we already cached a token (e.g. via PeekNext or the trailing-whitespace
    // lookahead below), hand it back first.
    if (nextToken != null)
    {
        token = nextToken;
        nextToken = null;
        return token;
    }

    TextLocation tokenLocation;
    char c = CurrentChar();

    if (c == '\0')
    {
        // '\0' marks end of input; cache the end token so it is returned forever.
        tokenLocation = currentLocation;
        MoveToNextChar();
        return (endToken = TsonToken.End(tokenLocation));
    }
    else if (Char.IsWhiteSpace(c))
    {
        // Coalesce a run of whitespace into a single token.
        tokenLocation = currentLocation;
        do
        {
            MoveToNextChar();
        }
        while (Char.IsWhiteSpace(CurrentChar()));
        return TsonToken.Whitespace(tokenLocation, SliceInput(ref tokenLocation, ref currentLocation));
    }
    else if (c == '#')
    {
        // Comment runs from '#' to end of line (or end of input).
        tokenLocation = currentLocation;
        MoveToNextChar();
        while ((c = CurrentChar()) != '\0' && c != '\n')
        {
            MoveToNextChar();
        }
        return TsonToken.Comment(tokenLocation, SliceInput(ref tokenLocation, ref currentLocation));
    }
    else if (c == '"')
    {
        // Quoted string: sb accumulates the token text (including the quotes);
        // sb2 is scratch for the 4 hex digits of a \uXXXX escape.
        tokenLocation = currentLocation;
        StringBuilder sb = new StringBuilder();
        StringBuilder sb2 = new StringBuilder(4);
        sb.Append(c);
        MoveToNextChar();
        while ((c = CurrentChar()) != '\0' && c != '"')
        {
            if (c == '\\')
            {
                MoveToNextChar();
                // The character after the backslash must be a recognized escape.
                if ((c = CurrentChar()) == '\0' || !IsTsonControl(c))
                    return TsonToken.Error(currentLocation);
                switch (c)
                {
                    case '"':
                    case '\\':
                    case '/':
                        // These escapes stand for themselves.
                        sb.Append(c);
                        break;
                    case 'b': sb.Append('\b'); break;
                    case 'f': sb.Append('\f'); break;
                    case 'n': sb.Append('\n'); break;
                    case 'r': sb.Append('\r'); break;
                    case 't': sb.Append('\t'); break;
                    case 'u':
                        // \uXXXX: read exactly 4 characters and parse them as hex.
                        sb2.Clear();
                        ushort n;
                        TextLocation digitLocation = currentLocation;
                        for (int i = 0; i < 4; i++)
                        {
                            MoveToNextChar();
                            // Neither end-of-input nor a closing quote may appear mid-escape.
                            if ((c = CurrentChar()) == '\0' || c == '"')
                                return TsonToken.Error(currentLocation);
                            sb2.Append(CurrentChar());
                        }
                        // TryParse rejects non-hex digits the loop above let through.
                        if (!UInt16.TryParse(sb2.ToString(), NumberStyles.AllowHexSpecifier, null, out n))
                            return TsonToken.Error(digitLocation);
                        sb.Append((char)n);
                        break;
                }
            }
            else
            {
                sb.Append(c);
            }
            MoveToNextChar();
        }
        if (c == '"')
        {
            // Include the closing quote in the token data.
            sb.Append(c);
            MoveToNextChar();
            return TsonToken.String(tokenLocation, sb.ToString());
        }
        else
            // Input ended before the string was closed.
            return TsonToken.Error(currentLocation);
    }

    // Single-character punctuation tokens.
    switch ((char)c)
    {
        case ',': token = TsonToken.Comma(currentLocation); break;
        case ':': token = TsonToken.Colon(currentLocation); break;
        case '[': token = TsonToken.LeftSquareBrace(currentLocation); break;
        case ']': token = TsonToken.RightSquareBrace(currentLocation); break;
        case '{': token = TsonToken.LeftCurlyBrace(currentLocation); break;
        case '}': token = TsonToken.RightCurlyBrace(currentLocation); break;
    }

    if (token != null)
    {
        MoveToNextChar();
        return token;
    }

    // Anything else starts a bare (unquoted) string; it runs until punctuation,
    // end of input, or whitespace that turns out to be trailing.
    tokenLocation = currentLocation;

    for (;;)
    {
        MoveToNextChar();
        if ((c = CurrentChar()) == '\0' || IsTsonPunctuation(c))
            break;
        if (Char.IsWhiteSpace(c))
        {
            // Scan past the whitespace run to see what follows it.
            var whitespaceLocation = currentLocation;
            do
            {
                MoveToNextChar();
            }
            while (Char.IsWhiteSpace(CurrentChar()));
            if ((c = CurrentChar()) == '\0' || IsTsonPunctuation(c))
            {
                // This is trailing whitespace; make it appear as the next token
                this.nextToken = TsonToken.Whitespace(whitespaceLocation, SliceInput(ref whitespaceLocation, ref currentLocation));
                return TsonToken.String(tokenLocation, SliceInput(ref tokenLocation, ref whitespaceLocation));
            }
            // Otherwise, just keep processing the string...
        }
    }
    return TsonToken.String(tokenLocation, SliceInput(ref tokenLocation, ref currentLocation));
}
/// <summary>
/// Creates a parse exception for the given token, recording the token's
/// location in <see cref="Exception.Data"/> under the "Location" key.
/// </summary>
/// <param name="token">The token at which parsing failed. Must not be null.</param>
/// <param name="message">Human-readable description of the failure.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="token"/> is null.</exception>
public TsonParseException(TsonToken token, string message) : base(message)
{
    // Guard explicitly: the original code would throw a confusing
    // NullReferenceException from token.Location on a null token.
    if (token == null)
        throw new ArgumentNullException(nameof(token));

    this.Data["Location"] = token.Location;
}
/// <summary>
/// Creates a format exception for the given token with an empty message;
/// delegates to the (token, message) constructor.
/// </summary>
/// <param name="token">The token at which the format violation was detected.</param>
public TsonFormatException(TsonToken token) : this(token, "") { }
/// <summary>
/// Creates a format exception for the given token, recording the token's
/// location in <see cref="Exception.Data"/> under the "Location" key.
/// </summary>
/// <param name="token">The token at which the format violation was detected. Must not be null.</param>
/// <param name="message">Human-readable description of the failure.</param>
/// <param name="innerException">The underlying cause, or null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="token"/> is null.</exception>
public TsonFormatException(TsonToken token, string message, Exception innerException) : base(message, innerException)
{
    // Guard explicitly: the original code would throw a confusing
    // NullReferenceException from token.Location on a null token.
    if (token == null)
        throw new ArgumentNullException(nameof(token));

    this.Data["Location"] = token.Location;
}
/// <summary>
/// Creates a format exception for the given token with no inner exception;
/// delegates to the (token, message, innerException) constructor.
/// </summary>
/// <param name="token">The token at which the format violation was detected.</param>
/// <param name="message">Human-readable description of the failure.</param>
public TsonFormatException(TsonToken token, string message) : this(token, message, null) { }
/// <summary>
/// Asserts that the tokenizer yields exactly the expected token sequence:
/// first verifies PeekNext() returns the first token without consuming it,
/// then consumes and compares every expected token, and finally checks that
/// both Next() and PeekNext() report end-of-input.
/// </summary>
/// <param name="tokenizer">Tokenizer under test.</param>
/// <param name="expectedTokens">Expected tokens in order; must be non-empty.</param>
static void AssertTokens(TsonTokenizer tokenizer, TsonToken[] expectedTokens)
{
    // Peeking must expose the first token without advancing the stream.
    TsonToken peeked = tokenizer.PeekNext();
    TsonToken first = expectedTokens[0];
    Assert.AreEqual(first.TokenType, peeked.TokenType);
    Assert.AreEqual(first.Location.Offset, peeked.Location.Offset);
    Assert.AreEqual(first.Data, peeked.Data);

    foreach (TsonToken expected in expectedTokens)
    {
        TsonToken actual = tokenizer.Next();

        // Build a failure message identifying which expected token mismatched.
        string context = String.Format(
            "Token '{0}', Offset {1}, Data {2}",
            expected.TokenType.ToString(),
            expected.Location.Offset,
            expected.Data == null ? "null" : expected.Data);

        Assert.AreEqual(expected.TokenType, actual.TokenType, context);
        Assert.AreEqual(expected.Location.Offset, actual.Location.Offset, context);
        Assert.AreEqual(expected.Data, actual.Data, context);
    }

    // After the sequence is exhausted, both Next and PeekNext report the end token.
    Assert.IsTrue(tokenizer.Next().IsEnd);
    Assert.IsTrue(tokenizer.PeekNext().IsEnd);
}