// Token: 0x06002BA8 RID: 11176 RVA: 0x000A3C4C File Offset: 0x000A1E4C
// Switches the tokenizer's input to a new character encoding.
// Byte-array sources with a recognized encoding are simply retagged; purely
// character-based sources need no re-decoding; everything else is rewrapped
// in a StreamReader using the requested encoding.
internal void ChangeFormat(Encoding encoding)
{
    if (encoding == null)
    {
        return;
    }

    Tokenizer.TokenSource source = this._inTokenSource;

    // Character-based sources carry no byte representation; nothing to re-decode.
    if (source == Tokenizer.TokenSource.CharArray
        || source == Tokenizer.TokenSource.String
        || source == Tokenizer.TokenSource.NestedStrings)
    {
        return;
    }

    bool byteArraySource = source == Tokenizer.TokenSource.UnicodeByteArray
        || source == Tokenizer.TokenSource.UTF8ByteArray
        || source == Tokenizer.TokenSource.ASCIIByteArray;

    if (byteArraySource)
    {
        // For the three well-known encodings just retag the byte-array source.
        if (encoding == Encoding.Unicode)
        {
            this._inTokenSource = Tokenizer.TokenSource.UnicodeByteArray;
            return;
        }
        if (encoding == Encoding.UTF8)
        {
            this._inTokenSource = Tokenizer.TokenSource.UTF8ByteArray;
            return;
        }
        if (encoding == Encoding.ASCII)
        {
            this._inTokenSource = Tokenizer.TokenSource.ASCIIByteArray;
            return;
        }
    }

    Stream stream;
    if (byteArraySource)
    {
        // Unrecognized encoding: expose the remaining raw bytes as a stream.
        stream = new MemoryStream(this._inBytes, this._inIndex, this._inSize - this._inIndex);
    }
    else
    {
        // Already reading from a stream: reposition it to the first byte that has
        // not yet been decoded, estimated from the number of characters consumed.
        Tokenizer.StreamTokenReader streamTokenReader = this._inTokenReader as Tokenizer.StreamTokenReader;
        if (streamTokenReader == null)
        {
            return;
        }
        stream = streamTokenReader._in.BaseStream;
        string s = new string(' ', streamTokenReader.NumCharEncountered);
        stream.Position = (long)streamTokenReader._in.CurrentEncoding.GetByteCount(s);
    }

    this._inTokenReader = new Tokenizer.StreamTokenReader(new StreamReader(stream, encoding));
    this._inTokenSource = Tokenizer.TokenSource.Other;
}
// Switches the tokenizer's input to a new character encoding.
//
// Byte-array sources are retagged in place when the requested encoding is one
// of the three recognized ones (Unicode, UTF-8, ASCII); character-based
// sources (CharArray/String/NestedStrings) need no re-decoding and are left
// untouched; any other source is rewrapped in a StreamReader that decodes
// with the requested encoding.
//
// Fix over the decompiled original: the second switch over _inTokenSource has
// been merged into the first — its CharArray/String/NestedStrings cases were
// unreachable because the first switch had already returned for those values.
internal void ChangeFormat(Encoding encoding)
{
    if (encoding == null)
    {
        return;
    }

    Stream stream;
    switch (this._inTokenSource)
    {
        case Tokenizer.TokenSource.UnicodeByteArray:
        case Tokenizer.TokenSource.UTF8ByteArray:
        case Tokenizer.TokenSource.ASCIIByteArray:
            // Known encodings only require retagging the byte-array source.
            if (encoding == Encoding.Unicode)
            {
                this._inTokenSource = Tokenizer.TokenSource.UnicodeByteArray;
                return;
            }
            if (encoding == Encoding.UTF8)
            {
                this._inTokenSource = Tokenizer.TokenSource.UTF8ByteArray;
                return;
            }
            if (encoding == Encoding.ASCII)
            {
                this._inTokenSource = Tokenizer.TokenSource.ASCIIByteArray;
                return;
            }
            // Unrecognized encoding: expose the remaining raw bytes as a stream.
            stream = (Stream) new MemoryStream(this._inBytes, this._inIndex, this._inSize - this._inIndex);
            break;
        case Tokenizer.TokenSource.CharArray:
        case Tokenizer.TokenSource.String:
        case Tokenizer.TokenSource.NestedStrings:
            // Purely character-based sources carry no bytes to re-decode.
            return;
        default:
            // Already stream-backed: reposition to the first undecoded byte,
            // estimated from the number of characters consumed so far.
            Tokenizer.StreamTokenReader streamTokenReader = this._inTokenReader as Tokenizer.StreamTokenReader;
            if (streamTokenReader == null)
            {
                return;
            }
            stream = streamTokenReader._in.BaseStream;
            string s = new string(' ', streamTokenReader.NumCharEncountered);
            stream.Position = (long)streamTokenReader._in.CurrentEncoding.GetByteCount(s);
            break;
    }
    this._inTokenReader = (Tokenizer.ITokenReader) new Tokenizer.StreamTokenReader(new StreamReader(stream, encoding));
    this._inTokenSource = Tokenizer.TokenSource.Other;
}