/// <summary>
/// Writes a chunk whose payload is <paramref name="value"/> encoded as UTF-8:
/// chunk ID, LEB128-encoded byte count, then the raw bytes.
/// </summary>
/// <param name="chunkID">ID written ahead of the payload.</param>
/// <param name="value">String to encode; must not be null.</param>
/// <exception cref="ArgumentNullException"><paramref name="value"/> is null.</exception>
public void WriteChunkString(ChunkId chunkID, string value)
{
    // Validate before touching the stream: failing after the chunk ID has
    // already been written would leave a half-written, corrupt chunk behind.
    if (value == null)
        throw new ArgumentNullException(nameof(value));

    byte[] data = Encoding.UTF8.GetBytes(value);
    chunkID.ToStream(_writer);
    LEB128.Write(_writer, data.Length);
    _writer.Write(data);
}
/// <summary>
/// Finalizes the chunk: seeks back to the size field reserved by the
/// constructor, patches in the actual payload size, and restores the
/// stream position to the end of the chunk.
/// </summary>
public void Dispose()
{
    // Remember where the chunk payload ended.
    long endPosition = _chunkWriter._writer.Position;
    long chunkSize = endPosition - _previousPosition;

    // Seek back and overwrite the placeholder size, padded to the
    // width that was reserved up front.
    _chunkWriter._writer.Position = _chunkSizePosition;
    LEB128.Write(_chunkWriter._writer, chunkSize, _maximumSize);

    // Resume writing after the chunk.
    _chunkWriter._writer.Position = endPosition;
}
/// <summary>
/// Begins a chunk: emits the chunk ID and a placeholder size field whose
/// real value is patched in by <see cref="Dispose"/> once the payload
/// length is known.
/// </summary>
/// <param name="chunkWriter">Owning writer whose stream receives the chunk.</param>
/// <param name="chunkID">ID emitted ahead of the size field.</param>
/// <param name="maximumSize">Width reserved for the LEB128 size field.</param>
public ChunkWritingState(ChunkWriter chunkWriter, ChunkId chunkID, long maximumSize)
{
    _chunkWriter = chunkWriter;
    _maximumSize = maximumSize;

    // Chunk ID goes first.
    chunkID.ToStream(chunkWriter._writer);

    // Reserve space for the chunk size by writing a padded zero.
    _chunkSizePosition = chunkWriter._writer.Position;
    LEB128.Write(chunkWriter._writer, 0, maximumSize);

    // Everything written from here on counts as chunk payload.
    _previousPosition = chunkWriter._writer.Position;
}
/// <summary>Reads a LEB128-encoded unsigned 16-bit value from the current chunk.</summary>
/// <param name="length">Chunk size in bytes; unused here — presumably kept to match the common chunk-reader signature.</param>
public ushort ReadChunkUshort(long length) => LEB128.ReadUShort(_reader);
/// <summary>Reads a LEB128-encoded signed 16-bit value from the current chunk.</summary>
/// <param name="length">Chunk size in bytes; unused here — presumably kept to match the common chunk-reader signature.</param>
public short ReadChunkShort(long length) => LEB128.ReadShort(_reader);
/// <summary>Reads a LEB128-encoded unsigned 32-bit value from the current chunk.</summary>
/// <param name="length">Chunk size in bytes; unused here — presumably kept to match the common chunk-reader signature.</param>
public uint ReadChunkUInt(long length) => LEB128.ReadUInt(_reader);
/// <summary>Reads a LEB128-encoded signed 32-bit value from the current chunk.</summary>
/// <param name="length">Chunk size in bytes; unused here — presumably kept to match the common chunk-reader signature.</param>
public int ReadChunkInt(long length) => LEB128.ReadInt(_reader);
/// <summary>Reads a LEB128-encoded signed 64-bit value from the current chunk.</summary>
/// <param name="length">Chunk size in bytes; unused here — presumably kept to match the common chunk-reader signature.</param>
public long ReadChunkLong(long length) => LEB128.ReadLong(_reader);
/// <summary>
/// Writes a chunk with no payload: just the chunk ID followed by a zero size.
/// </summary>
/// <param name="chunkID">ID of the empty chunk.</param>
public void WriteChunkEmpty(ChunkId chunkID)
{
    chunkID.ToStream(_writer);
    LEB128.Write(_writer, 0);
}
/// <summary>
/// Reads chunks from the stream one after another until the end marker
/// (an empty chunk ID) is found, dispatching each chunk to
/// <paramref name="tryParseChunk"/>.
/// Exceptions from the delegate are caught and logged (except
/// <see cref="OperationCanceledException"/>, which is rethrown to honor
/// cancellation), and the stream is repositioned past each chunk so a bad
/// chunk never derails the rest of the file.
/// </summary>
/// <param name="tryParseChunk">
/// Called with each chunk's ID and size; returns true if it recognized and
/// consumed the chunk.
/// </param>
public void ReadChunks(ReadChunkDelegate tryParseChunk)
{
    do
    {
        ChunkId chunkID = ChunkId.FromStream(_reader);
        if (chunkID == ChunkId.Empty) // End reached
        {
            break;
        }

        // Check if chunk is known, and if not, mark it
        if (_knownChunkList != null && !UnknownChunksFound && !_knownChunkList.Contains(chunkID))
        {
            UnknownChunksFound = true;
        }

        // Read up to a 64 bit number for the chunk size
        long chunkSize = LEB128.ReadLong(_reader);

        // Try loading chunk content
        long chunkStart = _reader.BaseStream.Position;
        bool chunkRecognized = false;
        Exception chunkException = null;
        try
        {
            chunkRecognized = tryParseChunk(chunkID, chunkSize);
        }
        catch (OperationCanceledException)
        {
            // Don't actually keep going if it's an 'OperationCanceledException'
            throw;
        }
        catch (Exception exc)
        {
            chunkException = exc;
        }
        long readDataCount = _reader.BaseStream.Position - chunkStart;

        // Print messages for various problems that might have occurred while loading
        if (chunkException != null)
        {
            logger.Error(chunkException, "Chunk loading raised an exception" + GetLocoationStr(chunkStart, chunkSize, chunkID));
        }
        else if (!chunkRecognized)
        {
            logger.Warn("Chunk not recognized" + GetLocoationStr(chunkStart, chunkSize, chunkID));
        }
        else if (readDataCount > chunkSize)
        {
            // The parser overran the declared chunk boundary — data corruption risk.
            logger.Error("More data was read than available (Read: " + readDataCount + " Available: " + chunkSize + ")" + GetLocoationStr(chunkStart, chunkSize, chunkID));
        }
        else if (readDataCount < chunkSize)
        {
            logger.Warn("Not all the available data was read (Read: " + readDataCount + " Available: " + chunkSize + ")" + GetLocoationStr(chunkStart, chunkSize, chunkID));
        }

        // Adjust _stream position if necessary, so the next chunk starts at
        // the declared boundary regardless of how much the parser consumed
        if (readDataCount != chunkSize)
        {
            _reader.BaseStream.Position = chunkStart + chunkSize;
        }
    } while (true);
}
/// <summary>
/// Writes a chunk containing a single <see cref="Vector4"/>: chunk ID,
/// a fixed payload length of 16, then the vector data.
/// </summary>
/// <param name="chunkID">ID written ahead of the payload.</param>
/// <param name="value">Vector to serialize.</param>
public void WriteChunkVector4(ChunkId chunkID, Vector4 value)
{
    chunkID.ToStream(_writer);
    LEB128.Write(_writer, 16);
    _writer.Write(value);
}
/// <summary>
/// Writes a chunk containing a single floating-point number: chunk ID,
/// a fixed payload length of 8, then the value.
/// Despite the "Float" name, the parameter is a double and 8 bytes are
/// written for it.
/// </summary>
/// <param name="chunkID">ID written ahead of the payload.</param>
/// <param name="value">Value to serialize.</param>
public void WriteChunkFloat(ChunkId chunkID, double value)
{
    chunkID.ToStream(_writer);
    LEB128.Write(_writer, 8);
    _writer.Write(value);
}
/// <summary>
/// Writes a chunk containing a single integer: chunk ID, the LEB128 byte
/// count of the encoded value, then the LEB128-encoded value itself.
/// </summary>
/// <param name="chunkID">ID written ahead of the payload.</param>
/// <param name="value">Value to serialize.</param>
public void WriteChunkInt(ChunkId chunkID, long value)
{
    chunkID.ToStream(_writer);
    // The payload size equals however many bytes the LEB128 encoding takes.
    LEB128.Write(_writer, LEB128.GetLength(_writer, value));
    LEB128.Write(_writer, value);
}
/// <summary>
/// Writes a chunk containing a raw byte array: chunk ID, LEB128-encoded
/// length, then the bytes themselves.
/// </summary>
/// <param name="chunkID">ID written ahead of the payload.</param>
/// <param name="value">Bytes to write; must not be null.</param>
/// <exception cref="ArgumentNullException"><paramref name="value"/> is null.</exception>
public void WriteChunkArrayOfBytes(ChunkId chunkID, byte[] value)
{
    // Validate before touching the stream: failing after the chunk ID has
    // already been written would leave a half-written, corrupt chunk behind.
    if (value == null)
        throw new ArgumentNullException(nameof(value));

    chunkID.ToStream(_writer);
    LEB128.Write(_writer, value.Length);
    _writer.Write(value);
}
/// <summary>
/// Writes a chunk containing a single boolean: chunk ID, a fixed payload
/// length of 1, then the value.
/// </summary>
/// <param name="chunkID">ID written ahead of the payload.</param>
/// <param name="value">Value to serialize.</param>
public void WriteChunkBool(ChunkId chunkID, bool value)
{
    chunkID.ToStream(_writer);
    LEB128.Write(_writer, 1);
    _writer.Write(value);
}
/// <summary>Reads a LEB128-encoded signed 8-bit value from the current chunk.</summary>
/// <param name="length">Chunk size in bytes; unused here — presumably kept to match the common chunk-reader signature.</param>
public sbyte ReadChunkSByte(long length) => LEB128.ReadSByte(_reader);
/// <summary>Reads a LEB128-encoded unsigned 8-bit value from the current chunk.</summary>
/// <param name="length">Chunk size in bytes; unused here — presumably kept to match the common chunk-reader signature.</param>
public byte ReadChunkByte(long length) => LEB128.ReadByte(_reader);
/// <summary>
/// Reads a chunk ID from the stream: a LEB128-encoded length followed by
/// exactly that many ID bytes.
/// </summary>
/// <param name="stream">Reader positioned at the start of a chunk ID.</param>
/// <returns>The chunk ID that was read.</returns>
public static ChunkId FromStream(BinaryReader stream)
{
    int length = LEB128.ReadInt(stream);
    byte[] idBytes = stream.ReadBytes(length);
    // If this turns out to be slow, some kind of array caching could be
    // introduced to reuse buffers.
    return new ChunkId(idBytes, length);
}
// Writes a chunk with a raw 32-bit ID, a fixed 64-bit size field, and a
// LEB128-encoded payload.
//
// NOTE(review): this overload differs from the ChunkId-based writers above —
// it writes the ID as a plain int and the size field as a constant 8, while
// LEB128.Write encodes the value with a variable byte count. If the size
// field is meant to describe the payload, a short value would make it wrong;
// confirm whether the reader of this (presumably legacy) layout expects
// exactly this fixed-size framing.
public void WriteChunkInt(int chunkID, long value)
{
    _writer.Write((int)chunkID);
    _writer.Write((long)8);
    LEB128.Write(_writer, value);
}
/// <summary>
/// Serializes this chunk ID to the stream: a LEB128-encoded length prefix,
/// then exactly that many ID bytes.
/// </summary>
/// <param name="stream">Writer that receives the ID.</param>
public void ToStream(BinaryWriterFast stream)
{
    LEB128.Write(stream, _idLength);
    stream.Write(_idBytes, 0, _idLength);
}