public void WriteChunkString(ChunkId chunkID, string value)
{
    byte[] data = Encoding.UTF8.GetBytes(value);
    chunkID.ToStream(_writer);
    LEB128.Write(_writer, data.Length);
    _writer.Write(data);
}
public void WriteChunkWithChildren(ChunkId chunkID, WriteChunkDelegate writeChunk, long maximumSize = LEB128.MaximumSize4Byte)
{
    using (WriteChunk(chunkID, maximumSize))
    {
        writeChunk();
        WriteChunkEnd();
    }
}
public ChunkWritingState(ChunkWriter chunkWriter, ChunkId chunkID, long maximumSize)
{
    _chunkWriter = chunkWriter;

    // Write chunk ID
    chunkID.ToStream(chunkWriter._writer);

    // Reserve space for the chunk size: write 0 with a fixed LEB128 width so the
    // real size can be patched in at _chunkSizePosition once the chunk is closed
    _chunkSizePosition = chunkWriter._writer.Position;
    LEB128.Write(chunkWriter._writer, 0, maximumSize);

    // Prepare for writing the chunk content
    _previousPosition = chunkWriter._writer.Position;
    _maximumSize = maximumSize;
}
public void ReadChunks(ReadChunkDelegate tryParseChunk)
{
    do
    {
        ChunkId chunkID = ChunkId.FromStream(_reader);
        if (chunkID == ChunkId.Empty) // End reached
        {
            break;
        }

        // Check whether the chunk is known and, if not, remember that unknown chunks were found
        if (_knownChunkList != null && !UnknownChunksFound && !_knownChunkList.Contains(chunkID))
        {
            UnknownChunksFound = true;
        }

        // Read the chunk size (up to a 64-bit value)
        long chunkSize = LEB128.ReadLong(_reader);

        // Try loading the chunk content
        long chunkStart = _reader.BaseStream.Position;
        bool chunkRecognized = false;
        Exception chunkException = null;
        try
        {
            chunkRecognized = tryParseChunk(chunkID, chunkSize);
        }
        catch (OperationCanceledException)
        {
            // Cancellation must propagate; don't treat it as a chunk loading error
            throw;
        }
        catch (Exception exc)
        {
            chunkException = exc;
        }
        long readDataCount = _reader.BaseStream.Position - chunkStart;

        // Log any problems that occurred while loading the chunk
        if (chunkException != null)
        {
            logger.Error(chunkException, "Chunk loading raised an exception" + GetLocationStr(chunkStart, chunkSize, chunkID));
        }
        else if (!chunkRecognized)
        {
            logger.Warn("Chunk not recognized" + GetLocationStr(chunkStart, chunkSize, chunkID));
        }
        else if (readDataCount > chunkSize)
        {
            logger.Error("More data was read than available (Read: " + readDataCount + " Available: " + chunkSize + ")" + GetLocationStr(chunkStart, chunkSize, chunkID));
        }
        else if (readDataCount < chunkSize)
        {
            logger.Warn("Not all of the available data was read (Read: " + readDataCount + " Available: " + chunkSize + ")" + GetLocationStr(chunkStart, chunkSize, chunkID));
        }

        // Adjust the stream position if necessary
        if (readDataCount != chunkSize)
        {
            _reader.BaseStream.Position = chunkStart + chunkSize;
        }
    } while (true);
}
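// A minimal, hedged usage sketch for ReadChunks (illustrative only). It assumes ReadChunks
// lives on a ChunkReader class, as the fields above suggest; "ContainsChunk" and
// "searchedChunk" are hypothetical names introduced here, not part of this file. Only
// ReadChunks itself and the (chunkID, chunkSize) => bool delegate shape come from the code above.
private static bool ContainsChunk(ChunkReader chunkReader, ChunkId searchedChunk)
{
    bool found = false;
    chunkReader.ReadChunks((chunkID, chunkSize) =>
    {
        if (chunkID == searchedChunk)
        {
            // A real handler would consume exactly chunkSize bytes here; if it reads
            // less or more, ReadChunks logs a warning and repositions the stream itself.
            found = true;
            return true;  // chunk recognized
        }
        return false;     // unrecognized chunk: logged and skipped by ReadChunks
    });
    return found;
}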
// Build a human-readable location string for the log messages above
private static string GetLocationStr(long chunkStart, long chunkSize, ChunkId chunkID)
{
    return " at offset " + chunkStart + " with size " + chunkSize + ". " + chunkID;
}
public void WriteChunkVector4(ChunkId chunkID, Vector4 value)
{
    chunkID.ToStream(_writer);
    LEB128.Write(_writer, 16);
    _writer.Write(value);
}
public void WriteChunkFloat(ChunkId chunkID, double value)
{
    chunkID.ToStream(_writer);
    LEB128.Write(_writer, 8); // Value is stored as an 8-byte double
    _writer.Write(value);
}
public void WriteChunkInt(ChunkId chunkID, long value)
{
    chunkID.ToStream(_writer);
    LEB128.Write(_writer, LEB128.GetLength(_writer, value));
    LEB128.Write(_writer, value);
}
public void WriteChunkArrayOfBytes(ChunkId chunkID, byte[] value)
{
    chunkID.ToStream(_writer);
    LEB128.Write(_writer, value.Length);
    _writer.Write(value);
}
public void WriteChunkBool(ChunkId chunkID, bool value)
{
    chunkID.ToStream(_writer);
    LEB128.Write(_writer, 1);
    _writer.Write(value);
}
public void WriteChunkEmpty(ChunkId chunkID)
{
    chunkID.ToStream(_writer);
    LEB128.Write(_writer, 0);
}
public void WriteChunk(ChunkId chunkID, WriteChunkDelegate writeChunk, long maximumSize = LEB128.MaximumSize4Byte)
{
    using (WriteChunk(chunkID, maximumSize))
        writeChunk();
}
public ChunkWritingState WriteChunk(ChunkId chunkID, long maximumSize = LEB128.MaximumSize4Byte) => new ChunkWritingState(this, chunkID, maximumSize);
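// A minimal, hedged usage sketch for the writer side. "WriteExampleFile" and the three
// ChunkId parameters are hypothetical names introduced for illustration; the methods
// called on chunkWriter are the ones defined above. WriteChunkWithChildren wraps the
// child chunks and then calls WriteChunkEnd, which presumably writes the empty chunk ID
// that ReadChunks stops on.
private static void WriteExampleFile(ChunkWriter chunkWriter,
                                     ChunkId exampleRootChunk,
                                     ChunkId exampleNameChunk,
                                     ChunkId exampleFlagChunk)
{
    // A parent chunk whose payload is a sequence of child chunks
    chunkWriter.WriteChunkWithChildren(exampleRootChunk, () =>
    {
        chunkWriter.WriteChunkString(exampleNameChunk, "Example name");
        chunkWriter.WriteChunkBool(exampleFlagChunk, true);
    });
}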