/// <summary>
/// Loads a chunk from disk: reads the 6-byte header (NonEmptyBlocks, MaxRenderY,
/// MinRenderY — three shorts), then the RLE-compressed block data, and decompresses
/// it into <c>chunk.Blocks</c>.
/// </summary>
/// <param name="chunk">Target chunk to populate; its Pos is used only for error logging.</param>
/// <param name="filePath">Path of the chunk file to read.</param>
/// <returns>True on success; false (after logging) on any failure.</returns>
private static bool LoadChunkFromDisk(Chunk chunk, string filePath)
{
    try
    {
        byte[] filedata;

        // Open read-only with shared read access. FileMode.Open alone defaults to
        // FileAccess.ReadWrite, which throws on read-only files/media and takes a
        // write lock we never need for loading.
        using (var fs = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read))
        using (var br = new BinaryReader(fs))
        {
            // Header: filled block count, then vertical render bounds
            chunk.NonEmptyBlocks = br.ReadInt16();
            chunk.MaxRenderY = br.ReadInt16();
            chunk.MinRenderY = br.ReadInt16();

            // Everything after the header is the serialized RLE payload
            const int headerSize = 3 * sizeof(short); // NonEmptyBlocks + MaxRenderY + MinRenderY
            filedata = br.ReadBytes((int)(fs.Length - headerSize));
        }

        // Convert the byte array into RLE pairs, then decompress into block storage
        chunk.Blocks.RLE.Assign(StructSerializers.DeserializeArrayToList<RLEDataPair<BlockData>>(ref filedata));
        var decompressedData = chunk.Blocks.RLE.Decompress();
        chunk.Blocks.Set(ref decompressedData);
    }
    catch (Exception ex)
    {
        // Same logging style as StoreChunkToDisk for consistency
        Debug.LogErrorFormat("Cannot load chunk '{0}' ([{1},{2},{3}]): {4}",
                             filePath, chunk.Pos.X, chunk.Pos.Y, chunk.Pos.Z, ex);
        return false;
    }

    return true;
}
/// <summary>
/// Stores a chunk to disk. File layout: NonEmptyBlocks (short), MaxRenderY (short),
/// MinRenderY (short), followed by the serialized RLE-compressed block data —
/// the exact format <c>LoadChunkFromDisk</c> reads back.
/// </summary>
/// <param name="chunk">Chunk to persist; its RLE list must already hold the compressed data.</param>
/// <param name="filePath">Destination path; any existing file is overwritten.</param>
/// <returns>True on success; false (after logging) on any failure.</returns>
public static bool StoreChunkToDisk(Chunk chunk, string filePath)
{
    try
    {
        // Serialize the compressed (RLE) block data before touching the file so a
        // serialization failure cannot leave a truncated file behind.
        var buff = StructSerializers.SerializeArray(chunk.Blocks.RLE.List);

        using (var fs = new FileStream(filePath, FileMode.Create, FileAccess.Write))
        using (var bw = new BinaryWriter(fs))
        {
            // Filled block count. Using short limits max section size to 16x16x16.
            bw.Write((short)chunk.NonEmptyBlocks);
            // Vertical render bounds
            bw.Write((short)chunk.MaxRenderY);
            bw.Write((short)chunk.MinRenderY);
            // Block data payload
            bw.Write(buff);
        }
    }
    catch (Exception ex)
    {
        Debug.LogErrorFormat("Cannot save chunk '{0}' ([{1},{2},{3}]): {4}",
                             filePath, chunk.Pos.X, chunk.Pos.Y, chunk.Pos.Z, ex);
        return false;
    }

    return true;
}