public void SaveToBuffer()
{
    // Round-trip check: SaveToBuffer(compression) and SaveToBuffer(buffer, offset, compression)
    // must produce identical bytes and report the full buffer length as written.
    var littleTag = new NbtCompound("Root");
    var testFile = new NbtFile(littleTag);

    byte[] buffer1 = testFile.SaveToBuffer(NbtCompression.None);
    var buffer2 = new byte[buffer1.Length];

    // FIX: NUnit's Assert.AreEqual takes (expected, actual); the original passed them
    // reversed, which produces backwards failure messages.
    Assert.AreEqual(buffer2.Length, testFile.SaveToBuffer(buffer2, 0, NbtCompression.None));
    CollectionAssert.AreEqual(buffer1, buffer2);
}
public void SkippingLists()
{
    // Reloading a saved file with a selector that rejects every tag must leave the
    // root compound empty.
    var file = new NbtFile(TestFiles.MakeListTest());
    byte[] savedFile = file.SaveToBuffer(NbtCompression.None);
    file.LoadFromBuffer(savedFile, 0, savedFile.Length, NbtCompression.None, tag => false);
    // FIX: expected value first (NUnit convention); the original had the arguments reversed.
    Assert.AreEqual(0, file.RootTag.Count);
}
public void SkippingLists()
{
    {
        // A selector that rejects every List tag should leave the root empty.
        var listFile = new NbtFile(TestFiles.MakeListTest());
        byte[] data = listFile.SaveToBuffer(NbtCompression.None);
        listFile.LoadFromBuffer(data, 0, data.Length, NbtCompression.None,
                                tag => tag.TagType != NbtTagType.List);
        Assert.AreEqual(0, listFile.RootTag.Count);
    }
    {
        // Check list-compound interaction
        var root = new NbtCompound("root") {
            new NbtCompound("compOfLists") {
                new NbtList("listOfComps") {
                    new NbtCompound {
                        new NbtList("emptyList", NbtTagType.Compound)
                    }
                }
            }
        };
        var nestedFile = new NbtFile(root);
        byte[] data = nestedFile.SaveToBuffer(NbtCompression.None);
        nestedFile.LoadFromBuffer(data, 0, data.Length, NbtCompression.None,
                                  tag => tag.TagType != NbtTagType.List);
        // Only the outer compound survives once its inner list is skipped.
        Assert.AreEqual(1, nestedFile.RootTag.Count);
    }
}
// Serializes the chunk payload and memoizes it in _cache:
// block ids, block metadata, skylight, blocklight, height map, biome colors,
// a 32-bit extra-data size of 0, then one little-endian uncompressed NBT blob per
// block entity (or a single empty unnamed compound when there are none).
// NOTE(review): _cache is never invalidated in this method — confirm callers reset it
// when the chunk changes.
private byte[] GetBytes()
{
    if (_cache != null)
    {
        return(_cache);
    }
    MemoryStream stream = new MemoryStream();
    {
        NbtBinaryWriter writer = new NbtBinaryWriter(stream, true);
        writer.Write(blocks);
        writer.Write(metadata.Data);
        writer.Write(skylight.Data);
        writer.Write(blocklight.Data);
        //RecalcHeight();
        writer.Write(height);
        // Biome colors are written element by element.
        for (int i = 0; i < biomeColor.Length; i++)
        {
            writer.Write(biomeColor[i]);
        }
        int extraSize = 0;
        writer.Write(extraSize); // No extra data
        if (BlockEntities.Count == 0)
        {
            // No block entities: emit one empty, unnamed compound as a placeholder.
            NbtFile file = new NbtFile(new NbtCompound(string.Empty)) { BigEndian = false };
            writer.Write(file.SaveToBuffer(NbtCompression.None));
        }
        else
        {
            // ToArray() snapshots the collection before iterating over it.
            foreach (NbtCompound blockEntity in BlockEntities.Values.ToArray())
            {
                NbtFile file = new NbtFile(blockEntity) { BigEndian = false };
                writer.Write(file.SaveToBuffer(NbtCompression.None));
            }
        }
        writer.Flush();
        writer.Close();
    }
    var bytes = stream.ToArray();
    stream.Close();
    _cache = bytes;
    return(bytes);
}
// Serializes the given NBT file as little-endian, uncompressed bytes
// and stores it in the Bedrock DB under the given key.
private void WriteNbtToDB(string key, NbtFile file)
{
    file.BigEndian = false;
    BedrockDB.Put(key, file.SaveToBuffer(NbtCompression.None));
}
// Serializes a root compound as little-endian, uncompressed Bedrock NBT bytes.
private byte[] WriteNbtToBytes(NbtCompound root)
{
    var file = new NbtFile(root) { BigEndian = false };
    return file.SaveToBuffer(NbtCompression.None);
}
public override bool AddChests(IEnumerable<long> mapids, string playerid)
{
    OpenDB();
    // Resolve the DB key this player's data is stored under.
    string fileIdentifier = playerid == LOCAL_IDENTIFIER
        ? "~local_player"
        : UuidToKey(playerid);
    byte[] playerdata = BedrockDB.Get(fileIdentifier);
    if (playerdata == null)
    {
        throw new FileNotFoundException($"Player with UUID {playerid} not found");
    }
    // Bedrock player NBT is little-endian and uncompressed.
    var file = new NbtFile { BigEndian = false };
    file.LoadFromBuffer(playerdata, 0, playerdata.Length, NbtCompression.None);
    // Insert the chests into the player's inventory list, then write the file back.
    var invtag = (NbtList)file.RootTag["Inventory"];
    bool success = PutChestsInInventory(invtag, mapids);
    BedrockDB.Put(fileIdentifier, file.SaveToBuffer(NbtCompression.None));
    CloseDB();
    return success;
}
public void SerializingEmpty()
{
    // check saving/loading lists of all possible value types
    var testFile = new NbtFile(new NbtCompound("root") {
        new NbtList("emptyList", NbtTagType.End),
        new NbtList("listyList", NbtTagType.List) {
            new NbtList(NbtTagType.End)
        }
    });
    byte[] buffer = testFile.SaveToBuffer(NbtCompression.None);
    testFile.LoadFromBuffer(buffer, 0, buffer.Length, NbtCompression.None);

    // FIX: expected value first (NUnit convention); the original had every pair reversed.
    NbtList list1 = testFile.RootTag.Get<NbtList>("emptyList");
    Assert.AreEqual(0, list1.Count);
    Assert.AreEqual(NbtTagType.End, list1.ListType);

    NbtList list2 = testFile.RootTag.Get<NbtList>("listyList");
    Assert.AreEqual(1, list2.Count);
    Assert.AreEqual(NbtTagType.List, list2.ListType);
    Assert.AreEqual(0, list2.Get<NbtList>(0).Count);
    Assert.AreEqual(NbtTagType.End, list2.Get<NbtList>(0).ListType);
}
public void NestedListAndCompoundTest()
{
    byte[] data;
    {
        // Build Root -> OuterList -> (unnamed compound) -> InnerList -> (unnamed compound).
        var root = new NbtCompound("Root");
        var outerList = new NbtList("OuterList", NbtTagType.Compound);
        var outerCompound = new NbtCompound();
        var innerList = new NbtList("InnerList", NbtTagType.Compound);
        var innerCompound = new NbtCompound();
        innerList.Add(innerCompound);
        outerCompound.Add(innerList);
        outerList.Add(outerCompound);
        root.Add(outerList);
        var file = new NbtFile(root);
        data = file.SaveToBuffer(NbtCompression.None);
    }
    {
        var file = new NbtFile();
        long bytesRead = file.LoadFromBuffer(data, 0, data.Length, NbtCompression.None);
        // FIX: xUnit's Assert.Equal takes (expected, actual); the original passed them reversed.
        Assert.Equal(data.Length, bytesRead);
        // Compounds nested inside lists must round-trip unnamed.
        Assert.Single(file.RootTag.Get<NbtList>("OuterList"));
        Assert.Null(file.RootTag.Get<NbtList>("OuterList").Get<NbtCompound>(0).Name);
        Assert.Single(file.RootTag.Get<NbtList>("OuterList")
                          .Get<NbtCompound>(0)
                          .Get<NbtList>("InnerList"));
        Assert.Null(file.RootTag.Get<NbtList>("OuterList")
                        .Get<NbtCompound>(0)
                        .Get<NbtList>("InnerList")
                        .Get<NbtCompound>(0)
                        .Name);
    }
}
public override void AddMaps(Dictionary<long, Map> maps)
{
    // Build one "map_<id>" NBT record per map and commit them all in a single batch.
    var batch = new LevelDB.WriteBatch();
    foreach (var entry in maps)
    {
        var mapTag = new NbtCompound("map")
        {
            new NbtLong("mapId", entry.Key),
            new NbtLong("parentMapId", -1),
            new NbtList("decorations", NbtTagType.Compound),
            new NbtByte("fullyExplored", 1),
            new NbtByte("scale", 4),
            new NbtByte("dimension", 0),
            new NbtShort("height", Map.MAP_HEIGHT),
            new NbtShort("width", Map.MAP_WIDTH),
            new NbtByte("unlimitedTracking", 0),
            new NbtInt("xCenter", Int32.MaxValue),
            new NbtInt("zCenter", Int32.MaxValue),
            new NbtByte("mapLocked", 1),
            new NbtByteArray("colors", entry.Value.Colors)
        };
        // Bedrock NBT is little-endian and uncompressed.
        var mapFile = new NbtFile(mapTag) { BigEndian = false };
        batch.Put(Encoding.Default.GetBytes($"map_{entry.Key}"),
                  mapFile.SaveToBuffer(NbtCompression.None));
    }
    BedrockDB.Write(batch);
}
// Serializes the NBT file and writes it to the packet stream as a 16-bit
// length-prefixed payload, using the given compression mode.
public static void SaveToProtocolStream(this NbtFile nbtFile, PacketStream stream, NbtCompression gZip)
{
    var data = nbtFile.SaveToBuffer(gZip);
    // FIX: guard against silent truncation — the protocol's length prefix is only
    // 16 bits wide, so a larger payload would be written with a corrupt length.
    if (data.Length > short.MaxValue)
    {
        throw new InvalidOperationException(
            $"NBT payload too large for a 16-bit length prefix: {data.Length} bytes");
    }
    stream.Write((short)data.Length);
    stream.Write(data);
}
public override bool AddChests(IEnumerable<long> mapids, string playerid)
{
    // acquire the file this player is stored in, and the tag that represents said player
    byte[] playeridbytes;
    if (playerid == LOCAL_IDENTIFIER)
    {
        playeridbytes = Encoding.Default.GetBytes("~local_player");
    }
    else
    {
        playeridbytes = Encoding.Default.GetBytes(playerid);
    }
    // FIX: removed redundant .ToArray() on an array; added a null check so a missing
    // key fails with a clear message instead of a NullReferenceException (matches the
    // sibling AddChests implementation).
    byte[] playerdata = BedrockDB.Get(playeridbytes);
    if (playerdata == null)
    {
        throw new FileNotFoundException($"Player with UUID {playerid} not found");
    }
    // Bedrock player NBT is little-endian and uncompressed.
    var file = new NbtFile();
    file.BigEndian = false;
    file.LoadFromBuffer(playerdata, 0, playerdata.Length, NbtCompression.None);
    var invtag = (NbtList)file.RootTag["Inventory"];
    var success = PutChestsInInventory(invtag, mapids);
    byte[] bytes = file.SaveToBuffer(NbtCompression.None);
    BedrockDB.Put(playeridbytes, bytes);
    return(success);
}
// Round-trips the file through a stream that returns fewer bytes than requested,
// exercising NbtReader's handling of partial reads.
static NbtFile PartialReadTestInternal(NbtFile comp)
{
    byte[] serialized = comp.SaveToBuffer(NbtCompression.None);
    var partialStream = new PartialReadStream(new MemoryStream(serialized));
    var rootTag = (NbtCompound)new NbtReader(partialStream).ReadAsTag();
    return new NbtFile(rootTag);
}
// Writes an NBT payload: always little-endian, and VarInt-encoded only for
// McpeBlockEntityData packets.
public void Write(Nbt nbt)
{
    NbtFile file = nbt.NbtFile;
    file.BigEndian = false;
    file.UseVarInt = this is McpeBlockEntityData;
    byte[] payload = file.SaveToBuffer(NbtCompression.None);
    Write(payload);
}
// Serializes a compound as little-endian, uncompressed NBT. The root tag is
// renamed to the empty string before saving, as the payload requires an unnamed root.
private byte[] GetNbtData(NbtCompound nbtCompound)
{
    nbtCompound.Name = string.Empty;
    var file = new NbtFile(nbtCompound) { BigEndian = false };
    return file.SaveToBuffer(NbtCompression.None);
}
public void Serializing2()
{
    // check saving/loading lists of all possible value types
    var testFile = new NbtFile(TestFiles.MakeListTest());
    byte[] buffer = testFile.SaveToBuffer(NbtCompression.None);
    long bytesRead = testFile.LoadFromBuffer(buffer, 0, buffer.Length, NbtCompression.None);
    // FIX: expected value first (NUnit convention); the original had the arguments reversed.
    Assert.AreEqual(buffer.Length, bytesRead);
}
public void SkippingValuesInCompoundTest()
{
    // Build a value-test compound that also nests a second copy of itself,
    // then reload with a selector that rejects every tag.
    NbtCompound root = TestFiles.MakeValueTest();
    NbtCompound inner = TestFiles.MakeValueTest();
    inner.Name = "NestedComp";
    root.Add(inner);

    var file = new NbtFile(root);
    byte[] bytes = file.SaveToBuffer(NbtCompression.None);
    file.LoadFromBuffer(bytes, 0, bytes.Length, NbtCompression.None, tag => false);

    // Nothing should have been kept.
    Assert.AreEqual(0, file.RootTag.Count);
}
public void RootTagTest()
{
    NbtCompound initialRoot = new NbtCompound("defaultRoot");
    NbtFile file = new NbtFile(initialRoot);

    // Ensure that inappropriate tags are not accepted as RootTag
    Assert.Throws<ArgumentNullException>(() => file.RootTag = null);
    Assert.Throws<ArgumentException>(() => file.RootTag = new NbtCompound());

    // Ensure that the root has not changed
    Assert.AreSame(initialRoot, file.RootTag);

    // Invalidate the root tag, and ensure that expected exception is thrown
    initialRoot.Name = null;
    Assert.Throws<NbtFormatException>(() => file.SaveToBuffer(NbtCompression.None));
}
public void Serializing1()
{
    // check the basics of saving/loading
    const NbtTagType expectedListType = NbtTagType.Int;
    const int elements = 10;

    // construct nbt file
    var writtenFile = new NbtFile(new NbtCompound("ListTypeTest"));
    var writtenList = new NbtList("Entities", null, expectedListType);
    for (int i = 0; i < elements; i++)
    {
        writtenList.Add(new NbtInt(i));
    }
    NbtCompound rootTag = (NbtCompound)writtenFile.RootTag;
    rootTag.Add(writtenList);

    // test saving
    byte[] data = writtenFile.SaveToBuffer(NbtCompression.None);

    // test loading
    var readFile = new NbtFile();
    long bytesRead = readFile.LoadFromBuffer(data, 0, data.Length, NbtCompression.None);
    // FIX: expected value first (NUnit convention); three asserts below were reversed.
    Assert.AreEqual(data.Length, bytesRead);

    // check contents of loaded file
    Assert.NotNull(readFile.RootTag);
    Assert.IsInstanceOf<NbtList>(readFile.RootTag["Entities"]);
    var readList = (NbtList)readFile.RootTag["Entities"];
    Assert.AreEqual(writtenList.ListType, readList.ListType);
    Assert.AreEqual(writtenList.Count, readList.Count);

    // check .ToArray
    CollectionAssert.AreEquivalent(readList, readList.ToArray());
    CollectionAssert.AreEquivalent(readList, readList.ToArray<NbtInt>());

    // check contents of loaded list
    for (int i = 0; i < elements; i++)
    {
        Assert.AreEqual(writtenList.Get<NbtInt>(i).Value, readList.Get<NbtInt>(i).Value);
    }
}
// Serializes the chunk payload and memoizes it in _cache:
// block ids, block metadata, skylight, blocklight, height map, biome colors,
// then one little-endian uncompressed NBT blob per block entity.
// NOTE(review): unlike the newer variant of this method, no placeholder compound is
// written when BlockEntities is empty and no extra-data size field is emitted —
// confirm which layout the consumer expects.
private byte[] GetBytes()
{
    if (_cache != null)
    {
        return(_cache);
    }
    MemoryStream stream = new MemoryStream();
    {
        NbtBinaryWriter writer = new NbtBinaryWriter(stream, true);
        writer.Write(blocks);
        writer.Write(metadata.Data);
        writer.Write(skylight.Data);
        writer.Write(blocklight.Data);
        //RecalcHeight();
        writer.Write(height);
        // Biome colors are written element by element.
        for (int i = 0; i < biomeColor.Length; i++)
        {
            writer.Write(biomeColor[i]);
        }
        foreach (var blockEntity in BlockEntities.Values)
        {
            NbtFile file = new NbtFile(blockEntity);
            file.BigEndian = false;
            writer.Write(file.SaveToBuffer(NbtCompression.None));
        }
        writer.Flush();
        writer.Close();
    }
    var bytes = stream.ToArray();
    stream.Close();
    _cache = bytes;
    return(bytes);
}
public override void WriteTo(IMinecraftStream stream, byte index)
{
    // Key byte first, then the slot id; an id of -1 marks an empty slot and ends the entry.
    stream.WriteUInt8(GetKey(index));
    stream.WriteInt16(Value.ID);
    if (Value.ID == -1)
    {
        return;
    }
    stream.WriteInt8(Value.Count);
    stream.WriteInt16(Value.Metadata);
    if (Value.Nbt == null)
    {
        // No NBT payload: length prefix of -1.
        stream.WriteInt16(-1);
        return;
    }
    // GZip-compressed NBT, prefixed by its 16-bit length.
    var nbtBytes = new NbtFile(Value.Nbt).SaveToBuffer(NbtCompression.GZip);
    stream.WriteInt16((short)nbtBytes.Length);
    stream.WriteUInt8Array(nbtBytes);
}
public void ToNbtTest()
{
    var levelInfo = new LevelInfoBedrock();
    levelInfo.GameType = 1;
    levelInfo.Generator = 1;
    levelInfo.LastPlayed = 1594911609;
    levelInfo.LevelName = "BedrockGeneratedLevel";
    levelInfo.Platform = 2;
    levelInfo.RandomSeed = 3429004588;
    levelInfo.SpawnX = 44;
    levelInfo.SpawnY = 32767;
    levelInfo.SpawnZ = 4;
    levelInfo.Time = 269000;
    levelInfo.SpawnMobs = 1;

    NbtTag nbt = levelInfo.Serialize();
    Console.WriteLine(nbt);

    Assert.IsNotNull(nbt);
    Assert.IsInstanceOfType(nbt, typeof(NbtCompound));
    Assert.AreEqual(levelInfo.GameType, nbt["GameType"].IntValue);
    Assert.AreEqual(levelInfo.Generator, nbt["Generator"].IntValue);
    Assert.AreEqual(levelInfo.LevelName, nbt["LevelName"].StringValue);

    // Bedrock level.dat is little-endian, non-VarInt NBT.
    var file = new NbtFile
    {
        BigEndian = false,
        UseVarInt = false,
    };
    file.RootTag = nbt;
    var bytes = file.SaveToBuffer(NbtCompression.None);

    // FIX: write to the per-machine temp directory instead of the hard-coded
    // C:\Temp\TrashBedrockWorld path, which only exists on the original author's machine
    // and made the test fail everywhere else.
    string outPath = Path.Combine(Path.GetTempPath(), "level_test_generated.dat");
    using FileStream stream = File.Create(outPath);
    // 4 header bytes (0x08, 0, 0, 0), then the 4-byte payload length, then the payload.
    stream.Write(new ReadOnlySpan<byte>(new byte[] { 0x08, 0, 0, 0 }));
    stream.Write(BitConverter.GetBytes(bytes.Length));
    stream.Write(bytes);
    stream.Flush();
}
public void NullParameterTest()
{
    // Constructors must reject null arguments.
    Assert.Throws<ArgumentNullException>(() => new NbtFile((NbtCompound)null));
    Assert.Throws<ArgumentNullException>(() => new NbtFile((string)null));

    var file = new NbtFile();

    // Loading methods must reject a null source.
    Assert.Throws<ArgumentNullException>(() => file.LoadFromBuffer(null, 0, 1, NbtCompression.None));
    Assert.Throws<ArgumentNullException>(() => file.LoadFromBuffer(null, 0, 1, NbtCompression.None, tag => true));
    Assert.Throws<ArgumentNullException>(() => file.LoadFromFile(null));
    Assert.Throws<ArgumentNullException>(() => file.LoadFromFile(null, NbtCompression.None, tag => true));
    Assert.Throws<ArgumentNullException>(() => file.LoadFromStream(null, NbtCompression.AutoDetect));
    Assert.Throws<ArgumentNullException>(() => file.LoadFromStream(null, NbtCompression.AutoDetect, tag => true));

    // Saving methods must reject a null destination.
    Assert.Throws<ArgumentNullException>(() => file.SaveToBuffer(null, 0, NbtCompression.None));
    Assert.Throws<ArgumentNullException>(() => file.SaveToFile(null, NbtCompression.None));
    Assert.Throws<ArgumentNullException>(() => file.SaveToStream(null, NbtCompression.None));

    // Static helpers must reject null too.
    Assert.Throws<ArgumentNullException>(() => NbtFile.ReadRootTagName(null));
    Assert.Throws<ArgumentNullException>(
        () => NbtFile.ReadRootTagName((Stream)null, NbtCompression.None, true, 0));
}
// Writes one chunk into its Anvil-style region file (region\r.<rx>.<rz>.mca).
// If the region file does not exist yet, only an empty 8 KiB header is created and the
// method returns WITHOUT writing the chunk data.
public static void SaveChunk(ChunkColumn chunk, string basePath, int yoffset)
{
    var coordinates = new ChunkCoordinates(chunk.x, chunk.z);
    int width = 32;
    int depth = 32;
    // Region coordinates: each region file covers 32x32 chunks.
    int rx = coordinates.X >> 5;
    int rz = coordinates.Z >> 5;
    string filePath = Path.Combine(basePath, string.Format(@"region\r.{0}.{1}.mca", rx, rz));
    if (!File.Exists(filePath))
    {
        // Make sure directory exist
        Directory.CreateDirectory(Path.Combine(basePath, "region"));
        // Create empty region file
        using (var regionFile = File.Open(filePath, FileMode.CreateNew))
        {
            byte[] buffer = new byte[8192];
            regionFile.Write(buffer, 0, buffer.Length);
        }
        // NOTE(review): returning here means the chunk itself is never written on the
        // first save into a fresh region file — confirm this is intentional (the newer
        // SaveChunk variants fall through instead).
        return;
    }
    using (var regionFile = File.Open(filePath, FileMode.Open))
    {
        byte[] buffer = new byte[8192];
        regionFile.Read(buffer, 0, buffer.Length);
        // Chunk index within the region, wrapped into 0..31.
        int xi = (coordinates.X % width);
        if (xi < 0)
        {
            xi += 32;
        }
        int zi = (coordinates.Z % depth);
        if (zi < 0)
        {
            zi += 32;
        }
        // Location table: 4 bytes per chunk — 3 big-endian bytes of sector offset,
        // then 1 byte of sector count.
        int tableOffset = (xi + zi * width) * 4;
        regionFile.Seek(tableOffset, SeekOrigin.Begin);
        byte[] offsetBuffer = new byte[4];
        regionFile.Read(offsetBuffer, 0, 3);
        Array.Reverse(offsetBuffer);
        int offset = BitConverter.ToInt32(offsetBuffer, 0) << 4;
        int length = regionFile.ReadByte();
        if (offset == 0 || length == 0)
        {
            // Chunk not present yet: append at end of file and record its offset.
            regionFile.Seek(0, SeekOrigin.End);
            offset = (int)regionFile.Position;
            regionFile.Seek(tableOffset, SeekOrigin.Begin);
            byte[] bytes = BitConverter.GetBytes(offset >> 4);
            Array.Reverse(bytes);
            regionFile.Write(bytes, 0, 3);
            // NOTE(review): sector count is hard-coded to 1 — chunks larger than one
            // 4 KiB sector are not accounted for. The newer SaveChunk variants compute
            // a real sector count; confirm whether this version needs the same.
            regionFile.WriteByte(1);
        }
        // Write NBT
        NbtFile nbt = CreateNbtFromChunkColumn(chunk, yoffset);
        byte[] nbtBuf = nbt.SaveToBuffer(NbtCompression.ZLib);
        int lenght = nbtBuf.Length;
        // Chunk data: 4-byte big-endian length (payload + compression byte),
        // 1-byte compression mode, then the zlib payload.
        byte[] lenghtBytes = BitConverter.GetBytes(lenght + 1);
        Array.Reverse(lenghtBytes);
        regionFile.Seek(offset, SeekOrigin.Begin);
        regionFile.Write(lenghtBytes, 0, 4); // Lenght
        regionFile.WriteByte(0x02); // Compression mode
        regionFile.Write(nbtBuf, 0, nbtBuf.Length);
        // Pad up to the next 4 KiB sector boundary.
        int reminder;
        Math.DivRem(lenght + 4, 4096, out reminder);
        byte[] padding = new byte[4096 - reminder];
        if (padding.Length > 0)
        {
            regionFile.Write(padding, 0, padding.Length);
        }
    }
}
// Serializes a single entity-metadata entry: the index byte, a type-dependent
// payload, then the Closer byte.
public void WriteMetadata(EntityMetadata dt)
{
    WriteByte(dt.Index);
    object o = dt.Value;
    switch (dt.Type)
    {
        case EntityMetadataType.Byte:
            WriteByte((byte)o);
            break;
        case EntityMetadataType.VarInt:
            WriteVarInt((VarInt)o);
            break;
        case EntityMetadataType.Float:
            WriteFloat((float)o);
            break;
        case EntityMetadataType.String:
            WriteString((string)o);
            break;
        case EntityMetadataType.Chat:
            // Chat payload is whatever string the builder's Generate() produces.
            ChatBuilder b = (ChatBuilder)o;
            WriteString(b.Generate());
            break;
        case EntityMetadataType.OptChat:
            OptChat optC = (OptChat)o;
            // NOTE(review): only the chat string is written when Enabled; no explicit
            // presence flag is emitted for the disabled case — confirm the wire format
            // expects that.
            if (optC.Enabled)
            {
                WriteString(optC.Chat.Generate());
            }
            break;
        case EntityMetadataType.Slot:
            // Slot: 16-bit item id (-1 = empty slot), then count, then optional
            // GZip-compressed NBT with a 16-bit length prefix (-1 when absent).
            Slot slot = (Slot)o;
            short id = (short)slot.ItemID.Value;
            WriteShort(id);
            if (id != -1)
            {
                WriteSByte((sbyte)slot.ItemCount);
                if (slot.OptionalNbt != null)
                {
                    var file = new NbtFile(slot.OptionalNbt);
                    var data = file.SaveToBuffer(NbtCompression.GZip);
                    WriteShort((short)data.Length);
                    WriteByteArray(data);
                }
                else
                {
                    WriteShort(-1);
                }
            }
            break;
        case EntityMetadataType.Boolean:
            WriteBoolean((bool)o);
            break;
        // NOTE(review): unknown metadata types fall through silently, emitting only
        // index + Closer — confirm this is intended rather than an error case.
    }
    WriteByte(Closer);
}
// Writes one chunk into its Anvil region file, allocating new sectors at the end of
// the file when the chunk is new or has outgrown its previously recorded sector count.
public static void SaveChunk(ChunkColumn chunk, string basePath)
{
    // WARNING: This method does not consider growing size of the chunks. Needs refactoring to find
    // free sectors and clear up old ones. It works fine as long as no dynamic data is written
    // like block entity data (signs etc).
    Stopwatch time = new Stopwatch();
    time.Restart();
    chunk.NeedSave = false;
    var coordinates = new ChunkCoordinates(chunk.x, chunk.z);
    int width = 32;
    int depth = 32;
    // Region coordinates: each region file covers 32x32 chunks.
    int rx = coordinates.X >> 5;
    int rz = coordinates.Z >> 5;
    string filePath = Path.Combine(basePath, string.Format(@"region{2}r.{0}.{1}.mca", rx, rz, Path.DirectorySeparatorChar));
    Log.Debug($"Save chunk X={chunk.x}, Z={chunk.z} to {filePath}");
    if (!File.Exists(filePath))
    {
        // Make sure directory exist
        Directory.CreateDirectory(Path.Combine(basePath, "region"));
        // Create empty region file
        using (var regionFile = File.Open(filePath, FileMode.CreateNew))
        {
            byte[] buffer = new byte[8192];
            regionFile.Write(buffer, 0, buffer.Length);
        }
    }
    Stopwatch testTime = new Stopwatch();
    using (var regionFile = File.Open(filePath, FileMode.Open))
    {
        // Location table entry for this chunk: 4 bytes per chunk in the first header page.
        int locationIndex = ((coordinates.X & (width - 1)) + (coordinates.Z & (depth - 1)) * width) << 2;
        regionFile.Seek(locationIndex, SeekOrigin.Begin);
        // 3 big-endian bytes of sector offset, then 1 byte of sector count.
        byte[] offsetBuffer = new byte[4];
        regionFile.Read(offsetBuffer, 0, 3);
        Array.Reverse(offsetBuffer);
        int offset = BitConverter.ToInt32(offsetBuffer, 0) << 4;
        int sectorCount = regionFile.ReadByte();
        testTime.Restart(); // RESTART
        // Seriaize NBT to get lenght
        NbtFile nbt = CreateNbtFromChunkColumn(chunk);
        testTime.Stop();
        byte[] nbtBuf = nbt.SaveToBuffer(NbtCompression.ZLib);
        int nbtLength = nbtBuf.Length;
        byte nbtSectorCount = (byte)Math.Ceiling(nbtLength / 4096d);
        // Don't write yet, just use the lenght
        //TODO: Search for available sectors
        // New chunk, or the chunk no longer fits its old sectors: append at the end.
        if (offset == 0 || sectorCount == 0 || nbtSectorCount > sectorCount)
        {
            if (Log.IsDebugEnabled)
            {
                if (sectorCount != 0)
                {
                    Log.Warn($"Creating new sectors for this chunk even tho it existed. Old sector count={sectorCount}, new sector count={nbtSectorCount} (lenght={nbtLength})");
                }
            }
            regionFile.Seek(0, SeekOrigin.End);
            // Align the new offset down to a 16-byte boundary.
            offset = (int)((int)regionFile.Position & 0xfffffff0);
            regionFile.Seek(locationIndex, SeekOrigin.Begin);
            byte[] bytes = BitConverter.GetBytes(offset >> 4);
            Array.Reverse(bytes);
            regionFile.Write(bytes, 0, 3);
            regionFile.WriteByte(nbtSectorCount);
            // Timestamp table lives in the second 4 KiB page of the header.
            regionFile.Seek(4096 + locationIndex, SeekOrigin.Begin);
            bytes = BitConverter.GetBytes((int)(DateTime.UtcNow.Subtract(new DateTime(1970, 1, 1))).TotalSeconds);
            Array.Reverse(bytes);
            regionFile.Write(bytes, 0, bytes.Length);
        }
        // Chunk data: 4-byte big-endian length (payload + compression byte),
        // 1-byte compression mode, then the zlib payload.
        byte[] lenghtBytes = BitConverter.GetBytes(nbtLength + 1);
        Array.Reverse(lenghtBytes);
        regionFile.Seek(offset, SeekOrigin.Begin);
        regionFile.Write(lenghtBytes, 0, 4); // Lenght
        regionFile.WriteByte(0x02); // Compression mode
        regionFile.Write(nbtBuf, 0, nbtBuf.Length);
        // Pad up to the next 4 KiB sector boundary.
        int reminder;
        Math.DivRem(nbtLength + 4, 4096, out reminder);
        byte[] padding = new byte[4096 - reminder];
        if (padding.Length > 0)
        {
            regionFile.Write(padding, 0, padding.Length);
        }
        testTime.Stop(); // STOP
        Log.Warn($"Took {time.ElapsedMilliseconds}ms to save. And {testTime.ElapsedMilliseconds}ms to generate bytes from NBT");
    }
}
// Serializes the given file, then reads it back through PartialReadStream —
// a stream that returns fewer bytes than requested — to verify NbtReader copes
// with short reads.
static NbtFile PartialReadTestInternal(NbtFile comp)
{
    byte[] payload = comp.SaveToBuffer(NbtCompression.None);
    var shortReadStream = new PartialReadStream(new MemoryStream(payload));
    var reader = new NbtReader(shortReadStream);
    return new NbtFile((NbtCompound)reader.ReadAsTag());
}
// Writes one chunk into its Anvil region file, allocating new sectors at the end of
// the file when the chunk is new or has outgrown its previously recorded sector count.
public static void SaveChunk(ChunkColumn chunk, string basePath)
{
    // WARNING: This method does not consider growing size of the chunks. Needs refactoring to find
    // free sectors and clear up old ones. It works fine as long as no dynamic data is written
    // like block entity data (signs etc).
    Stopwatch time = new Stopwatch();
    time.Restart();
    chunk.NeedSave = false;
    var coordinates = new ChunkCoordinates(chunk.x, chunk.z);
    int width = 32;
    int depth = 32;
    // Region coordinates: each region file covers 32x32 chunks.
    int rx = coordinates.X >> 5;
    int rz = coordinates.Z >> 5;
    string filePath = Path.Combine(basePath, string.Format(@"region{2}r.{0}.{1}.mca", rx, rz, Path.DirectorySeparatorChar));
    Log.Debug($"Save chunk X={chunk.x}, Z={chunk.z} to {filePath}");
    if (!File.Exists(filePath))
    {
        // Make sure directory exist
        Directory.CreateDirectory(Path.Combine(basePath, "region"));
        // Create empty region file
        using (var regionFile = File.Open(filePath, FileMode.CreateNew))
        {
            byte[] buffer = new byte[8192];
            regionFile.Write(buffer, 0, buffer.Length);
        }
    }
    Stopwatch testTime = new Stopwatch();
    using (var regionFile = File.Open(filePath, FileMode.Open))
    {
        // Region files begin with an 8kiB header containing information about which chunks are present in the region file,
        // when they were last updated, and where they can be found.
        byte[] buffer = new byte[8192];
        regionFile.Read(buffer, 0, buffer.Length);
        // Chunk index within the region, wrapped into 0..31.
        int xi = (coordinates.X % width);
        if (xi < 0)
        {
            xi += 32;
        }
        int zi = (coordinates.Z % depth);
        if (zi < 0)
        {
            zi += 32;
        }
        int tableOffset = (xi + zi * width) * 4;
        regionFile.Seek(tableOffset, SeekOrigin.Begin);
        // Location information for a chunk consists of four bytes split into two fields: the first three bytes are a(big - endian) offset in 4KiB sectors
        // from the start of the file, and a remaining byte which gives the length of the chunk(also in 4KiB sectors, rounded up).
        byte[] offsetBuffer = new byte[4];
        regionFile.Read(offsetBuffer, 0, 3);
        Array.Reverse(offsetBuffer);
        int offset = BitConverter.ToInt32(offsetBuffer, 0) << 4;
        byte sectorCount = (byte)regionFile.ReadByte();
        testTime.Restart(); // RESTART
        // Seriaize NBT to get lenght
        NbtFile nbt = CreateNbtFromChunkColumn(chunk);
        testTime.Stop();
        byte[] nbtBuf = nbt.SaveToBuffer(NbtCompression.ZLib);
        int nbtLength = nbtBuf.Length;
        byte nbtSectorCount = (byte)Math.Ceiling(nbtLength / 4096d);
        // Don't write yet, just use the lenght
        // New chunk, or the chunk no longer fits its old sectors: append at the end.
        if (offset == 0 || sectorCount == 0 || nbtSectorCount > sectorCount)
        {
            if (Log.IsDebugEnabled)
            {
                if (sectorCount != 0)
                {
                    Log.Warn($"Creating new sectors for this chunk even tho it existed. Old sector count={sectorCount}, new sector count={nbtSectorCount} (lenght={nbtLength})");
                }
            }
            regionFile.Seek(0, SeekOrigin.End);
            // Align the new offset down to a 16-byte boundary.
            offset = (int)((int)regionFile.Position & 0xfffffff0);
            regionFile.Seek(tableOffset, SeekOrigin.Begin);
            byte[] bytes = BitConverter.GetBytes(offset >> 4);
            Array.Reverse(bytes);
            regionFile.Write(bytes, 0, 3);
            regionFile.WriteByte(nbtSectorCount);
        }
        // Chunk data: 4-byte big-endian length (payload + compression byte),
        // 1-byte compression mode, then the zlib payload.
        byte[] lenghtBytes = BitConverter.GetBytes(nbtLength + 1);
        Array.Reverse(lenghtBytes);
        regionFile.Seek(offset, SeekOrigin.Begin);
        regionFile.Write(lenghtBytes, 0, 4); // Lenght
        regionFile.WriteByte(0x02); // Compression mode
        regionFile.Write(nbtBuf, 0, nbtBuf.Length);
        // Pad up to the next 4 KiB sector boundary.
        int reminder;
        Math.DivRem(nbtLength + 4, 4096, out reminder);
        byte[] padding = new byte[4096 - reminder];
        if (padding.Length > 0)
        {
            regionFile.Write(padding, 0, padding.Length);
        }
        testTime.Stop(); // STOP
        Log.Warn($"Took {time.ElapsedMilliseconds}ms to save. And {testTime.ElapsedMilliseconds}ms to generate bytes from NBT");
    }
}
public void NullParameterTest()
{
    // Every public entry point of NbtFile must reject a null argument.
    Assert.Throws<ArgumentNullException>(() => new NbtFile((NbtCompound)null));
    Assert.Throws<ArgumentNullException>(() => new NbtFile((string)null));

    var file = new NbtFile();

    // Load* overloads.
    Assert.Throws<ArgumentNullException>(() => file.LoadFromBuffer(null, 0, 1, NbtCompression.None));
    Assert.Throws<ArgumentNullException>(() => file.LoadFromBuffer(null, 0, 1, NbtCompression.None, t => true));
    Assert.Throws<ArgumentNullException>(() => file.LoadFromFile(null));
    Assert.Throws<ArgumentNullException>(() => file.LoadFromFile(null, NbtCompression.None, t => true));
    Assert.Throws<ArgumentNullException>(() => file.LoadFromStream(null, NbtCompression.AutoDetect));
    Assert.Throws<ArgumentNullException>(() => file.LoadFromStream(null, NbtCompression.AutoDetect, t => true));

    // Save* overloads.
    Assert.Throws<ArgumentNullException>(() => file.SaveToBuffer(null, 0, NbtCompression.None));
    Assert.Throws<ArgumentNullException>(() => file.SaveToFile(null, NbtCompression.None));
    Assert.Throws<ArgumentNullException>(() => file.SaveToStream(null, NbtCompression.None));

    // Static helpers.
    Assert.Throws<ArgumentNullException>(() => NbtFile.ReadRootTagName(null));
    Assert.Throws<ArgumentNullException>(
        () => NbtFile.ReadRootTagName((Stream)null, NbtCompression.None, true, 0));
}
public void RootTagTest()
{
    var originalRoot = new NbtCompound("defaultRoot");
    var file = new NbtFile(originalRoot);

    // Ensure that inappropriate tags are not accepted as RootTag:
    // null is rejected outright, and an unnamed compound is not a valid root.
    Assert.Throws<ArgumentNullException>(() => file.RootTag = null);
    Assert.Throws<ArgumentException>(() => file.RootTag = new NbtCompound());

    // The failed assignments above must not have replaced the root.
    Assert.AreSame(originalRoot, file.RootTag);

    // Invalidate the root tag after the fact; saving should now fail.
    originalRoot.Name = null;
    Assert.Throws<NbtFormatException>(() => file.SaveToBuffer(NbtCompression.None));
}
// Static initializer: builds the block lookup tables —
// transparency/luminosity per legacy block id, the name-to-id map, the block palette
// loaded from the embedded blockstates.json, a cached little-endian VarInt NBT blob
// of each palette record's states, and the legacy (id<<4 | meta) -> runtime id table.
static BlockFactory()
{
    // Scan all legacy ids and record transparency and light emission.
    for (int i = 0; i < byte.MaxValue * 2; i++)
    {
        var block = GetBlockById(i);
        if (block != null)
        {
            if (block.IsTransparent)
            {
                TransparentBlocks[block.Id] = 1;
            }
            if (block.LightLevel > 0)
            {
                LuminousBlocks[block.Id] = (byte)block.LightLevel;
            }
        }
    }
    NameToId = BuildNameToId();
    // -1 marks "no runtime id known" for every legacy (id, meta) slot.
    for (int i = 0; i < LegacyToRuntimeId.Length; ++i)
    {
        LegacyToRuntimeId[i] = -1;
    }
    var assembly = Assembly.GetAssembly(typeof(Block));
    lock (lockObj)
    {
        // Load the block palette from the embedded JSON resource.
        using (var stream = assembly.GetManifestResourceStream(typeof(Block).Namespace + ".blockstates.json"))
        using (var reader = new StreamReader(stream))
        {
            BlockPalette = BlockPalette.FromJson(reader.ReadToEnd());
        }
        // Pre-serialize each record's states as little-endian VarInt NBT and cache it.
        foreach (var record in BlockPalette)
        {
            var states = new List<NbtTag>();
            foreach (IBlockState state in record.States)
            {
                NbtTag stateTag = null;
                switch (state)
                {
                    case BlockStateByte blockStateByte:
                        stateTag = new NbtByte(state.Name, blockStateByte.Value);
                        break;
                    case BlockStateInt blockStateInt:
                        stateTag = new NbtInt(state.Name, blockStateInt.Value);
                        break;
                    case BlockStateString blockStateString:
                        stateTag = new NbtString(state.Name, blockStateString.Value);
                        break;
                    default:
                        throw new ArgumentOutOfRangeException(nameof(state));
                }
                states.Add(stateTag);
            }
            var nbt = new NbtFile()
            {
                BigEndian = false,
                UseVarInt = true,
                RootTag = new NbtCompound("states", states)
            };
            byte[] nbtBinary = nbt.SaveToBuffer(NbtCompression.None);
            record.StatesCacheNbt = nbtBinary;
        }
    }
    // Build the legacy (id << 4 | meta) -> palette index mapping.
    int palletSize = BlockPalette.Count;
    for (int i = 0; i < palletSize; i++)
    {
        if (BlockPalette[i].Data > 15)
        {
            continue; // TODO: figure out why palette contains blocks with meta more than 15
        }
        if (BlockPalette[i].Data == -1)
        {
            continue; // These are blockstates that does not have a metadata mapping
        }
        LegacyToRuntimeId[(BlockPalette[i].Id << 4) | (byte)BlockPalette[i].Data] = i;
    }
    BlockStates = new HashSet<BlockStateContainer>(BlockPalette);
}
public void SerializingEmpty()
{
    // check saving/loading lists of all possible value types
    var testFile = new NbtFile(new NbtCompound("root") {
        new NbtList("emptyList", NbtTagType.End),
        new NbtList("listyList", NbtTagType.List) {
            new NbtList(NbtTagType.End)
        }
    });
    byte[] buffer = testFile.SaveToBuffer(NbtCompression.None);
    testFile.LoadFromBuffer(buffer, 0, buffer.Length, NbtCompression.None);

    // FIX: expected value first (NUnit convention); the original had every pair reversed.
    NbtList list1 = testFile.RootTag.Get<NbtList>("emptyList");
    Assert.AreEqual(0, list1.Count);
    Assert.AreEqual(NbtTagType.End, list1.ListType);

    NbtList list2 = testFile.RootTag.Get<NbtList>("listyList");
    Assert.AreEqual(1, list2.Count);
    Assert.AreEqual(NbtTagType.List, list2.ListType);
    Assert.AreEqual(0, list2.Get<NbtList>(0).Count);
    Assert.AreEqual(NbtTagType.End, list2.Get<NbtList>(0).ListType);
}
public void Serializing1()
{
    // check the basics of saving/loading
    const NbtTagType expectedListType = NbtTagType.Int;
    const int elements = 10;

    // construct nbt file
    var writtenFile = new NbtFile(new NbtCompound("ListTypeTest"));
    var writtenList = new NbtList("Entities", null, expectedListType);
    for (int i = 0; i < elements; i++)
    {
        writtenList.Add(new NbtInt(i));
    }
    writtenFile.RootTag.Add(writtenList);

    // test saving
    byte[] data = writtenFile.SaveToBuffer(NbtCompression.None);

    // test loading
    var readFile = new NbtFile();
    long bytesRead = readFile.LoadFromBuffer(data, 0, data.Length, NbtCompression.None);
    // FIX: expected value first (NUnit convention); three asserts below were reversed.
    Assert.AreEqual(data.Length, bytesRead);

    // check contents of loaded file
    Assert.NotNull(readFile.RootTag);
    Assert.IsInstanceOf<NbtList>(readFile.RootTag["Entities"]);
    var readList = (NbtList)readFile.RootTag["Entities"];
    Assert.AreEqual(writtenList.ListType, readList.ListType);
    Assert.AreEqual(writtenList.Count, readList.Count);

    // check .ToArray
    CollectionAssert.AreEquivalent(readList, readList.ToArray());
    CollectionAssert.AreEquivalent(readList, readList.ToArray<NbtInt>());

    // check contents of loaded list
    for (int i = 0; i < elements; i++)
    {
        Assert.AreEqual(writtenList.Get<NbtInt>(i).Value, readList.Get<NbtInt>(i).Value);
    }
}
// Writes one chunk into its Anvil region file, allocating new sectors at the end of
// the file when the chunk is new or no longer fits its previously recorded sectors.
public static void SaveChunk(ChunkColumn chunk, string basePath, int yoffset)
{
    // WARNING: This method does not consider growing size of the chunks. Needs refactoring to find
    // free sectors and clear up old ones. It works fine as long as no dynamic data is written
    // like block entity data (signs etc).
    Log.Debug($"Save chunk X={chunk.x}, Z={chunk.z} to {basePath}");
    chunk.NeedSave = false;
    var coordinates = new ChunkCoordinates(chunk.x, chunk.z);
    int width = 32;
    int depth = 32;
    // Region coordinates: each region file covers 32x32 chunks.
    int rx = coordinates.X >> 5;
    int rz = coordinates.Z >> 5;
    string filePath = Path.Combine(basePath, string.Format(@"region{2}r.{0}.{1}.mca", rx, rz, Path.DirectorySeparatorChar));
    if (!File.Exists(filePath))
    {
        // Make sure directory exist
        Directory.CreateDirectory(Path.Combine(basePath, "region"));
        // Create empty region file, then fall through so the chunk is actually written.
        // FIX: the original returned here, silently dropping the chunk on the first
        // save into a fresh region file.
        using (var regionFile = File.Open(filePath, FileMode.CreateNew))
        {
            byte[] buffer = new byte[8192];
            regionFile.Write(buffer, 0, buffer.Length);
        }
    }
    using (var regionFile = File.Open(filePath, FileMode.Open))
    {
        byte[] buffer = new byte[8192];
        regionFile.Read(buffer, 0, buffer.Length);
        // Chunk index within the region, wrapped into 0..31.
        int xi = (coordinates.X % width);
        if (xi < 0)
        {
            xi += 32;
        }
        int zi = (coordinates.Z % depth);
        if (zi < 0)
        {
            zi += 32;
        }
        // Location table: 3 big-endian bytes of sector offset, then 1 byte of sector count.
        int tableOffset = (xi + zi * width) * 4;
        regionFile.Seek(tableOffset, SeekOrigin.Begin);
        byte[] offsetBuffer = new byte[4];
        regionFile.Read(offsetBuffer, 0, 3);
        Array.Reverse(offsetBuffer);
        int offset = BitConverter.ToInt32(offsetBuffer, 0) << 4;
        int sectorCount = regionFile.ReadByte();

        // Seriaize NBT to get lenght
        NbtFile nbt = CreateNbtFromChunkColumn(chunk, yoffset);
        byte[] nbtBuf = nbt.SaveToBuffer(NbtCompression.ZLib);
        int nbtLength = nbtBuf.Length;
        // +5 accounts for the 4-byte length prefix and 1-byte compression marker.
        byte nbtSectorCount = (byte)Math.Ceiling((nbtLength + 5) / 4096d);

        // Don't write yet, just use the lenght
        // FIX: the original compared the byte length against the sector count
        // ("nbtLength < length"), a units mismatch that almost never allocated new
        // sectors and would overwrite following chunks in place when this one grew.
        if (offset == 0 || sectorCount == 0 || nbtSectorCount > sectorCount)
        {
            if (sectorCount != 0)
            {
                Log.Debug("Creating new sectors for this chunk even tho it existed");
            }
            regionFile.Seek(0, SeekOrigin.End);
            // Align the new offset down to a 16-byte boundary.
            offset = (int)((int)regionFile.Position & 0xfffffff0);
            regionFile.Seek(tableOffset, SeekOrigin.Begin);
            byte[] bytes = BitConverter.GetBytes(offset >> 4);
            Array.Reverse(bytes);
            regionFile.Write(bytes, 0, 3);
            // FIX: record the real sector count instead of a hard-coded 1.
            regionFile.WriteByte(nbtSectorCount);
        }
        // Chunk data: 4-byte big-endian length (payload + compression byte),
        // 1-byte compression mode, then the zlib payload.
        byte[] lenghtBytes = BitConverter.GetBytes(nbtLength + 1);
        Array.Reverse(lenghtBytes);
        regionFile.Seek(offset, SeekOrigin.Begin);
        regionFile.Write(lenghtBytes, 0, 4); // Lenght
        regionFile.WriteByte(0x02); // Compression mode
        regionFile.Write(nbtBuf, 0, nbtBuf.Length);
        // Pad up to the next 4 KiB sector boundary.
        int reminder;
        Math.DivRem(nbtLength + 4, 4096, out reminder);
        byte[] padding = new byte[4096 - reminder];
        if (padding.Length > 0)
        {
            regionFile.Write(padding, 0, padding.Length);
        }
    }
}
// Static initializer: populates the transparency/luminance lookup tables, loads the
// vendored block palette resources, and builds the legacy (id, meta) -> runtime-id mapping.
static BlockFactory()
{
    // Probe every candidate legacy block id and record per-id transparency and light level.
    // NOTE(review): loop bound is byte.MaxValue * 2 (510) — presumably the legacy id space;
    // confirm against the size of TransparentBlocks/LuminousBlocks.
    for (int i = 0; i < byte.MaxValue * 2; i++)
    {
        var block = GetBlockById(i);
        if (block != null)
        {
            if (block.IsTransparent)
            {
                TransparentBlocks[block.Id] = 1;
            }
            if (block.LightLevel > 0)
            {
                LuminousBlocks[block.Id] = (byte) block.LightLevel;
            }
        }
    }

    NameToId = BuildNameToId();

    // -1 marks "no runtime id known" for every (id << 4 | meta) slot until mapped below.
    for (int i = 0; i < LegacyToRuntimeId.Length; ++i)
    {
        LegacyToRuntimeId[i] = -1;
    }

    var assembly = Assembly.GetAssembly(typeof(Block));

    lock (lockObj)
    {
        // Name -> legacy id mapping from the embedded block_id_map.json resource.
        // Case-insensitive so lookups tolerate differing name casing.
        Dictionary <string, int> idMapping = new Dictionary <string, int>(ResourceUtil.ReadResource <Dictionary <string, int> >("block_id_map.json", typeof(Block), "Data"), StringComparer.OrdinalIgnoreCase);

        // Read the canonical block-state palette; runtime ids are assigned by read order.
        int runtimeId = 0;
        BlockPalette = new BlockPalette();
        using (var stream = assembly.GetManifestResourceStream(typeof(Block).Namespace + ".Data.canonical_block_states.nbt"))
        {
            do
            {
                var compound = Packet.ReadNbtCompound(stream, true);
                var container = GetBlockStateContainer(compound);
                container.RuntimeId = runtimeId++;
                BlockPalette.Add(container);
            } while (stream.Position < stream.Length);
        }

        // Parse the r12 -> current mapping: each record is a VarInt-prefixed UTF-8 string id,
        // a 2-byte little-endian meta value, then an NBT compound describing the target state.
        List <R12ToCurrentBlockMapEntry> legacyStateMap = new List <R12ToCurrentBlockMapEntry>();
        using (var stream = assembly.GetManifestResourceStream(typeof(Block).Namespace + ".Data.r12_to_current_block_map.bin"))
        {
            while (stream.Position < stream.Length)
            {
                var length = VarInt.ReadUInt32(stream);
                byte[] bytes = new byte[length];
                stream.Read(bytes, 0, bytes.Length);
                string stringId = Encoding.UTF8.GetString(bytes);

                bytes = new byte[2];
                stream.Read(bytes, 0, bytes.Length);
                var meta = BitConverter.ToInt16(bytes);

                var compound = Packet.ReadNbtCompound(stream, true);
                legacyStateMap.Add(new R12ToCurrentBlockMapEntry(stringId, meta, GetBlockStateContainer(compound)));
            }
        }

        // Index palette entries by state name so legacy entries only compare against
        // same-named candidates instead of scanning the whole palette.
        Dictionary <string, List <int> > idToStatesMap = new Dictionary <string, List <int> >(StringComparer.OrdinalIgnoreCase);
        for (var index = 0; index < BlockPalette.Count; index++)
        {
            var state = BlockPalette[index];
            List <int> candidates;
            if (!idToStatesMap.TryGetValue(state.Name, out candidates))
            {
                candidates = new List <int>();
            }
            candidates.Add(index);
            idToStatesMap[state.Name] = candidates;
        }

        // For each legacy entry, find the palette entry whose state set matches exactly,
        // then stamp legacy id/meta onto it and record the runtime id in LegacyToRuntimeId.
        foreach (var pair in legacyStateMap)
        {
            if (!idMapping.TryGetValue(pair.StringId, out int id))
            {
                continue; // No legacy numeric id for this string id.
            }

            var data = pair.Meta;
            if (data > 15)
            {
                continue; // Meta must fit in the 4-bit slot of the (id << 4 | meta) index.
            }

            var mappedState = pair.State;
            var mappedName = pair.State.Name;

            if (!idToStatesMap.TryGetValue(mappedName, out var matching))
            {
                continue;
            }

            foreach (var match in matching)
            {
                var networkState = BlockPalette[match];

                // Exact match when the intersection of the two state sets covers all of
                // the legacy entry's states.
                var thisStates = new HashSet <IBlockState>(mappedState.States);
                var otherStates = new HashSet <IBlockState>(networkState.States);
                otherStates.IntersectWith(thisStates);
                if (otherStates.Count == thisStates.Count)
                {
                    BlockPalette[match].Id = id;
                    BlockPalette[match].Data = data;

                    BlockPalette[match].ItemInstance = new ItemPickInstance()
                    {
                        Id = (short) id,
                        Metadata = data,
                        WantNbt = false
                    };

                    LegacyToRuntimeId[(id << 4) | (byte) data] = match;
                    break;
                }
            }
        }

        // Pre-serialize each palette entry's states to little-endian VarInt NBT and cache
        // the bytes so they don't need to be re-encoded on every network write.
        foreach (var record in BlockPalette)
        {
            var states = new List <NbtTag>();
            foreach (IBlockState state in record.States)
            {
                NbtTag stateTag = null;
                switch (state)
                {
                    case BlockStateByte blockStateByte:
                        stateTag = new NbtByte(state.Name, blockStateByte.Value);
                        break;
                    case BlockStateInt blockStateInt:
                        stateTag = new NbtInt(state.Name, blockStateInt.Value);
                        break;
                    case BlockStateString blockStateString:
                        stateTag = new NbtString(state.Name, blockStateString.Value);
                        break;
                    default:
                        throw new ArgumentOutOfRangeException(nameof(state));
                }
                states.Add(stateTag);
            }

            var nbt = new NbtFile()
            {
                BigEndian = false,
                UseVarInt = true,
                RootTag = new NbtCompound("states", states)
            };
            byte[] nbtBinary = nbt.SaveToBuffer(NbtCompression.None);
            record.StatesCacheNbt = nbtBinary;
        }
    }

    BlockStates = new HashSet <BlockStateContainer>(BlockPalette);
}
/// <summary>
/// Round-trip test for compounds nested inside lists nested inside compounds:
/// saves Root/OuterList/[compound]/InnerList/[compound] to a buffer, reloads it,
/// and verifies the structure and that list-element compounds keep a null name.
/// </summary>
public void NestedListAndCompoundTest()
{
    byte[] data;
    {
        // Build Root -> OuterList -> (unnamed compound) -> InnerList -> (unnamed compound).
        var root = new NbtCompound("Root");
        var outerList = new NbtList("OuterList", NbtTagType.Compound);
        var outerCompound = new NbtCompound();
        var innerList = new NbtList("InnerList", NbtTagType.Compound);
        var innerCompound = new NbtCompound();

        innerList.Add(innerCompound);
        outerCompound.Add(innerList);
        outerList.Add(outerCompound);
        root.Add(outerList);

        var file = new NbtFile(root);
        data = file.SaveToBuffer(NbtCompression.None);
    }
    {
        var file = new NbtFile();
        long bytesRead = file.LoadFromBuffer(data, 0, data.Length, NbtCompression.None);
        // Expected value first (NUnit convention; the original had the arguments swapped,
        // which produced a misleading failure message).
        Assert.AreEqual(data.Length, bytesRead);
        Assert.AreEqual(1, file.RootTag.Get<NbtList>("OuterList").Count);
        // Compounds that are list elements are unnamed.
        Assert.AreEqual(null, file.RootTag.Get<NbtList>("OuterList").Get<NbtCompound>(0).Name);
        Assert.AreEqual(1,
            file.RootTag.Get<NbtList>("OuterList")
                .Get<NbtCompound>(0)
                .Get<NbtList>("InnerList")
                .Count);
        Assert.AreEqual(null,
            file.RootTag.Get<NbtList>("OuterList")
                .Get<NbtCompound>(0)
                .Get<NbtList>("InnerList")
                .Get<NbtCompound>(0)
                .Name);
    }
}
// Static initializer: fills the transparency/luminance tables, loads the canonical
// block-state palette plus the id/item-id JSON mappings from embedded resources,
// caches per-record states NBT, and builds the legacy (id, meta) -> runtime-id table.
static BlockFactory()
{
    // Probe every candidate legacy block id and record per-id transparency and light level.
    // NOTE(review): loop bound is byte.MaxValue * 2 (510) — presumably the legacy id space;
    // confirm against the size of TransparentBlocks/LuminousBlocks.
    for (int i = 0; i < byte.MaxValue * 2; i++)
    {
        var block = GetBlockById(i);
        if (block != null)
        {
            if (block.IsTransparent)
            {
                TransparentBlocks[block.Id] = 1;
            }
            if (block.LightLevel > 0)
            {
                LuminousBlocks[block.Id] = (byte) block.LightLevel;
            }
        }
    }

    NameToId = BuildNameToId();

    // -1 marks "no runtime id known" for every (id << 4 | meta) slot until mapped below.
    for (int i = 0; i < LegacyToRuntimeId.Length; ++i)
    {
        LegacyToRuntimeId[i] = -1;
    }

    var assembly = Assembly.GetAssembly(typeof(Block));

    lock (lockObj)
    {
        // Block name -> legacy numeric id, from the embedded JSON resource.
        Dictionary <string, int> idMapping;
        using (var stream = assembly.GetManifestResourceStream(typeof(Block).Namespace + ".block_id_map.json"))
        using (var reader = new StreamReader(stream))
        {
            idMapping = JsonConvert.DeserializeObject <Dictionary <string, int> >(reader.ReadToEnd());
        }

        // Block name -> item id, used to attach an ItemPickInstance to matching records.
        Dictionary <string, short> itemIdMapping;
        using (var stream = assembly.GetManifestResourceStream(typeof(Block).Namespace + ".item_id_map.json"))
        using (var reader = new StreamReader(stream))
        {
            itemIdMapping = JsonConvert.DeserializeObject <Dictionary <string, short> >(reader.ReadToEnd());
        }

        // Read the canonical block-state palette (little-endian VarInt NBT); runtime ids are
        // assigned sequentially in read order.
        int runtimeId = 0;
        BlockPalette = new BlockPalette();
        using (var stream = assembly.GetManifestResourceStream(typeof(Block).Namespace + ".canonical_block_states.nbt"))
        {
            var reader = new NbtFile();
            reader.UseVarInt = true;
            reader.AllowAlternativeRootTag = true;

            do
            {
                // Each iteration consumes one root tag from the stream.
                reader.LoadFromStream(stream, NbtCompression.AutoDetect);

                var record = new BlockStateContainer();
                var tag = reader.RootTag;
                string name = tag["name"].StringValue;
                record.Name = name;
                record.States = new List <IBlockState>();
                if (idMapping.TryGetValue(name, out var id))
                {
                    record.Id = id;
                }

                // Copy the state entries; only int/byte/string state tags are represented.
                var states = tag["states"];
                if (states != null && states is NbtCompound compound)
                {
                    foreach (var stateEntry in compound)
                    {
                        switch (stateEntry)
                        {
                            case NbtInt nbtInt:
                                record.States.Add(new BlockStateInt() { Name = nbtInt.Name, Value = nbtInt.Value });
                                break;
                            case NbtByte nbtByte:
                                record.States.Add(new BlockStateByte() { Name = nbtByte.Name, Value = nbtByte.Value });
                                break;
                            case NbtString nbtString:
                                record.States.Add(new BlockStateString() { Name = nbtString.Name, Value = nbtString.Value });
                                break;
                        }
                    }
                }

                if (itemIdMapping.TryGetValue(name, out var itemId))
                {
                    record.ItemInstance = new ItemPickInstance() { Id = itemId, WantNbt = false, Metadata = 0 };
                }

                record.RuntimeId = runtimeId++;
                BlockPalette.Add(record);
            } while (stream.Position < stream.Length);
        }

        /*using (var stream = assembly.GetManifestResourceStream(typeof(Block).Namespace + ".blockstates.json"))
         * using (var reader = new StreamReader(stream))
         * {
         *      BlockPalette = BlockPalette.FromJson(reader.ReadToEnd());
         * }*/

        // Pre-serialize each palette entry's states to little-endian VarInt NBT and cache
        // the bytes so they don't need to be re-encoded on every network write.
        foreach (var record in BlockPalette)
        {
            var states = new List <NbtTag>();
            foreach (IBlockState state in record.States)
            {
                NbtTag stateTag = null;
                switch (state)
                {
                    case BlockStateByte blockStateByte:
                        stateTag = new NbtByte(state.Name, blockStateByte.Value);
                        break;
                    case BlockStateInt blockStateInt:
                        stateTag = new NbtInt(state.Name, blockStateInt.Value);
                        break;
                    case BlockStateString blockStateString:
                        stateTag = new NbtString(state.Name, blockStateString.Value);
                        break;
                    default:
                        throw new ArgumentOutOfRangeException(nameof(state));
                }
                states.Add(stateTag);
            }

            var nbt = new NbtFile()
            {
                BigEndian = false,
                UseVarInt = true,
                RootTag = new NbtCompound("states", states)
            };
            byte[] nbtBinary = nbt.SaveToBuffer(NbtCompression.None);
            record.StatesCacheNbt = nbtBinary;
        }
    }

    // Build the legacy lookup: pack (id, meta) into (id << 4 | meta) and store the
    // palette index (runtime id).
    int palletSize = BlockPalette.Count;
    for (int i = 0; i < palletSize; i++)
    {
        if (BlockPalette[i].Data > 15)
        {
            continue; // TODO: figure out why palette contains blocks with meta more than 15
        }
        if (BlockPalette[i].Data == -1)
        {
            continue; // These are blockstates that does not have a metadata mapping
        }

        LegacyToRuntimeId[(BlockPalette[i].Id << 4) | (byte) BlockPalette[i].Data] = i;
    }

    BlockStates = new HashSet <BlockStateContainer>(BlockPalette);
}