public void Initialize()
{
    // Default the world folder from config when the host didn't set one.
    BasePath = BasePath ?? Config.GetProperty("LevelDBWorldFolder", "World").Trim();

    var directory = new DirectoryInfo(Path.Combine(BasePath, "db"));
    var levelFileName = Path.Combine(BasePath, "level.dat");
    Log.Debug($"Loading level.dat from {levelFileName}");

    if (File.Exists(levelFileName))
    {
        // Bedrock level.dat is little-endian NBT without VarInts, preceded by an
        // 8-byte header (version + payload length) that must be skipped.
        var file = new NbtFile { BigEndian = false, UseVarInt = false };

        // BUGFIX: the stream was previously opened without a using block and
        // never disposed, leaking the file handle.
        using (var levelStream = File.OpenRead(levelFileName))
        {
            levelStream.Seek(8, SeekOrigin.Begin);
            file.LoadFromStream(levelStream, NbtCompression.None);
        }

        Log.Debug($"Level DAT\n{file.RootTag}");
        NbtTag dataTag = file.RootTag["Data"];
        //LevelInfo = new LevelInfoBedrock(dataTag);
    }
    else
    {
        Log.Warn($"No level.dat found at {levelFileName}. Creating empty.");
        LevelInfo = new LevelInfoBedrock();
    }

    var db = new Database(directory);
    db.Open();
    _db = db;

    MissingChunkProvider?.Initialize();
}
public void ValueTest()
{
    // Round-trip: write one named tag for every value type, then read the
    // document back and verify its contents.
    using (var stream = new MemoryStream())
    {
        var writer = new NbtWriter(stream, "root");
        Assert.AreEqual(stream, writer.BaseStream);

        writer.WriteByte("byte", 1);
        writer.WriteShort("short", 2);
        writer.WriteInt("int", 3);
        writer.WriteLong("long", 4L);
        writer.WriteFloat("float", 5f);
        writer.WriteDouble("double", 6d);
        writer.WriteByteArray("byteArray", new byte[] { 10, 11, 12 });
        writer.WriteIntArray("intArray", new[] { 20, 21, 22 });
        writer.WriteString("string", "123");

        // The writer only reports completion once the root compound is closed.
        Assert.IsFalse(writer.IsDone);
        writer.EndCompound();
        Assert.IsTrue(writer.IsDone);
        writer.Finish();

        stream.Position = 0;
        var readBack = new NbtFile();
        readBack.LoadFromStream(stream, NbtCompression.None);
        TestFiles.AssertValueTest(readBack);
    }
}
public void ByteArrayFromStream()
{
    // Exercises every WriteByteArray overload that sources its bytes from a
    // stream, both for named tags and for unnamed list elements, with and
    // without a caller-supplied copy buffer, for full-length and 1-byte reads.
    // NOTE: the source stream is wrapped in NonSeekableStream, so the writer
    // must not rely on Seek/Position.
    var data = new byte[64*1024];
    for (int i = 0; i < data.Length; i++) {
        data[i] = unchecked((byte)i); // wrapping 0..255 pattern
    }
    using (var ms = new MemoryStream()) {
        var writer = new NbtWriter(ms, "root");
        {
            byte[] buffer = new byte[1024]; // shared scratch buffer for the "with buffer" overloads
            // Named tags: full array, with/without buffer, then single byte, with/without buffer.
            using (var dataStream = new NonSeekableStream(new MemoryStream(data))) {
                writer.WriteByteArray("byteArray1", dataStream, data.Length);
            }
            using (var dataStream = new NonSeekableStream(new MemoryStream(data))) {
                writer.WriteByteArray("byteArray2", dataStream, data.Length, buffer);
            }
            using (var dataStream = new NonSeekableStream(new MemoryStream(data))) {
                writer.WriteByteArray("byteArray3", dataStream, 1);
            }
            using (var dataStream = new NonSeekableStream(new MemoryStream(data))) {
                writer.WriteByteArray("byteArray4", dataStream, 1, buffer);
            }
            // Same four variants again as unnamed elements of a ByteArray list.
            // The declared element count (4) must match the writes below exactly.
            writer.BeginList("innerLists", NbtTagType.ByteArray, 4);
            using (var dataStream = new NonSeekableStream(new MemoryStream(data))) {
                writer.WriteByteArray(dataStream, data.Length);
            }
            using (var dataStream = new NonSeekableStream(new MemoryStream(data))) {
                writer.WriteByteArray(dataStream, data.Length, buffer);
            }
            using (var dataStream = new NonSeekableStream(new MemoryStream(data))) {
                writer.WriteByteArray(dataStream, 1);
            }
            using (var dataStream = new NonSeekableStream(new MemoryStream(data))) {
                writer.WriteByteArray(dataStream, 1, buffer);
            }
            writer.EndList();
        }
        writer.EndCompound();
        writer.Finish();
        // Rewind and re-parse what was just written.
        ms.Position = 0;
        var file = new NbtFile();
        file.LoadFromStream(ms, NbtCompression.None);
        // Named tags: full copies must match; 1-byte reads contain only data[0].
        CollectionAssert.AreEqual(data, file.RootTag["byteArray1"].ByteArrayValue);
        CollectionAssert.AreEqual(data, file.RootTag["byteArray2"].ByteArrayValue);
        Assert.AreEqual(1, file.RootTag["byteArray3"].ByteArrayValue.Length);
        Assert.AreEqual(data[0], file.RootTag["byteArray3"].ByteArrayValue[0]);
        Assert.AreEqual(1, file.RootTag["byteArray4"].ByteArrayValue.Length);
        Assert.AreEqual(data[0], file.RootTag["byteArray4"].ByteArrayValue[0]);
        // List elements: same expectations, addressed by index.
        CollectionAssert.AreEqual(data, file.RootTag["innerLists"][0].ByteArrayValue);
        CollectionAssert.AreEqual(data, file.RootTag["innerLists"][1].ByteArrayValue);
        Assert.AreEqual(1, file.RootTag["innerLists"][2].ByteArrayValue.Length);
        Assert.AreEqual(data[0], file.RootTag["innerLists"][2].ByteArrayValue[0]);
        Assert.AreEqual(1, file.RootTag["innerLists"][3].ByteArrayValue.Length);
        Assert.AreEqual(data[0], file.RootTag["innerLists"][3].ByteArrayValue[0]);
    }
}
public static Nbt ReadNbt(Stream stream, bool useVarInt = false)
{
    // A leading 0xFF byte marks "no NBT payload present".
    int marker = stream.ReadByte();
    if (marker == 255) return null;
    stream.Position -= 1; // not the marker — rewind so the parser sees this byte

    // Little-endian NBT, optionally VarInt-encoded, never compressed here.
    var nbtFile = new NbtFile
    {
        BigEndian = false,
        UseVarInt = useVarInt
    };
    nbtFile.LoadFromStream(stream, NbtCompression.None);

    return new Nbt { NbtFile = nbtFile };
}
private void LoadFromFile(string directory)
{
    // Parse level.dat (uncompressed NBT) and deserialize its "Data" compound.
    var levelFile = new NbtFile();
    using (var stream = File.Open(Path.Combine(LevelDirectory, "level.dat"), FileMode.Open))
        levelFile.LoadFromStream(stream, NbtCompression.None, null);

    var dataTag = levelFile.RootTag.Get<NbtCompound>("Data");
    var level = (SavedLevel)new NbtSerializer(typeof(SavedLevel)).Deserialize(dataTag);

    Name = level.LevelName;
    Time = level.Time;
    GameMode = (GameMode)level.GameMode;
    MapFeatures = level.MapFeatures;
    Seed = level.Seed;

    // Resolve the world generator named in the save and prime it.
    WorldGenerator = GetGenerator(level.GeneratorName);
    WorldGenerator.Seed = Seed;
    GeneratorOptions = level.GeneratorOptions;
    WorldGenerator.Initialize(this);

    SpawnPoint = level.SpawnPoint;
    World = new World(this, WorldGenerator, Path.Combine(directory, "region"));
}
public override NbtCompound Read()
{
    // Parse an uncompressed NBT document from the underlying stream and
    // return its root compound.
    var document = new NbtFile();
    document.LoadFromStream(Stream, NbtCompression.None);
    return document.RootTag;
}
public void ReloadNonSeekableStream()
{
    var bigFile = new NbtFile(TestFiles.Big);
    using (var backing = new MemoryStream())
    using (var nonSeekable = new NonSeekableStream(backing))
    {
        long written = bigFile.SaveToStream(nonSeekable, NbtCompression.None);

        // AutoDetect needs to peek/seek, so it must be rejected here.
        backing.Position = 0;
        Assert.Throws<NotSupportedException>(
            () => bigFile.LoadFromStream(nonSeekable, NbtCompression.AutoDetect));

        // Forcing the wrong compression must fail while decoding.
        backing.Position = 0;
        Assert.Throws<InvalidDataException>(
            () => bigFile.LoadFromStream(nonSeekable, NbtCompression.ZLib, NbtVersion.Legacy));

        // Correct settings: the document round-trips and byte counts agree.
        backing.Position = 0;
        long read = bigFile.LoadFromStream(nonSeekable, NbtCompression.None, NbtVersion.Legacy);
        Assert.AreEqual(written, read);
        TestFiles.AssertNbtBigFile(bigFile);
    }
}
void LoadFromStreamInternal(String fileName, NbtCompression compression)
{
    // Read the file fully into memory, then parse it from a MemoryStream
    // with the requested compression setting.
    byte[] contents = File.ReadAllBytes(fileName);
    using (var buffer = new MemoryStream(contents))
    {
        new NbtFile().LoadFromStream(buffer, compression);
    }
}
/// <summary>
/// Retrieves the requested chunk from the region, or
/// generates it if a world generator is provided.
/// </summary>
/// <param name="position">The position of the requested local chunk coordinates.</param>
public Chunk GetChunk(Vector3 position)
{
    // TODO: This could use some refactoring
    lock (Chunks)
    {
        if (!Chunks.ContainsKey(position))
        {
            if (regionFile != null)
            {
                // Search the stream for that region
                lock (regionFile)
                {
                    var chunkData = GetChunkFromTable(position);
                    if (chunkData == null)
                    {
                        if (WorldGenerator == null)
                            throw new ArgumentException("The requested chunk is not loaded.", "position");
                        GenerateChunk(position);
                        return Chunks[position];
                    }
                    regionFile.Seek(chunkData.Item1, SeekOrigin.Begin);
                    // TODO: Avoid making new objects here, and in the WriteInt32
                    int length = new MinecraftStream(regionFile).ReadInt32();
                    int compressionMode = regionFile.ReadByte();
                    switch (compressionMode)
                    {
                        case 1: // gzip
                        case 2: // zlib
                            // BUGFIX: the gzip case previously fell through without
                            // loading anything, so a gzip-compressed chunk was never
                            // added and the Chunks[position] lookup below threw.
                            var nbt = new NbtFile();
                            var compression = compressionMode == 1
                                ? NbtCompression.GZip
                                : NbtCompression.ZLib;
                            nbt.LoadFromStream(regionFile, compression, null);
                            var chunk = Chunk.FromNbt(position, nbt);
                            chunk.ParentRegion = this;
                            Chunks.Add(position, chunk);
                            break;
                        default:
                            throw new InvalidDataException("Invalid compression scheme provided by region file.");
                    }
                }
            }
            else if (WorldGenerator == null)
                throw new ArgumentException("The requested chunk is not loaded.", "position");
            else
                Chunks.Add(position, WorldGenerator.GenerateChunk(position, this));
        }
        return Chunks[position];
    }
}
public static NbtCompound ReadNbtCompound(Stream stream)
{
    // Little-endian, fixed-width (no VarInt), uncompressed NBT.
    var file = new NbtFile
    {
        BigEndian = false,
        UseVarInt = false
    };
    file.LoadFromStream(stream, NbtCompression.None);
    return (NbtCompound)file.RootTag;
}
public Nbt ReadNbt()
{
    // Little-endian NBT read directly off the packet reader's stream.
    var file = new NbtFile { BigEndian = false };
    var result = new Nbt { NbtFile = file };
    file.LoadFromStream(_reader.BaseStream, NbtCompression.None);
    return result;
}
public void LoadingBigFileStream()
{
    // Loading from a non-seekable stream must still parse the whole document
    // and report exactly how many bytes were consumed.
    byte[] contents = File.ReadAllBytes(TestFiles.Big);
    using (var backing = new MemoryStream(contents))
    using (var nonSeekable = new NonSeekableStream(backing))
    {
        var parsed = new NbtFile();
        long consumed = parsed.LoadFromStream(nonSeekable, NbtCompression.None, NbtVersion.Legacy, null);
        TestFiles.AssertNbtBigFile(parsed);
        Assert.AreEqual(consumed, new FileInfo(TestFiles.Big).Length);
    }
}
public Nbt ReadNbt()
{
    var file = new NbtFile { BigEndian = false };
    // Block-entity data packets encode their NBT with VarInts; other packet
    // types use fixed-width integers.
    file.UseVarInt = this is McpeBlockEntityData;
    var result = new Nbt { NbtFile = file };
    file.LoadFromStream(_reader.BaseStream, NbtCompression.None);
    return result;
}
public void LoadingBigFileStream()
{
    // Parse the big test file through a non-seekable wrapper; the reported
    // byte count must match the file size on disk.
    byte[] contents = File.ReadAllBytes(TestFiles.Big);
    using (var backing = new MemoryStream(contents))
    using (var nonSeekable = new NonSeekableStream(backing))
    {
        var parsed = new NbtFile();
        long consumed = parsed.LoadFromStream(nonSeekable, NbtCompression.None, null);
        TestFiles.AssertNbtBigFile(parsed);
        Assert.AreEqual(consumed, new FileInfo(TestFiles.Big).Length);
    }
}
public void LoadingBigFileStream()
{
    // Same round-trip as the file-based test, but through a stream that
    // cannot seek; the returned length must equal the on-disk size.
    byte[] contents = File.ReadAllBytes("TestFiles/bigtest.nbt");
    using (var backing = new MemoryStream(contents))
    using (var nonSeekable = new NonSeekableStream(backing))
    {
        var parsed = new NbtFile();
        int consumed = parsed.LoadFromStream(nonSeekable, NbtCompression.None, null);
        AssertNbtBigFile(parsed);
        Assert.AreEqual(consumed, new FileInfo("TestFiles/bigtest.nbt").Length);
    }
}
public void LoadingBigFileStream()
{
    // Load bigtest.nbt through a NonSeekableStream; the parser must report
    // exactly as many bytes consumed as the file holds.
    byte[] contents = File.ReadAllBytes("TestFiles/bigtest.nbt");
    using (var backing = new MemoryStream(contents))
    {
        using (var nonSeekable = new NonSeekableStream(backing))
        {
            var parsed = new NbtFile();
            int consumed = parsed.LoadFromStream(nonSeekable, NbtCompression.None, null);
            AssertNbtBigFile(parsed);
            Assert.AreEqual(consumed, new FileInfo("TestFiles/bigtest.nbt").Length);
        }
    }
}
public void Initialize()
{
    // Default the world folder from config when the host didn't set one.
    BasePath ??= Config.GetProperty("LevelDBWorldFolder", "World").Trim();
    var directory = new DirectoryInfo(Path.Combine(BasePath, "db"));
    var levelFileName = Path.Combine(BasePath, "level.dat");
    Log.Debug($"Loading level.dat from {levelFileName}");
    if (File.Exists(levelFileName))
    {
        // Bedrock level.dat: little-endian NBT without VarInts, preceded by an
        // 8-byte header (version + payload length) that we skip.
        var file = new NbtFile { BigEndian = false, UseVarInt = false };
        using FileStream stream = File.OpenRead(levelFileName);
        stream.Seek(8, SeekOrigin.Begin);
        file.LoadFromStream(stream, NbtCompression.None);
        Log.Debug($"Level DAT\n{file.RootTag}");
        LevelInfo = file.RootTag.Deserialize <LevelInfoBedrock>();
    }
    else
    {
        Log.Warn($"No level.dat found at {levelFileName}. Creating empty.");
        LevelInfo = new LevelInfoBedrock();
    }
    // We must reuse the same DB for all providers (dimensions) in LevelDB.
    // NOTE(review): Db appears to be shared state guarded only by this null
    // check — concurrent first-time initialization could race; confirm callers
    // serialize Initialize().
    if (Db == null)
    {
        var db = new Database(directory) { CreateIfMissing = true };
        db.Open();
        Db = db;
        directory.Refresh(); // refresh create state if this dir didn't exist
        // Shutdown hook. Must use to flush in memory log of LevelDB.
        AppDomain.CurrentDomain.ProcessExit += (sender, args) =>
        {
            SaveChunks();
            Log.Warn("Closing LevelDB");
            Db.Close();
        };
    }
    MissingChunkProvider?.Initialize(this);
}
public void NullParameterTest()
{
    // Every public constructor and load/save entry point of NbtFile must
    // reject a null buffer/path/stream with ArgumentNullException.
    Assert.Throws <ArgumentNullException>(() => new NbtFile((NbtCompound)null));
    Assert.Throws <ArgumentNullException>(() => new NbtFile((string)null));
    NbtFile file = new NbtFile();
    Assert.Throws <ArgumentNullException>(() => file.LoadFromBuffer(null, 0, 1, NbtCompression.None));
    Assert.Throws <ArgumentNullException>(() => file.LoadFromBuffer(null, 0, 1, NbtCompression.None, tag => true));
    Assert.Throws <ArgumentNullException>(() => file.LoadFromFile(null));
    Assert.Throws <ArgumentNullException>(() => file.LoadFromFile(null, NbtCompression.None, tag => true));
    Assert.Throws <ArgumentNullException>(() => file.LoadFromStream(null, NbtCompression.AutoDetect));
    Assert.Throws <ArgumentNullException>(() => file.LoadFromStream(null, NbtCompression.AutoDetect, tag => true));
    Assert.Throws <ArgumentNullException>(() => file.SaveToBuffer(null, 0, NbtCompression.None));
    Assert.Throws <ArgumentNullException>(() => file.SaveToFile(null, NbtCompression.None));
    Assert.Throws <ArgumentNullException>(() => file.SaveToStream(null, NbtCompression.None));
    // Static helpers must enforce the same contract.
    Assert.Throws <ArgumentNullException>(() => NbtFile.ReadRootTagName(null));
    Assert.Throws <ArgumentNullException>(
        () => NbtFile.ReadRootTagName((Stream)null, NbtCompression.None, true, 0));
}
public static Nbt ReadLegacyNbt(Stream stream)
{
    // Legacy encoding: little-endian, VarInt lengths, and the root tag is
    // allowed to be something other than a compound.
    var file = new NbtFile
    {
        BigEndian = false,
        UseVarInt = true,
        AllowAlternativeRootTag = true
    };
    var result = new Nbt { NbtFile = file };
    file.LoadFromStream(stream, NbtCompression.None);
    return result;
}
public void Save()
{
    // Serializes the current level state to level.dat (GZip-compressed NBT)
    // and then saves the world's region files.
    NbtFile file = new NbtFile();
    var serializer = new NbtSerializer(typeof(SavedLevel));
    // Snapshot of every persisted level property. Version 19133 is the
    // "Anvil" level.dat version marker.
    var level = new SavedLevel
    {
        IsRaining = Raining,
        GeneratorVersion = 0,
        Time = Time,
        GameMode = (int)GameMode,
        MapFeatures = MapFeatures,
        GeneratorName = WorldGenerator.GeneratorName,
        Initialized = true,
        Seed = Seed,
        SpawnPoint = SpawnPoint,
        SizeOnDisk = 0,
        ThunderTime = ThunderTime,
        RainTime = RainTime,
        Version = 19133,
        Thundering = Thundering,
        LevelName = Name,
        LastPlayed = DateTime.UtcNow.Ticks
    };
    // Embed the single-player's data (players/<name>.dat) when it exists,
    // renamed to the "Player" compound as level.dat expects.
    if (!string.IsNullOrEmpty(PlayerName))
    {
        if (File.Exists(Path.Combine(LevelDirectory, "players", PlayerName + ".dat")))
        {
            var player = new NbtFile();
            using (Stream stream = File.Open(Path.Combine(LevelDirectory, "players", PlayerName + ".dat"), FileMode.Open))
                player.LoadFromStream(stream, NbtCompression.GZip, null);
            level.Player = player.RootTag;
            level.Player.Name = "Player";
        }
    }
    var data = serializer.Serialize(level);
    // level.dat's root is an unnamed compound wrapping the serialized data.
    file.RootTag = new NbtCompound("");
    file.RootTag.Add(data);
    using (var stream = File.Create(Path.Combine(LevelDirectory, "level.dat")))
        file.SaveToStream(stream, NbtCompression.GZip);
    // First save: the world has no directory yet, so give it one.
    if (World.Directory == null)
    {
        World.Save(Path.Combine(LevelDirectory, "region"));
    }
    else
    {
        World.Save();
    }
}
public void GenerateLevelInfoPropertiesTest()
{
    // Developer utility, not a real test: reads a local Bedrock level.dat and
    // prints C# property declarations for each NBT tag found, ready to paste
    // into a LevelInfo-style class. Requires the hard-coded world at C:\Temp.
    var file = new NbtFile { BigEndian = false, UseVarInt = false };
    using FileStream stream = File.OpenRead(@"C:\Temp\TrashBedrockWorld\level.dat");
    stream.Seek(8, SeekOrigin.Begin); // skip the 8-byte level.dat header
    file.LoadFromStream(stream, NbtCompression.None);
    foreach (NbtTag tag in (NbtCompound)file.RootTag)
    {
        NbtTagType tagTagType = tag.TagType;
        // Map the NBT tag type to the C# type for the generated property.
        string type = tagTagType switch
        {
            NbtTagType.Int => "int",
            NbtTagType.Byte => "byte",
            NbtTagType.Short => "short",
            NbtTagType.Long => "long",
            NbtTagType.Float => "float",
            NbtTagType.Double => "double",
            NbtTagType.ByteArray => "byte[]",
            NbtTagType.String => "string",
            //NbtTagType.List => throw new NotImplementedException(),
            //NbtTagType.Compound => throw new NotImplementedException(),
            NbtTagType.IntArray => "int[]",
            NbtTagType.LongArray => "long[]",
            _ => null
        };
        // Unmapped types (List, Compound) are emitted commented-out.
        string comment = "";
        if (type == null) comment = "//";
        // When the C# property name differs from the tag name, add a
        // JsonPropertyName attribute so serialization still matches the tag.
        string attribute = "";
        string name = UppercaseFirst(tag.Name);
        if (name != tag.Name)
        {
            attribute = $"[JsonPropertyName(\"{tag.Name}\")] ";
        }
        Console.WriteLine($"\t\t{comment}{attribute}public {type} {name} {{ get; set; }}");
    }
}
public void ReloadNonSeekableStream()
{
    // Save to a non-seekable stream, then read it back; the byte counts in
    // each direction must agree and the content must survive the round trip.
    var original = new NbtFile("TestFiles/bigtest.nbt");
    using (var backing = new MemoryStream())
    using (var nonSeekable = new NonSeekableStream(backing))
    {
        var written = original.SaveToStream(nonSeekable, NbtCompression.None);
        backing.Position = 0;
        var read = original.LoadFromStream(nonSeekable, NbtCompression.None, null);
        Assert.AreEqual(written, read);
        AssertNbtBigFile(original);
    }
}
public override Level Read(Stream src, string name, bool metadata)
{
    // Parse the NBT document and build the level from its root tag.
    var document = new NbtFile();
    document.LoadFromStream(src);
    Level lvl;
    ReadData(document.RootTag, name, out lvl);
    // Metadata is an optional second pass over the same root tag.
    if (metadata)
    {
        ReadMetadata(document.RootTag, lvl);
    }
    return lvl;
}
public void FromNbtTest()
{
    // Parses a known local Bedrock level.dat and checks that
    // LevelInfoBedrock.FromNbt maps each field to the expected value.
    // Requires the hard-coded world file at C:\Temp; the expected constants
    // below are specific to that file.
    var file = new NbtFile { BigEndian = false, UseVarInt = false };
    //var levelStream = File.OpenRead(@"C:\Temp\TrashBedrockWorld\level.dat");
    using FileStream stream = File.OpenRead(@"C:\Temp\TrashBedrockWorld\level_test_generated.dat");
    // level.dat header: 4-byte version followed by a 4-byte payload length.
    var header = new Span <byte>(new byte[4]);
    stream.Read(header);
    Console.WriteLine($"File version:{BitConverter.ToInt32(header)}");
    stream.Seek(4, SeekOrigin.Current); // skip the payload-length field
    file.LoadFromStream(stream, NbtCompression.None);
    var levelInfo = LevelInfoBedrock.FromNbt(file.RootTag);
    Assert.IsNotNull(levelInfo);
    //public int GameType { get; set; }
    Assert.AreEqual(1, levelInfo.GameType);
    //public int Generator { get; set; }
    Assert.AreEqual(1, levelInfo.Generator);
    //public long LastPlayed { get; set; }
    Assert.AreEqual(1594911609, levelInfo.LastPlayed);
    //public string LevelName { get; set; }
    Assert.AreEqual("BedrockGeneratedLevel", levelInfo.LevelName);
    //public int Platform { get; set; }
    Assert.AreEqual(2, levelInfo.Platform);
    //public long RandomSeed { get; set; }
    Assert.AreEqual(3429004588, levelInfo.RandomSeed);
    //public int SpawnX { get; set; }
    Assert.AreEqual(44, levelInfo.SpawnX);
    //public int SpawnY { get; set; }
    Assert.AreEqual(32767, levelInfo.SpawnY);
    ////public int SpawnZ { get; set; }
    Assert.AreEqual(4, levelInfo.SpawnZ);
    ////public long Time { get; set; }
    Assert.AreEqual(269000, levelInfo.Time);
    ////public long SpawnMobs { get; set; }
    Assert.AreEqual(1, levelInfo.SpawnMobs);
}
public NbtCompound ReadNbtCompound()
{
    // Peek at the next tag type; anything but a compound means no NBT here.
    var tagType = (NbtTagType)ReadByte();
    if (tagType != NbtTagType.Compound) return null;
    Position--; // un-read the peeked byte so the parser sees it again

    var file = new NbtFile { BigEndian = true, UseVarInt = false };
    file.LoadFromStream(this, NbtCompression.None);
    return (NbtCompound)file.RootTag;
}
public void ComplexStringsTest()
{
    // Fixed seed keeps this test repeatable.
    var rand = new Random(0);

    // Build a batch of random Unicode strings to round-trip.
    const int numStrings = 1024;
    var writtenStrings = new List<string>();
    for (int i = 0; i < numStrings; i++)
    {
        writtenStrings.Add(GenRandomUnicodeString(rand));
    }

    using (var ms = new MemoryStream())
    {
        // Serialize them as one NBT list of strings.
        var writer = new NbtWriter(ms, "test");
        writer.BeginList("stringList", NbtTagType.String, numStrings);
        foreach (string s in writtenStrings)
        {
            writer.WriteString(s);
        }
        writer.EndList();
        writer.EndCompound();
        writer.Finish();

        ms.Position = 0; // rewind before reading back

        // Re-parse and extract the strings in order.
        var file = new NbtFile();
        file.LoadFromStream(ms, NbtCompression.None);
        var rootTag = (NbtCompound)file.RootTag;
        var readStrings = rootTag.Get<NbtList>("stringList")
                                 .ToArray<NbtString>()
                                 .Select(tag => tag.StringValue);

        // Every string must survive the round trip byte-for-byte.
        CollectionAssert.AreEqual(writtenStrings, readStrings);
    }
}
public override Level Read(Stream src, string name, bool metadata)
{
    // Parse the document, build the level, then optionally apply the
    // "Metadata" compound when present.
    var document = new NbtFile();
    document.LoadFromStream(src);
    Level lvl;
    ReadData(document.RootTag, name, out lvl);
    if (metadata && document.RootTag.Contains("Metadata"))
    {
        ReadMetadata((NbtCompound)document.RootTag["Metadata"], lvl);
    }
    return lvl;
}
public void ByteArrayFromStream()
{
    // 64 KiB of wrapping byte values as the payload.
    var data = new byte[64 * 1024];
    for (int i = 0; i < data.Length; i++)
    {
        data[i] = unchecked((byte)i);
    }

    using (var ms = new MemoryStream())
    {
        var writer = new NbtWriter(ms, "root");
        // Writing a byte array sourced from a stream must work even when
        // that source cannot seek.
        using (var dataStream = new NonSeekableStream(new MemoryStream(data)))
        {
            writer.WriteByteArray("byteArray", dataStream, data.Length);
        }
        writer.EndCompound();
        writer.Finish();

        ms.Position = 0;
        var file = new NbtFile();
        file.LoadFromStream(ms, NbtCompression.None);
        CollectionAssert.AreEqual(file.RootTag["byteArray"].ByteArrayValue, data);
    }
}
//[Test] //public void LevelDbGetValueFromMissingKey() //{ // using var db = new Database(new DirectoryInfo(@"C:\Development\Other\bedrock-server-1.14.1.4\worlds\BedrockGeneratedLevel\db")); // db.Open(); // int x = 15; // int z = 6; // Log.Warn("Looking for version"); // var versionKey = BitConverter.GetBytes(x).Concat(BitConverter.GetBytes(z)).Concat(new byte[] {0x76}).ToArray(); // var version = db.Get(versionKey); // Assert.AreEqual(15, version.First()); // Log.Warn("Looking for key"); // Assert.NotNull(db.Get(BitConverter.GetBytes(x).Concat(BitConverter.GetBytes(z)).Concat(new byte[] {0x2f, 0}).ToArray())); // Assert.NotNull(db.Get(BitConverter.GetBytes(x).Concat(BitConverter.GetBytes(z)).Concat(new byte[] {0x2f, 1}).ToArray())); // Assert.NotNull(db.Get(BitConverter.GetBytes(x).Concat(BitConverter.GetBytes(z)).Concat(new byte[] {0x2f, 2}).ToArray())); // Assert.NotNull(db.Get(BitConverter.GetBytes(x).Concat(BitConverter.GetBytes(z)).Concat(new byte[] {0x2f, 3}).ToArray())); // Assert.NotNull(db.Get(BitConverter.GetBytes(x).Concat(BitConverter.GetBytes(z)).Concat(new byte[] {0x2f, 4}).ToArray())); // Assert.NotNull(db.Get(BitConverter.GetBytes(x).Concat(BitConverter.GetBytes(z)).Concat(new byte[] {0x2f, 5}).ToArray())); // Assert.Null(db.Get(BitConverter.GetBytes(x).Concat(BitConverter.GetBytes(z)).Concat(new byte[] {0x2f, 6}).ToArray())); // Assert.Null(db.Get(BitConverter.GetBytes(x).Concat(BitConverter.GetBytes(z)).Concat(new byte[] {0x2f, 7}).ToArray())); // Assert.Null(db.Get(BitConverter.GetBytes(x).Concat(BitConverter.GetBytes(z)).Concat(new byte[] {0x2f, 8}).ToArray())); // Fail?? 
//    Assert.Null(db.Get(BitConverter.GetBytes(x).Concat(BitConverter.GetBytes(z)).Concat(new byte[] {0x2f, 9}).ToArray()));
//    Assert.Null(db.Get(BitConverter.GetBytes(x).Concat(BitConverter.GetBytes(z)).Concat(new byte[] {0x2f, 10}).ToArray()));
//    Assert.Null(db.Get(BitConverter.GetBytes(x).Concat(BitConverter.GetBytes(z)).Concat(new byte[] {0x2f, 11}).ToArray()));
//    Assert.Null(db.Get(BitConverter.GetBytes(x).Concat(BitConverter.GetBytes(z)).Concat(new byte[] {0x2f, 12}).ToArray()));
//    Assert.Null(db.Get(BitConverter.GetBytes(x).Concat(BitConverter.GetBytes(z)).Concat(new byte[] {0x2f, 13}).ToArray()));
//    Assert.Null(db.Get(BitConverter.GetBytes(x).Concat(BitConverter.GetBytes(z)).Concat(new byte[] {0x2f, 14}).ToArray()));
//    Assert.Null(db.Get(BitConverter.GetBytes(x).Concat(BitConverter.GetBytes(z)).Concat(new byte[] {0x2f, 15}).ToArray()));
//}

// Parses one serialized sub-chunk in the "version 8" palette-based format
// and asserts the layout this particular test fixture is known to produce
// (4 bits per block, 12 palette entries per storage).
private void ParseChunk(ReadOnlySpan <byte> data)
{
    var reader = new SpanReader(data);
    var version = reader.ReadByte();
    Assert.AreEqual(8, version); // new palette-based chunk format
    var storageSize = reader.ReadByte();
    for (int i = 0; i < storageSize; i++)
    {
        // Header byte: low bit flags the serialization type, the remaining
        // bits hold bits-per-block.
        var bitsPerBlock = reader.ReadByte() >> 1;
        Assert.AreEqual(4, bitsPerBlock);
        // Blocks are packed into 32-bit words, (32 / bitsPerBlock) per word,
        // for 4096 blocks total.
        int numberOfBytes = 4096 / (32 / bitsPerBlock) * 4;
        var blockData = reader.Read(numberOfBytes);
        Assert.AreEqual(4096 / 2, blockData.Length);
        int paletteSize = reader.ReadInt32();
        Assert.AreEqual(12, paletteSize);
        for (int j = 0; j < paletteSize; j++)
        {
            // Each palette entry is a little-endian NBT compound. The NBT
            // parser reports how many bytes it consumed, which is used to
            // advance the span reader past the entry.
            NbtFile file = new NbtFile();
            file.BigEndian = false;
            file.UseVarInt = false;
            var buffer = data.Slice(reader.Position).ToArray();
            int numberOfBytesRead = (int)file.LoadFromStream(new MemoryStream(buffer), NbtCompression.None);
            reader.Position += numberOfBytesRead;
            Console.WriteLine(file.RootTag);
            Assert.NotZero(numberOfBytesRead);
        }
    }
}
public override Level Read(Stream src, string name, bool metadata)
{
    // Parse the schematic-style NBT document.
    var document = new NbtFile();
    document.LoadFromStream(src);
    NbtCompound root = document.RootTag;

    byte[] raw = root["Blocks"].ByteArrayValue;
    byte[] meta = root["Data"].ByteArrayValue;
    int width = root["Width"].ShortValue;
    int height = root["Height"].ShortValue;
    int length = root["Length"].ShortValue;

    var lvl = new Level(name, (ushort)width, (ushort)height, (ushort)length);
    byte[] blocks = lvl.blocks;

    // Convert each Minecraft block id + low metadata nibble via the
    // conversion table.
    for (int i = 0; i < blocks.Length; i++)
    {
        blocks[i] = (byte)mcConv[raw[i], meta[i] & 0x0F];
    }

    // Ids beyond the CPE range become custom blocks with an ext-tile entry.
    for (int i = 0; i < blocks.Length; i++)
    {
        byte block = blocks[i];
        if (block < Block.CPE_COUNT) continue;

        blocks[i] = Block.custom_block;
        ushort x, y, z;
        lvl.IntToPos(i, out x, out y, out z);
        lvl.FastSetExtTile(x, y, z, block);
    }
    return lvl;
}
// Loads the chunk column at world chunk coordinates (X, Z) from the Anvil
// region file it belongs to, converting blocks/metadata/light into a
// ChunkColumn; falls back to the backing generator when the region or the
// chunk does not exist on disk.
public ChunkColumn GetChunk(int X, int Z)
{
    var width = 32;  // chunks per region, X axis
    var depth = 32;  // chunks per region, Z axis
    // Region coordinates: each region file covers 32x32 chunks.
    var rx = X >> 5;
    var rz = Z >> 5;
    var filePath = Path.Combine(_basePath, string.Format(@"region\r.{0}.{1}.mca", rx, rz));
    if (!File.Exists(filePath)) { return(_backEndGenerator.GenerateChunkColumn(new Vector2(X, Z))); }
    using (var regionFile = File.OpenRead(filePath))
    {
        // Skip past the 8 KiB region header (location + timestamp tables).
        var buffer = new byte[8192];
        regionFile.Read(buffer, 0, 8192);
        // Chunk coordinates local to this region, normalized to 0..31.
        var xi = (X % width); if (xi < 0) { xi += 32; }
        var zi = (Z % depth); if (zi < 0) { zi += 32; }
        var tableOffset = (xi + zi * width) * 4;
        // Location table entry: 3 big-endian bytes of offset (in 4 KiB
        // sectors) followed by 1 byte of sector count.
        regionFile.Seek(tableOffset, SeekOrigin.Begin);
        var offsetBuffer = new byte[4];
        regionFile.Read(offsetBuffer, 0, 3);
        Array.Reverse(offsetBuffer); // big-endian -> little-endian
        var offset = BitConverter.ToInt32(offsetBuffer, 0) << 4;
        var length = regionFile.ReadByte();
        //if (offset == 0 || length == 0) return _backEndGenerator.GenerateChunkColumn(new Vector2(X, Z));
        // A zeroed table entry means the chunk was never written.
        if (offset == 0 || length == 0) { return(_backEndGenerator.GenerateChunkColumn(new Vector2(X, Z))); }
        regionFile.Seek(offset, SeekOrigin.Begin);
        // Skip the 4-byte payload length; read the compression byte.
        var waste = new byte[4];
        regionFile.Read(waste, 0, 4);
        var compressionMode = regionFile.ReadByte();
        // NOTE(review): compressionMode is read but not checked — the load
        // below always assumes zlib (mode 2); gzip (mode 1) chunks would fail.
        var nbt = new NbtFile();
        nbt.LoadFromStream(regionFile, NbtCompression.ZLib);
        var dataTag = nbt.RootTag["Level"];
        var sections = dataTag["Sections"] as NbtList;
        var chunk = new ChunkColumn
        {
            X = X, Z = Z,
            BiomeId = dataTag["Biomes"].ByteArrayValue
        };
        // Clamp biome ids this implementation doesn't know to the default (0).
        for (var i = 0; i < chunk.BiomeId.Length; i++) { if (chunk.BiomeId[i] > 22) { chunk.BiomeId[i] = 0; } }
        if (chunk.BiomeId.Length > 256) { throw new Exception(); }
        // This will turn into a full chunk column
        foreach (var sectionTag in sections)
        {
            // Each section holds a 16x16x16 cube at vertical index Y.
            var sy = sectionTag["Y"].ByteValue * 16;
            var blocks = sectionTag["Blocks"].ByteArrayValue;
            var data = sectionTag["Data"].ByteArrayValue;
            // "Add" (high bits of block ids) is optional; default to zeros.
            var addTag = sectionTag["Add"];
            var adddata = new byte[2048];
            if (addTag != null) { adddata = addTag.ByteArrayValue; }
            var blockLight = sectionTag["BlockLight"].ByteArrayValue;
            var skyLight = sectionTag["SkyLight"].ByteArrayValue;
            for (var x = 0; x < 16; x++)
            {
                for (var z = 0; z < 16; z++)
                {
                    for (var y = 0; y < 16; y++)
                    {
                        // Apply the water-level offset; skip out-of-range rows.
                        var yi = sy + y - _waterOffsetY;
                        if (yi < 0 || yi >= 256) { continue; }
                        // Anvil index order is Y-Z-X.
                        var anvilIndex = y * 16 * 16 + z * 16 + x;
                        var blockId = blocks[anvilIndex] + (Nibble4(adddata, anvilIndex) << 8);
                        var b = BlockFactory.GetBlockById((ushort)blockId);
                        b.Metadata = Nibble4(data, anvilIndex);
                        chunk.SetBlock(x, yi, z, b);
                        chunk.SetBlocklight(x, yi, z, Nibble4(blockLight, anvilIndex));
                        chunk.SetSkylight(x, yi, z, Nibble4(skyLight, anvilIndex));
                    }
                }
            }
        }
        var entities = dataTag["Entities"] as NbtList;
        // Block entities: adjust their stored Y by the water offset and
        // register them with the chunk when a matching type exists.
        var tileEntities = dataTag["TileEntities"] as NbtList;
        if (tileEntities != null)
        {
            foreach (var nbtTag in tileEntities)
            {
                var blockEntityTag = (NbtCompound)nbtTag;
                string entityId = blockEntityTag["id"].StringValue;
                int x = blockEntityTag["x"].IntValue;
                int y = blockEntityTag["y"].IntValue - _waterOffsetY;
                int z = blockEntityTag["z"].IntValue;
                blockEntityTag["y"] = new NbtInt("y", y);
                TileEntity blockEntity = TileEntityFactory.GetBlockEntityById(entityId);
                if (blockEntity != null)
                {
                    blockEntityTag.Name = string.Empty;
                    chunk.SetBlockEntity(new Vector3(x, y, z), blockEntityTag);
                }
            }
        }
        // NOTE(review): tile ticks are read but never applied.
        var tileTicks = dataTag["TileTicks"] as NbtList;
        chunk.IsDirty = false;
        return(chunk);
    }
}
// Decodes a serialized chunk column: per-section block/metadata/light data,
// followed by the height map, biome ids, border blocks, extra data and any
// trailing block-entity NBT. Returns null when the packet holds no sections.
public static ChunkColumn DecocedChunkColumn(byte[] buffer)
{
    lock (_chunkRead)
    {
        MemoryStream stream = new MemoryStream(buffer);
        NbtBinaryReader defStream = new NbtBinaryReader(stream, true);

        Log.Debug("New chunk column");

        int count = defStream.ReadByte();
        if (count < 1)
        {
            Log.Warn("Nothing to read");
            return(null);
        }

        // Both original branches logged the identical message; collapsed.
        Log.Debug($"Reading {count} sections");

        ChunkColumn chunkColumn = new ChunkColumn();

        for (int s = 0; s < count; s++)
        {
            int idx = defStream.ReadByte();
            Log.Debug($"New section {s}, index={idx}");
            Chunk chunk = chunkColumn.chunks[s];

            // 16x16x16 block ids, then half-byte-per-block metadata,
            // skylight and blocklight arrays.
            int chunkSize = 16 * 16 * 16;
            defStream.Read(chunk.blocks, 0, chunkSize);
            if (defStream.Read(chunk.metadata.Data, 0, chunkSize / 2) != chunkSize / 2)
            {
                Log.Error($"Out of data: metadata");
            }
            if (defStream.Read(chunk.skylight.Data, 0, chunkSize / 2) != chunkSize / 2)
            {
                Log.Error($"Out of data: skylight");
            }
            if (defStream.Read(chunk.blocklight.Data, 0, chunkSize / 2) != chunkSize / 2)
            {
                Log.Error($"Out of data: blocklight");
            }
        }

        // Column-level data: 256 shorts of height map, 256 biome ids.
        if (defStream.Read(chunkColumn.height, 0, 256 * 2) != 256 * 2)
        {
            Log.Error($"Out of data height");
        }
        if (defStream.Read(chunkColumn.biomeId, 0, 256) != 256)
        {
            Log.Error($"Out of data biomeId");
        }

        int borderBlock = VarInt.ReadSInt32(stream);
        if (borderBlock != 0)
        {
            Log.Warn($"??? Got borderblock {borderBlock}");
        }

        int extraCount = VarInt.ReadSInt32(stream);
        if (extraCount != 0)
        {
            for (int i = 0; i < extraCount; i++)
            {
                var hash = VarInt.ReadSInt32(stream);
                var blockData = defStream.ReadInt16();
                Log.Warn($"Got extradata: hash=0x{hash:X2}, blockdata=0x{blockData:X2}");
            }
        }

        // Any remaining bytes are little-endian VarInt NBT block entities.
        if (stream.Position < stream.Length - 1)
        {
            while (stream.Position < stream.Length)
            {
                NbtFile file = new NbtFile()
                {
                    BigEndian = false,
                    UseVarInt = true
                };
                file.LoadFromStream(stream, NbtCompression.None);
                Log.Debug($"Blockentity: {file.RootTag}");
            }
        }
        if (stream.Position < stream.Length - 1)
        {
            Log.Warn($"Still have data to read\n{Package.HexDump(defStream.ReadBytes((int)(stream.Length - stream.Position)))}");
        }

        // BUGFIX: previously returned `new ChunkColumn()`, discarding every
        // section, height map and biome parsed above.
        return(chunkColumn);
    }
}
public void NullParameterTest()
{
    // Every public constructor and load/save entry point of NbtFile must
    // reject a null buffer/path/stream with ArgumentNullException.
    Assert.Throws<ArgumentNullException>(() => new NbtFile((NbtCompound)null));
    Assert.Throws<ArgumentNullException>(() => new NbtFile((string)null));
    NbtFile file = new NbtFile();
    Assert.Throws<ArgumentNullException>(() => file.LoadFromBuffer(null, 0, 1, NbtCompression.None));
    Assert.Throws<ArgumentNullException>(() => file.LoadFromBuffer(null, 0, 1, NbtCompression.None, tag => true));
    Assert.Throws<ArgumentNullException>(() => file.LoadFromFile(null));
    Assert.Throws<ArgumentNullException>(() => file.LoadFromFile(null, NbtCompression.None, tag => true));
    Assert.Throws<ArgumentNullException>(() => file.LoadFromStream(null, NbtCompression.AutoDetect));
    Assert.Throws<ArgumentNullException>(() => file.LoadFromStream(null, NbtCompression.AutoDetect, tag => true));
    Assert.Throws<ArgumentNullException>(() => file.SaveToBuffer(null, 0, NbtCompression.None));
    Assert.Throws<ArgumentNullException>(() => file.SaveToFile(null, NbtCompression.None));
    Assert.Throws<ArgumentNullException>(() => file.SaveToStream(null, NbtCompression.None));
    // Static helpers must enforce the same contract.
    Assert.Throws<ArgumentNullException>(() => NbtFile.ReadRootTagName(null));
    Assert.Throws<ArgumentNullException>(
        () => NbtFile.ReadRootTagName((Stream)null, NbtCompression.None, true, 0));
}
public void ReloadNonSeekableStream()
{
    // Round-trip the big test file through a stream that cannot seek;
    // byte counts in both directions must match.
    var original = new NbtFile("TestFiles/bigtest.nbt");
    using (var backing = new MemoryStream())
    using (var nonSeekable = new NonSeekableStream(backing))
    {
        int written = original.SaveToStream(nonSeekable, NbtCompression.None);
        backing.Position = 0;
        int read = original.LoadFromStream(nonSeekable, NbtCompression.None, null);
        Assert.AreEqual(written, read);
        AssertNbtBigFile(original);
    }
}
// Reads one chunk column from an Anvil region (.mca) file and converts it into a
// Pocket Edition friendly ChunkColumn. Falls back to the generator when the region
// file is missing or the chunk slot is empty. yoffset shifts Anvil Y coordinates
// down so taller Anvil worlds fit this provider's 0..127 height range.
public static ChunkColumn GetChunk(ChunkCoordinates coordinates, string basePath, IWorldProvider generator, int yoffset)
{
	// A region file holds a 32x32 grid of chunks.
	int width = 32;
	int depth = 32;

	// Region coordinates = chunk coordinates / 32.
	int rx = coordinates.X >> 5;
	int rz = coordinates.Z >> 5;

	// NOTE(review): hard-coded '\' separator makes this Windows-only — confirm this
	// variant never runs on other platforms.
	string filePath = Path.Combine(basePath, string.Format(@"region\r.{0}.{1}.mca", rx, rz));

	if (!File.Exists(filePath))
	{
		return(generator.GenerateChunkColumn(coordinates));
	}

	using (var regionFile = File.OpenRead(filePath))
	{
		// Advance past the 8 KiB region header (4 KiB location table + 4 KiB timestamps).
		// The read result is ignored; presumably regions are always >= 8 KiB — TODO confirm.
		byte[] buffer = new byte[8192];
		regionFile.Read(buffer, 0, 8192);

		// Chunk position inside the region, wrapped into 0..31 (C# % keeps the sign).
		int xi = (coordinates.X % width);
		if (xi < 0) { xi += 32; }
		int zi = (coordinates.Z % depth);
		if (zi < 0) { zi += 32; }

		// Each location-table entry is 4 bytes: 3-byte sector offset + 1-byte sector count.
		int tableOffset = (xi + zi * width) * 4;

		regionFile.Seek(tableOffset, SeekOrigin.Begin);

		// Read the 3 big-endian offset bytes into a 4-byte buffer and reverse it: the
		// decoded value comes out shifted left by 8, so the extra << 4 below gives a
		// total of << 12, i.e. sectors * 4096 = the chunk's byte offset in the file.
		byte[] offsetBuffer = new byte[4];
		regionFile.Read(offsetBuffer, 0, 3);
		Array.Reverse(offsetBuffer);
		int offset = BitConverter.ToInt32(offsetBuffer, 0) << 4;

		// Sector count; 0 together with offset 0 means the chunk was never generated.
		int length = regionFile.ReadByte();

		if (offset == 0 || length == 0)
		{
			return(generator.GenerateChunkColumn(coordinates));
		}

		regionFile.Seek(offset, SeekOrigin.Begin);

		// 4-byte big-endian chunk data length — read and discarded.
		byte[] waste = new byte[4];
		regionFile.Read(waste, 0, 4);

		// Compression scheme byte; read but not validated (ZLib is assumed below).
		int compressionMode = regionFile.ReadByte();

		var nbt = new NbtFile();
		nbt.LoadFromStream(regionFile, NbtCompression.ZLib);

		NbtTag dataTag = nbt.RootTag["Level"];

		NbtList sections = dataTag["Sections"] as NbtList;

		ChunkColumn chunk = new ChunkColumn
		{
			x = coordinates.X,
			z = coordinates.Z,
			biomeId = dataTag["Biomes"].ByteArrayValue
		};

		// Zero out biome ids above 22 — presumably ids the PE client doesn't
		// support; confirm against the client's biome table.
		for (int i = 0; i < chunk.biomeId.Length; i++)
		{
			if (chunk.biomeId[i] > 22) { chunk.biomeId[i] = 0; }
		}
		// Biomes are one byte per column; anything larger is corrupt input.
		if (chunk.biomeId.Length > 256) { throw new Exception(); }

		// This will turn into a full chunk column
		foreach (NbtTag sectionTag in sections)
		{
			// Section Y index scaled to its base block height (sections are 16 blocks tall).
			int sy = sectionTag["Y"].ByteValue * 16;

			byte[] blocks = sectionTag["Blocks"].ByteArrayValue;
			byte[] data = sectionTag["Data"].ByteArrayValue;
			// Optional "Add" nibble array extends block ids beyond 255.
			NbtTag addTag = sectionTag["Add"];
			byte[] adddata = new byte[2048];
			if (addTag != null) { adddata = addTag.ByteArrayValue; }
			byte[] blockLight = sectionTag["BlockLight"].ByteArrayValue;
			byte[] skyLight = sectionTag["SkyLight"].ByteArrayValue;

			for (int x = 0; x < 16; x++)
			{
				for (int z = 0; z < 16; z++)
				{
					for (int y = 0; y < 16; y++)
					{
						// Shift into PE height space; skip anything outside 0..127.
						int yi = sy + y - yoffset;
						if (yi < 0 || yi >= 128) { continue; }

						// Anvil section arrays are ordered Y, then Z, then X.
						int anvilIndex = y * 16 * 16 + z * 16 + x;
						int blockId = blocks[anvilIndex] + (Nibble4(adddata, anvilIndex) << 8);

						// Anvil to PE friendly converstion
						// (remap ids with no direct PE equivalent — presumably to
						// close substitutes; confirm against the PE block id table).
						if (blockId == 125) { blockId = 5; }
						else if (blockId == 126) { blockId = 158; }
						else if (blockId == 75) { blockId = 50; }
						else if (blockId == 76) { blockId = 50; }
						else if (blockId == 123) { blockId = 89; }
						else if (blockId == 124) { blockId = 89; }
						else if (blockId == 152) { blockId = 73; }
						// Ids on the ignore list become air.
						else if (_ignore.BinarySearch(blockId) >= 0) { blockId = 0; }
						// Known-missing materials are logged and substituted with id 133.
						else if (_gaps.BinarySearch(blockId) >= 0)
						{
							Debug.WriteLine("Missing material: " + blockId);
							blockId = 133;
						}

						// Anything still beyond a byte can't be stored; substitute id 41.
						if (blockId > 255) { blockId = 41; }

						// NOTE(review): top layer forced to id 30 and bottom water
						// (8/9) to bedrock (7) — presumably ceiling/floor sealing of
						// the converted world; confirm intent.
						if (yi == 127 && blockId != 0) { blockId = 30; }
						if (yi == 0 && (blockId == 8 || blockId == 9 /*|| blockId == 0*/)) { blockId = 7; }

						//if (blockId != 0) blockId = 41;

						chunk.SetBlock(x, yi, z, (byte)blockId);
						chunk.SetMetadata(x, yi, z, Nibble4(data, anvilIndex));
						chunk.SetBlocklight(x, yi, z, Nibble4(blockLight, anvilIndex));
						chunk.SetSkylight(x, yi, z, Nibble4(skyLight, anvilIndex));
					}
				}
			}
		}

		// Entities are read but not converted here.
		NbtList entities = dataTag["Entities"] as NbtList;

		NbtList blockEntities = dataTag["TileEntities"] as NbtList;
		if (blockEntities != null)
		{
			foreach (var nbtTag in blockEntities)
			{
				var blockEntityTag = (NbtCompound)nbtTag;
				string entityId = blockEntityTag["id"].StringValue;
				int x = blockEntityTag["x"].IntValue;
				int y = blockEntityTag["y"].IntValue - yoffset;
				int z = blockEntityTag["z"].IntValue;
				// Rewrite the stored Y so the block entity matches the shifted height.
				blockEntityTag["y"] = new NbtInt("y", y);

				BlockEntity blockEntity = BlockEntityFactory.GetBlockEntityById(entityId);
				if (blockEntity != null)
				{
					// Only known block entity types are kept; the compound must be unnamed.
					blockEntityTag.Name = string.Empty;
					chunk.SetBlockEntity(new BlockCoordinates(x, y, z), blockEntityTag);
				}
			}
		}

		// Tile ticks are read but not used.
		NbtList tileTicks = dataTag["TileTicks"] as NbtList;

		chunk.isDirty = false;
		return(chunk);
	}
}
public void ValueTest() {
	// write one named tag for every value type, and read it back
	using (var ms = new MemoryStream()) {
		var writer = new NbtWriter(ms, "root");
		// The writer must expose the stream it was constructed with.
		Assert.AreEqual(ms, writer.BaseStream);
		{
			writer.WriteByte("byte", 1);
			writer.WriteShort("short", 2);
			writer.WriteInt("int", 3);
			writer.WriteLong("long", 4L);
			writer.WriteFloat("float", 5f);
			writer.WriteDouble("double", 6d);
			writer.WriteByteArray("byteArray", new byte[] { 10, 11, 12 });
			writer.WriteIntArray("intArray", new[] { 20, 21, 22 });
			writer.WriteString("string", "123");
		}
		// The root compound is still open here, so the document is incomplete;
		// closing it flips IsDone (matches the checks in the sibling ValueTest).
		Assert.IsFalse(writer.IsDone);
		writer.EndCompound();
		Assert.IsTrue(writer.IsDone);
		writer.Finish();

		// Rewind and parse the written bytes, then verify every value survived.
		ms.Position = 0;
		var file = new NbtFile();
		file.LoadFromStream(ms, NbtCompression.None);
		TestFiles.AssertValueTest(file);
	}
}
// Loads the named NBT file through the stream-based API, parsing from an
// in-memory copy of the file's bytes.
void LoadFromStreamInternal(String fileName, NbtCompression compression) {
	byte[] contents = File.ReadAllBytes(fileName);
	var parsed = new NbtFile();
	using (var stream = new MemoryStream(contents)) {
		parsed.LoadFromStream(stream, compression);
	}
}
// Reads one chunk column from an Anvil region (.mca) file and converts it into a
// ChunkColumn, delegating per-section block conversion to ReadSection. Missing
// region files or empty chunk slots fall back to the generator, and generated
// columns are flagged NeedSave so they get persisted. yoffset shifts Anvil Y
// coordinates down into this provider's height range.
public ChunkColumn GetChunk(ChunkCoordinates coordinates, string basePath, IWorldProvider generator, int yoffset)
{
	// A region file holds a 32x32 grid of chunks.
	int width = 32;
	int depth = 32;

	// Region coordinates = chunk coordinates / 32.
	int rx = coordinates.X >> 5;
	int rz = coordinates.Z >> 5;

	// Platform-correct separator is injected via {2}.
	string filePath = Path.Combine(basePath, string.Format(@"region{2}r.{0}.{1}.mca", rx, rz, Path.DirectorySeparatorChar));

	if (!File.Exists(filePath))
	{
		// No region file: generate fresh and mark for saving.
		var chunkColumn = generator?.GenerateChunkColumn(coordinates);
		if (chunkColumn != null)
		{
			chunkColumn.NeedSave = true;
		}

		return(chunkColumn);

		//return new ChunkColumn
		//{
		//	x = coordinates.X,
		//	z = coordinates.Z,
		//};
	}

	using (var regionFile = File.OpenRead(filePath))
	{
		// Advance past the 8 KiB region header (location + timestamp tables).
		// Read result is ignored; presumably regions are always >= 8 KiB — TODO confirm.
		byte[] buffer = new byte[8192];
		regionFile.Read(buffer, 0, 8192);

		// Chunk position inside the region, wrapped into 0..31 (C# % keeps the sign).
		int xi = (coordinates.X % width);
		if (xi < 0) { xi += 32; }
		int zi = (coordinates.Z % depth);
		if (zi < 0) { zi += 32; }

		// Each location-table entry is 4 bytes: 3-byte sector offset + 1-byte sector count.
		int tableOffset = (xi + zi * width) * 4;

		regionFile.Seek(tableOffset, SeekOrigin.Begin);

		// Read the 3 big-endian offset bytes into a 4-byte buffer and reverse it: the
		// decoded value comes out shifted left by 8, so the extra << 4 below gives a
		// total of << 12, i.e. sectors * 4096 = the chunk's byte offset in the file.
		byte[] offsetBuffer = new byte[4];
		regionFile.Read(offsetBuffer, 0, 3);
		Array.Reverse(offsetBuffer);
		int offset = BitConverter.ToInt32(offsetBuffer, 0) << 4;

		// Self-check: re-encode the decoded offset and compare against the raw bytes.
		byte[] bytes = BitConverter.GetBytes(offset >> 4);
		Array.Reverse(bytes);
		// NOTE(review): uses && between byte comparisons, so it only throws when ALL
		// three bytes differ — presumably || was intended; confirm before changing.
		if (offset != 0 && offsetBuffer[0] != bytes[0] && offsetBuffer[1] != bytes[1] && offsetBuffer[2] != bytes[2])
		{
			throw new Exception($"Not the same buffer\n{Package.HexDump(offsetBuffer)}\n{Package.HexDump(bytes)}");
		}

		// Sector count; 0 together with offset 0 means the chunk was never generated.
		int length = regionFile.ReadByte();

		if (offset == 0 || length == 0)
		{
			// Empty slot: generate fresh and mark for saving.
			var chunkColumn = generator?.GenerateChunkColumn(coordinates);
			if (chunkColumn != null)
			{
				chunkColumn.NeedSave = true;
			}

			return(chunkColumn);

			//return new ChunkColumn
			//{
			//	x = coordinates.X,
			//	z = coordinates.Z,
			//};
		}

		regionFile.Seek(offset, SeekOrigin.Begin);

		// 4-byte big-endian chunk data length — read and discarded.
		byte[] waste = new byte[4];
		regionFile.Read(waste, 0, 4);

		// Compression scheme byte must be 2 (zlib); anything else is rejected.
		int compressionMode = regionFile.ReadByte();

		if (compressionMode != 0x02)
		{
			throw new Exception($"CX={coordinates.X}, CZ={coordinates.Z}, NBT wrong compression. Expected 0x02, got 0x{compressionMode :X2}. " +
								$"Offset={offset}, length={length}\n{Package.HexDump(waste)}");
		}

		var nbt = new NbtFile();
		nbt.LoadFromStream(regionFile, NbtCompression.ZLib);

		NbtTag dataTag = nbt.RootTag["Level"];

		NbtList sections = dataTag["Sections"] as NbtList;

		ChunkColumn chunk = new ChunkColumn
		{
			x = coordinates.X,
			z = coordinates.Z,
			biomeId = dataTag["Biomes"].ByteArrayValue,
			isAllAir = true
		};

		// Biomes are one byte per column; anything larger is corrupt input.
		if (chunk.biomeId.Length > 256) { throw new Exception(); }

		// This will turn into a full chunk column
		foreach (NbtTag sectionTag in sections)
		{
			ReadSection(yoffset, sectionTag, chunk);
		}

		// Entities are read but not converted here.
		NbtList entities = dataTag["Entities"] as NbtList;

		NbtList blockEntities = dataTag["TileEntities"] as NbtList;
		if (blockEntities != null)
		{
			foreach (var nbtTag in blockEntities)
			{
				// Clone so the edits below don't mutate the parsed NBT document.
				var blockEntityTag = (NbtCompound)nbtTag.Clone();
				string entityId = blockEntityTag["id"].StringValue;
				int x = blockEntityTag["x"].IntValue;
				int y = blockEntityTag["y"].IntValue - yoffset;
				int z = blockEntityTag["z"].IntValue;
				// Rewrite the stored Y so the block entity matches the shifted height.
				blockEntityTag["y"] = new NbtInt("y", y);

				BlockEntity blockEntity = BlockEntityFactory.GetBlockEntityById(entityId);
				if (blockEntity != null)
				{
					// Only known block entity types are kept; the compound must be unnamed.
					blockEntityTag.Name = string.Empty;

					if (blockEntity is Sign)
					{
						// Remove the JSON stuff and get the text out of extra data.
						// TAG_String("Text2"): "{"extra":["10c a loaf!"],"text":""}"
						CleanSignText(blockEntityTag, "Text1");
						CleanSignText(blockEntityTag, "Text2");
						CleanSignText(blockEntityTag, "Text3");
						CleanSignText(blockEntityTag, "Text4");
					}
					else if (blockEntity is ChestBlockEntity)
					{
						NbtList items = (NbtList)blockEntityTag["Items"];
						if (items != null)
						{
							//for (byte i = 0; i < items.Count; i++)
							//{
							//	NbtCompound item = (NbtCompound) items[i];
							//	item.Add(new NbtShort("OriginalDamage", item["Damage"].ShortValue));
							//	byte metadata = (byte) (item["Damage"].ShortValue & 0xff);
							//	item.Remove("Damage");
							//	item.Add(new NbtByte("Damage", metadata));
							//}
						}
					}

					chunk.SetBlockEntity(new BlockCoordinates(x, y, z), blockEntityTag);
				}
			}
		}

		//NbtList tileTicks = dataTag["TileTicks"] as NbtList;

		chunk.isDirty = false;
		return(chunk);
	}
}
public void CompoundListTest() {
	// Exercise every nesting combination: empty compounds, compounds in
	// compounds, lists of ints, lists of compounds, and lists of lists.
	const string testString = "Come on and slam, and welcome to the jam.";
	using (var stream = new MemoryStream()) {
		var w = new NbtWriter(stream, "Test");

		// A compound with no children at all.
		w.BeginCompound("EmptyCompy");
		w.EndCompound();

		// Compound nested two levels deep, holding scalar values.
		w.BeginCompound("OuterNestedCompy");
		w.BeginCompound("InnerNestedCompy");
		w.WriteInt("IntTest", 123);
		w.WriteString("StringTest", testString);
		w.EndCompound();
		w.EndCompound();

		// Plain list of ints.
		w.BeginList("ListOfInts", NbtTagType.Int, 3);
		w.WriteInt(1);
		w.WriteInt(2);
		w.WriteInt(3);
		w.EndList();

		// A compound whose child list contains a single compound element.
		w.BeginCompound("CompoundOfListsOfCompounds");
		w.BeginList("ListOfCompounds", NbtTagType.Compound, 1);
		w.BeginCompound();
		w.WriteInt("TestInt", 123);
		w.EndCompound();
		w.EndList();
		w.EndCompound();

		// A list holding three empty lists.
		w.BeginList("ListOfEmptyLists", NbtTagType.List, 3);
		for (int i = 0; i < 3; i++) {
			w.BeginList(NbtTagType.List, 0);
			w.EndList();
		}
		w.EndList();

		w.EndCompound();
		w.Finish();

		// Parse the raw bytes back to prove the output is well-formed NBT.
		stream.Seek(0, SeekOrigin.Begin);
		var file = new NbtFile();
		file.LoadFromStream(stream, NbtCompression.None);
		Console.WriteLine(file.ToString());
	}
}
public void ComplexStringsTest() {
	// Use a fixed seed for repeatability of this test
	var rand = new Random(0);

	// Build a batch of random Unicode strings to push through the writer.
	const int numStrings = 1024;
	var expectedStrings = new List<string>();
	for (int i = 0; i < numStrings; i++) {
		expectedStrings.Add(GenRandomUnicodeString(rand));
	}

	using (var ms = new MemoryStream()) {
		// Serialize them all as one TAG_List of TAG_String.
		var writer = new NbtWriter(ms, "test");
		writer.BeginList("stringList", NbtTagType.String, numStrings);
		foreach (string s in expectedStrings) {
			writer.WriteString(s);
		}
		writer.EndList();
		writer.EndCompound();
		writer.Finish();

		// Rewind and parse what was just written.
		ms.Position = 0;
		NbtFile file = new NbtFile();
		file.LoadFromStream(ms, NbtCompression.None);
		var actualStrings = file.RootTag.Get<NbtList>("stringList")
		                        .ToArray<NbtString>()
		                        .Select(tag => tag.StringValue);

		// Every string must survive the round trip exactly.
		CollectionAssert.AreEqual(expectedStrings, actualStrings);
	}
}
public void ReloadNonSeekableStream() {
	// Save the "big" test document through a non-seekable wrapper, then verify
	// both the failure modes and the success path when reading it back.
	var sourceFile = new NbtFile(TestFiles.Big);
	using (var backing = new MemoryStream())
	using (var forwardOnly = new NonSeekableStream(backing)) {
		long written = sourceFile.SaveToStream(forwardOnly, NbtCompression.None);

		// AutoDetect on a non-seekable stream must throw NotSupportedException.
		backing.Position = 0;
		Assert.Throws<NotSupportedException>(() => sourceFile.LoadFromStream(forwardOnly, NbtCompression.AutoDetect));

		// Forcing ZLib on uncompressed data must fail to parse.
		backing.Position = 0;
		Assert.Throws<InvalidDataException>(() => sourceFile.LoadFromStream(forwardOnly, NbtCompression.ZLib));

		// With the correct (None) compression the round trip succeeds.
		backing.Position = 0;
		long read = sourceFile.LoadFromStream(forwardOnly, NbtCompression.None);

		Assert.AreEqual(written, read);
		TestFiles.AssertNbtBigFile(sourceFile);
	}
}