private void Start()
{
    // Containers for chunk simulation data and their per-chunk render state.
    chunkInformations = new Dictionary<int2, ChunkInformation>();
    chunks = new NativeHashMap<int2, Chunk>(0, Allocator.Persistent);

    // Hook the compute shader up to the shared cell-color buffer once.
    renderChunkKernelIndex = computeShader.FindKernel("render_chunk");
    cellColorComputeBuffer = new ComputeBuffer(ChunkSize * ChunkSize, colorStride);
    computeShader.SetBuffer(renderChunkKernelIndex, "cell_colors", cellColorComputeBuffer);

    // Pre-spawn a 7x7 grid of chunk renderers centred on the origin.
    for (var cx = -3; cx < 4; cx++)
    {
        for (var cy = -3; cy < 4; cy++)
        {
            var renderer = Instantiate(chunkPrefab, transform, true);
            renderer.transform.position = chunkScale * new Vector3(cx, cy);
            renderer.transform.localScale = new Vector3(chunkScale, chunkScale, 1);

            // Each chunk draws into its own UAV-writable texture; point
            // filtering keeps the per-cell pixels crisp.
            var texture = new RenderTexture(ChunkSize, ChunkSize, 32)
            {
                enableRandomWrite = true,
                useMipMap = false,
                filterMode = FilterMode.Point
            };
            texture.Create();
            renderer.material.mainTexture = texture;

            var coord = int2(cx, cy);
            chunks.Add(coord, new Chunk(ChunkSize));
            chunkInformations.Add(coord, new ChunkInformation
            {
                Renderer = renderer,
                OutputRenderTexture = texture
            });
        }
    }

    StartCoroutine(UpdateWorldCoroutine());
}
/// <summary>
/// Looks up a chunk in the "globalchunks" table by its raw id.
/// </summary>
/// <param name="id">Raw chunk id bytes; matched against the hex-encoded id column.</param>
/// <returns>
/// The matching row's id/sha1, or a sentinel result carrying the caller's id
/// and bundleIndex = -1 when no row matches.
/// </returns>
public static ChunkInformation GetGlobalChunkInformationById(byte[] id)
{
    ChunkInformation res = new ChunkInformation();
    res.id = id;
    res.bundleIndex = -1; // "not found" sentinel, overwritten only on a hit

    // using blocks guarantee the reader and connection are released even if
    // the query throws (the original leaked both on exception paths).
    using (SQLiteConnection con = GetConnection())
    {
        con.Open();
        // ByteArrayToHexString emits only hex digits, so embedding it in the
        // quoted WHERE literal cannot be abused for SQL injection here.
        using (SQLiteDataReader reader = getAllWhere("globalchunks", "id = '" + Helpers.ByteArrayToHexString(id) + "'", con))
        {
            if (reader.Read())
            {
                res.id = Helpers.HexStringToByteArray((string)reader["id"]);
                res.sha1 = Helpers.HexStringToByteArray((string)reader["sha1"]);
            }
        }
    }
    return res;
}
/// <summary>
/// Returns every row of the "chunks" table whose sha1 matches the given hash.
/// </summary>
/// <param name="sha1">SHA-1 hash as a hex string; normalised to upper case before matching.</param>
/// <returns>All matching rows (empty array when none match).</returns>
public static ChunkInformation[] GetChunkInformationBySHA1(string sha1)
{
    // Invariant upper-casing: the stored hashes are upper-case hex, and we
    // must not depend on the current thread's culture. Escaping quotes keeps
    // a malformed/hostile input from breaking out of the quoted literal.
    sha1 = sha1.ToUpperInvariant().Replace("'", "''");
    List<ChunkInformation> res = new List<ChunkInformation>();

    using (SQLiteConnection con = GetConnection())
    {
        con.Open();
        using (SQLiteDataReader reader = getAllWhere("chunks", "sha1 = '" + sha1 + "'", con))
        {
            // BUG FIX: the original used "if (reader.Read())", silently
            // dropping every match after the first even though the method
            // returns an array. Iterate over all matching rows.
            while (reader.Read())
            {
                ChunkInformation ci = new ChunkInformation();
                ci.id = Helpers.HexStringToByteArray(reader.GetString(0));
                ci.sha1 = Helpers.HexStringToByteArray(reader.GetString(1));
                ci.bundleIndex = reader.GetInt32(2);
                res.Add(ci);
            }
        }
    }
    return res.ToArray();
}
/// <summary>
/// Reads the 4096-byte header table of a Minecraft region file and hands the
/// resulting offset/size table to <c>ParseChunks</c>.
/// </summary>
/// <param name="filePath">Path of the region (.mcr/.mca) file.</param>
/// <returns>The chunks parsed out of the region file.</returns>
private static List<Chunk> ParseRegionFile(string filePath)
{
    ChunkInformation[] chunkInfo = new ChunkInformation[1024]; // There is exactly 1024 chunks in a region, 32x32
    byte[] readBlock = new byte[4096];

    // using guarantees the stream is closed even if Read throws
    // (the original leaked the handle on an exception).
    using (FileStream fs = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read))
    {
        int bytesRead = fs.Read(readBlock, 0, 4096);
        if (bytesRead < 4096)
        {
            throw new IOException("Region file header truncated: " + filePath);
        }
    }

    // StringBuilder instead of string += in a 1024-iteration loop (O(n^2) concat).
    // Fully qualified so no extra using directive is required at file level.
    System.Text.StringBuilder output = new System.Text.StringBuilder();
    int currentIndex = 0;
    for (int i = 0; i < 1024; i++)
    {
        string[] test = new string[4];
        test[0] = ByteString(readBlock[currentIndex]);
        test[1] = ByteString(readBlock[currentIndex + 1]);
        test[2] = ByteString(readBlock[currentIndex + 2]);
        test[3] = ByteString(readBlock[currentIndex + 3]);

        // Reading in first 3 bytes as a big-endian integer: the chunk's offset in 4096-byte sectors.
        chunkInfo[i].Offset = (readBlock[currentIndex] << 16) + (readBlock[currentIndex + 1] << 8) + readBlock[currentIndex + 2];
        currentIndex += 3; // Advancing the read marker past the first 3 bytes
        chunkInfo[i].Size = readBlock[currentIndex]; // Reading in the last byte: size in 4096-byte sectors
        currentIndex++; // Advancing the read marker by one byte

        // Offset == 0 && Size == 0 marks an absent chunk; skip it in the debug dump.
        if ((chunkInfo[i].Offset + chunkInfo[i].Size) != 0)
        {
            output.AppendFormat("Chunk {0,4}:{1,4} [{2,1}]{3,10}{4,10}{5,10}{6,10}",
                i, chunkInfo[i].Offset, chunkInfo[i].Size, test[0], test[1], test[2], test[3]);
            output.Append(Environment.NewLine);
        }
    }

    return ParseChunks(filePath, chunkInfo);
}
/// <summary>
/// Decompresses and NBT-parses every chunk listed in <paramref name="chunkInfo"/>
/// from the given region file and converts each into a <see cref="Chunk"/>.
/// Handles both the old flat MCR layout and the sectioned (Anvil-style) layout,
/// selected via the class-level <c>fileType</c> field.
/// </summary>
/// <param name="filePath">Path of the region file the header table came from.</param>
/// <param name="chunkInfo">Offset/size table produced by ParseRegionFile.</param>
/// <returns>All chunks present in the region (absent entries are skipped).</returns>
private static List<Chunk> ParseChunks(string filePath, ChunkInformation[] chunkInfo)
{
    int offsetIndex, currentIndex;
    byte[] readBlock, compressedBlock, decompressedBlock;
    List<Chunk> chunkList = new List<Chunk>();
    //VoxelTerrain terrain = new VoxelTerrain();
    FileStream fs = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read);
    for (int i = 0; i < chunkInfo.Length; i++)
    {
        // We don't care if the chunk doesn't exist, as is indicated by a 0 for offset and length
        if (chunkInfo[i].Offset == 0 && chunkInfo[i].Size == 0)
            continue;

        // Calculating offset, which is set in 4096 byte sections
        offsetIndex = chunkInfo[i].Offset * 4096;
        currentIndex = 0;

        // We know the size of our chunk data, which is also in 4096 byte sections
        readBlock = new byte[chunkInfo[i].Size * 4096];

        // Now we can populate our block
        fs.Seek(offsetIndex, SeekOrigin.Begin);
        fs.Read(readBlock, 0, chunkInfo[i].Size * 4096);

        // Now that we have our chunk data read in, we can parse it.
        // First is the length in bytes of the data (stored big-endian on disk,
        // hence the CorrectEndian call).
        chunkInfo[i].ByteLength = CorrectEndian(BitConverter.ToInt32(readBlock, currentIndex));
        // That was 4 bytes of data, so we can push our index in further
        currentIndex += 4;

        // We should probably set up the size of our compressed array as well
        // (ByteLength includes the 1-byte compression tag, hence the -1).
        compressedBlock = new byte[chunkInfo[i].ByteLength - 1];

        // The next byte should be our compression type
        chunkInfo[i].Compression = (CompressionType)readBlock[currentIndex];
        // Only 1 byte forward this time
        currentIndex++;

        if (chunkInfo[i].Compression == CompressionType.GZip)
        {
            // Now we can just grab our compressed chunk data
            Array.Copy(readBlock, currentIndex, compressedBlock, 0, chunkInfo[i].ByteLength - 1);
            // and of course decompress it
            decompressedBlock = DecompressGZip(compressedBlock);
        }
        else
        {
            // Here we're snagging the compressed chunk data.
            // Since we're decompressing with Deflate, rather than Zlib, we need to cut off the
            // header tags. To do this we remove the first 2 (index + 2) and last 4 bytes (length - 4).
            // NOTE(review): compressedBlock is ByteLength - 1 long but only ByteLength - 5
            // bytes are copied in, so its last 4 entries remain zero — presumably tolerated
            // by DecompressZLib, but worth confirming.
            Array.Copy(readBlock, currentIndex + 2, compressedBlock, 0, chunkInfo[i].ByteLength - 1 - 4);
            // Now we can finally decompress
            decompressedBlock = DecompressZLib(compressedBlock);
        }

        // now the decompressed data and the chunk Info are ready to port out to an NBT parser
        if (chunkInfo[i].Offset != 0 && chunkInfo[i].Size != 0)
        {
            NamedBinaryTag topLevel = new NamedBinaryTag();
            GetTagList(decompressedBlock, 0, topLevel);
            if (fileType == FileType.FILE_MCR)
            {
                // Old MCR layout: the chunk carries one flat "Blocks" byte array.
                NamedBinaryTag xPos = FindTag("xPos", topLevel);
                NamedBinaryTag zPos = FindTag("zPos", topLevel);
                NamedBinaryTag Blocks = FindTag("Blocks", topLevel);
                if (xPos == null || zPos == null || Blocks == null)
                    throw new NullReferenceException("One or more tags not found");
                else
                {
                    chunkList.Add(new Chunk(new Vector2((float)xPos.GetInt(), (float)zPos.GetInt()), Blocks));
                }
            }
            else
            {
                // Sectioned (Anvil-style) layout: blocks are split into 16x16x16 "Sections".
                NamedBinaryTag xPos = FindTag("xPos", topLevel);
                NamedBinaryTag zPos = FindTag("zPos", topLevel);
                NamedBinaryTag Blocks;
                // this finds a tag, but the value is always 0 even though nbt viewer shows 0,1,2,etc.
                NamedBinaryTag sectionsTag = FindTag("Sections", topLevel);
                if (xPos == null || zPos == null || sectionsTag == null)
                    throw new NullReferenceException("One or more tags not found");
                else
                {
                    List<NamedBinaryTag> sections = sectionsTag.GetList();
                    byte[] sectionBlockArray = new byte[16 * 16 * 16];
                    // start by populating with [x,y,z] since its easier to understand
                    byte[, ,] chunkBlockArray = new byte[16, 256, 16];
                    int yOffset;
                    foreach (NamedBinaryTag section in sections)
                    {
                        // Each section's "Y" byte gives its vertical slot; 16 blocks per slot.
                        yOffset = FindTag("Y", section).GetByte() * 16;
                        sectionBlockArray = FindTag("Blocks", section).GetByteArray();
                        // Section data is ordered YZX on disk: index = (y * 16 + z) * 16 + x.
                        for (int y = 0; y < 16; y++)
                            for (int z = 0; z < 16; z++)
                                for (int x = 0; x < 16; x++)
                                    chunkBlockArray[x, y + yOffset, z] = sectionBlockArray[(y * 16 + z) * 16 + x];
                    }
                    // chunkBlockArray contains all blocks from its sections in [x,y,z] coordinates/locations
                    // now convert to a flattened array ordered XZY so that it matches the mcr style that this importer is set up for
                    /*
                    byte[] flattenedChunkBlockArray = new byte[16 * 16 * 256];
                    for (int y = 0; y < 256; y++)
                        for (int z = 0; z < 16; z++)
                            for (int x = 0; x < 16; x++)
                                flattenedChunkBlockArray[(x * 16 + z) * 16 + y] = chunkBlockArray[x, y, z];
                    // horribly inefficient conversion complete, now load it like an mcr file.
                    */
                    // cheat and just send 3d array, process differently in generateBlocks
                    Blocks = new NamedBinaryTag(TagType.TAG_Byte_Array, chunkBlockArray);
                    chunkList.Add(new Chunk(new Vector2((float)xPos.GetInt(), (float)zPos.GetInt()), Blocks));
                }
            }
        }
    }
    fs.Close();
    UpdateStatus("Done!");
    return chunkList;
}
/// <summary>
/// Reads the 4096-byte header table of a Minecraft region file and hands the
/// resulting offset/size table to <c>ParseChunks</c>.
/// </summary>
/// <param name="filePath">Path of the region (.mcr/.mca) file.</param>
/// <returns>The chunks parsed out of the region file.</returns>
private static List<Chunk> ParseRegionFile(string filePath)
{
    ChunkInformation[] chunkInfo = new ChunkInformation[1024]; // There is exactly 1024 chunks in a region, 32x32
    byte[] readBlock = new byte[4096];

    // using guarantees the stream is closed even if Read throws
    // (the original leaked the handle on an exception).
    using (FileStream fs = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read))
    {
        int bytesRead = fs.Read(readBlock, 0, 4096);
        if (bytesRead < 4096)
        {
            throw new IOException("Region file header truncated: " + filePath);
        }
    }

    // StringBuilder instead of string += in a 1024-iteration loop (O(n^2) concat).
    // Fully qualified so no extra using directive is required at file level.
    System.Text.StringBuilder output = new System.Text.StringBuilder();
    int currentIndex = 0;
    for (int i = 0; i < 1024; i++)
    {
        string[] test = new string[4];
        test[0] = ByteString(readBlock[currentIndex]);
        test[1] = ByteString(readBlock[currentIndex + 1]);
        test[2] = ByteString(readBlock[currentIndex + 2]);
        test[3] = ByteString(readBlock[currentIndex + 3]);

        // Reading in first 3 bytes as a big-endian integer: the chunk's offset in 4096-byte sectors.
        chunkInfo[i].Offset = (readBlock[currentIndex] << 16) + (readBlock[currentIndex + 1] << 8) + readBlock[currentIndex + 2];
        currentIndex += 3; // Advancing the read marker past the first 3 bytes
        chunkInfo[i].Size = readBlock[currentIndex]; // Reading in the last byte: size in 4096-byte sectors
        currentIndex++; // Advancing the read marker by one byte

        // Offset == 0 && Size == 0 marks an absent chunk; skip it in the debug dump.
        if ((chunkInfo[i].Offset + chunkInfo[i].Size) != 0)
        {
            output.AppendFormat("Chunk {0,4}:{1,4} [{2,1}]{3,10}{4,10}{5,10}{6,10}",
                i, chunkInfo[i].Offset, chunkInfo[i].Size, test[0], test[1], test[2], test[3]);
            output.Append(Environment.NewLine);
        }
    }

    return ParseChunks(filePath, chunkInfo);
}
/// <summary>
/// Returns every row of the "chunks" table whose sha1 matches the given hash.
/// </summary>
/// <param name="sha1">SHA-1 hash as a hex string; normalised to upper case before matching.</param>
/// <returns>All matching rows (empty array when none match).</returns>
public static ChunkInformation[] GetChunkInformationBySHA1(string sha1)
{
    // Invariant upper-casing: the stored hashes are upper-case hex, and we
    // must not depend on the current thread's culture. Escaping quotes keeps
    // a malformed/hostile input from breaking out of the quoted literal.
    sha1 = sha1.ToUpperInvariant().Replace("'", "''");
    List<ChunkInformation> res = new List<ChunkInformation>();

    using (SQLiteConnection con = GetConnection())
    {
        con.Open();
        using (SQLiteDataReader reader = getAllWhere("chunks", "sha1 = '" + sha1 + "'", con))
        {
            // BUG FIX: the original used "if (reader.Read())", silently
            // dropping every match after the first even though the method
            // returns an array. Iterate over all matching rows.
            while (reader.Read())
            {
                ChunkInformation ci = new ChunkInformation();
                ci.id = Helpers.HexStringToByteArray(reader.GetString(0));
                ci.sha1 = Helpers.HexStringToByteArray(reader.GetString(1));
                ci.bundleIndex = reader.GetInt32(2);
                res.Add(ci);
            }
        }
    }
    return res.ToArray();
}
/// <summary>
/// Looks up a single chunk row in the "chunks" table by its raw id.
/// </summary>
/// <param name="id">Raw chunk id bytes; matched against the hex-encoded id column.</param>
/// <returns>
/// The matching row, or a sentinel result carrying the caller's id and
/// bundleIndex = -1 when no row matches.
/// </returns>
public static ChunkInformation GetChunkInformationById(byte[] id)
{
    ChunkInformation res = new ChunkInformation();
    res.id = id;
    res.bundleIndex = -1; // "not found" sentinel, overwritten only on a hit

    // using blocks guarantee the reader and connection are released even if
    // the query throws (the original leaked both on exception paths).
    using (SQLiteConnection con = GetConnection())
    {
        con.Open();
        // ByteArrayToHexString emits only hex digits, so embedding it in the
        // quoted WHERE literal cannot be abused for SQL injection here.
        using (SQLiteDataReader reader = getAllWhere("chunks", "id = '" + Helpers.ByteArrayToHexString(id) + "'", con))
        {
            if (reader.Read())
            {
                res.id = Helpers.HexStringToByteArray(reader.GetString(0));
                res.sha1 = Helpers.HexStringToByteArray(reader.GetString(1));
                res.bundleIndex = reader.GetInt32(2);
            }
        }
    }
    return res;
}
/// <summary>
/// Decompresses and NBT-parses every chunk listed in <paramref name="chunkInfo"/>
/// from the given region file and converts each into a <see cref="Chunk"/>.
/// This variant only handles the old flat MCR layout; chunks that carry a
/// "Sections" tag (Anvil-style) are detected but not converted yet.
/// </summary>
/// <param name="filePath">Path of the region file the header table came from.</param>
/// <param name="chunkInfo">Offset/size table produced by ParseRegionFile.</param>
/// <returns>All flat-layout chunks present in the region (absent entries are skipped).</returns>
private static List<Chunk> ParseChunks(string filePath, ChunkInformation[] chunkInfo)
{
    int offsetIndex, currentIndex;
    byte[] readBlock, compressedBlock, decompressedBlock;
    List<Chunk> chunkList = new List<Chunk>();
    //VoxelTerrain terrain = new VoxelTerrain();
    FileStream fs = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read);
    for (int i = 0; i < chunkInfo.Length; i++)
    {
        // We don't care if the chunk doesn't exist, as is indicated by a 0 for offset and length
        if (chunkInfo[i].Offset == 0 && chunkInfo[i].Size == 0)
            continue;

        // Calculating offset, which is set in 4096 byte sections
        offsetIndex = chunkInfo[i].Offset * 4096;
        currentIndex = 0;

        // We know the size of our chunk data, which is also in 4096 byte sections
        readBlock = new byte[chunkInfo[i].Size * 4096];

        // Now we can populate our block
        fs.Seek(offsetIndex, SeekOrigin.Begin);
        fs.Read(readBlock, 0, chunkInfo[i].Size * 4096);

        // Now that we have our chunk data read in, we can parse it.
        // First is the length in bytes of the data (stored big-endian on disk,
        // hence the CorrectEndian call).
        chunkInfo[i].ByteLength = CorrectEndian(BitConverter.ToInt32(readBlock, currentIndex));
        // That was 4 bytes of data, so we can push our index in further
        currentIndex += 4;

        // We should probably set up the size of our compressed array as well
        // (ByteLength includes the 1-byte compression tag, hence the -1).
        compressedBlock = new byte[chunkInfo[i].ByteLength - 1];

        // The next byte should be our compression type
        chunkInfo[i].Compression = (CompressionType)readBlock[currentIndex];
        // Only 1 byte forward this time
        currentIndex++;

        if (chunkInfo[i].Compression == CompressionType.GZip)
        {
            // Now we can just grab our compressed chunk data
            Array.Copy(readBlock, currentIndex, compressedBlock, 0, chunkInfo[i].ByteLength - 1);
            // and of course decompress it
            decompressedBlock = DecompressGZip(compressedBlock);
        }
        else
        {
            // Here we're snagging the compressed chunk data.
            // Since we're decompressing with Deflate, rather than Zlib, we need to cut off the
            // header tags. To do this we remove the first 2 (index + 2) and last 4 bytes (length - 4).
            // NOTE(review): compressedBlock is ByteLength - 1 long but only ByteLength - 5
            // bytes are copied in, so its last 4 entries remain zero — presumably tolerated
            // by DecompressZLib, but worth confirming.
            Array.Copy(readBlock, currentIndex + 2, compressedBlock, 0, chunkInfo[i].ByteLength - 1 - 4);
            // Now we can finally decompress
            decompressedBlock = DecompressZLib(compressedBlock);
        }

        // now the decompressed data and the chunk Info are ready to port out to an NBT parser
        if (chunkInfo[i].Offset != 0 && chunkInfo[i].Size != 0)
        {
            NamedBinaryTag topLevel = new NamedBinaryTag();
            GetTagList(decompressedBlock, 0, topLevel);
            // No "Sections" tag means the old flat MCR layout: one "Blocks" byte array.
            if (FindTag("Sections", topLevel) == null)
            {
                NamedBinaryTag xPos = FindTag("xPos", topLevel);
                NamedBinaryTag zPos = FindTag("zPos", topLevel);
                NamedBinaryTag Blocks = FindTag("Blocks", topLevel);
                if (xPos == null || zPos == null || Blocks == null)
                    throw new NullReferenceException("One or more tags not found");
                else
                {
                    chunkList.Add(new Chunk(new Vector2((float)xPos.GetInt(), (float)zPos.GetInt()), Blocks));
                    //terrain.ConvertChunkToBlocks(xPos.GetInt(), zPos.GetInt(), Blocks.GetByteArray(), 128);
                }
            }
            else
            {
                // Sectioned (Anvil-style) chunks are not converted by this variant.
                Debug.WriteLine("FOUND A SECTION");
            }
        }
    }
    fs.Close();
    UpdateStatus("Done!");
    return chunkList;
}