/// <summary>
/// Parses a block file from the specified stream.
/// </summary>
/// <param name="stream">A BlockchainStream to read data from.</param>
/// <exception cref="System.ArgumentException">The specified stream is NULL or invalid.</exception>
/// <exception cref="System.InvalidOperationException">The block file could not be parsed from the specified stream.</exception>
/// <returns>A BlockFile parsed from the stream.</returns>
internal static BlockFile Parse(BlockchainStream stream)
{
    // Guard clause: a null stream cannot be read.
    if (stream == null)
    {
        throw new ArgumentException("The specified stream is NULL", nameof(stream));
    }

    BlockFile result = new BlockFile();

    try
    {
        // Keep consuming blocks until the parser reports no more data.
        Block block;
        while (BlockParser.TryParse(stream, out block))
        {
            result.Blocks.Add(block);
        }
    }
    catch (Exception ex)
    {
        // Wrap any parser failure so callers get a single, documented exception type.
        throw new InvalidOperationException("Unable to load the block file from the specified stream", ex);
    }

    return result;
}
/// <summary>
/// Parses a block file from the specified file.
/// </summary>
/// <param name="filePath">A string containing the path of the file.</param>
/// <exception cref="System.ArgumentException">The specified filePath is NULL or invalid.</exception>
/// <exception cref="System.InvalidOperationException">The block file could not be parsed from the specified file.</exception>
/// <returns>A BlockFile parsed from the file.</returns>
public static BlockFile ParseFile(string filePath)
{
    // Guard clauses: reject missing/blank paths before touching the file system.
    if (string.IsNullOrWhiteSpace(filePath))
    {
        throw new ArgumentException("The specified file is NULL or empty", nameof(filePath));
    }

    if (!File.Exists(filePath))
    {
        throw new ArgumentException("The specified file does not exist", nameof(filePath));
    }

    try
    {
        // Open read-only, tolerating concurrent readers/writers, and delegate
        // the actual parsing to the stream-based overload.
        using FileStream stream = File.Open(filePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
        return Parse(stream);
    }
    catch (Exception ex)
    {
        // Wrap any I/O or parse failure in the documented exception type.
        throw new InvalidOperationException("Unable to parse the block file from the specified file", ex);
    }
}
// Verifies that a BlockFile backed by fresh memory exposes exactly one block,
// and that this block's hash is the default value.
// NOTE(review): this method appears truncated in this view — the closing
// braces are not visible here.
public void from_empty_file()
{
    // VolatileMemory(8192): in-memory backing store; 42 is presumably an
    // identifier/size argument for BlockFile — TODO confirm against its constructor.
    using (var data = new VolatileMemory(8192))
    using (var file = new BlockFile(data, 42))
    {
        var blocks = file.EnumerateBlocks().ToArray();
        Assert.Single(blocks);
        Assert.Equal(default, blocks[0].hash);
/// <summary>
/// Loads the block file stored for the given block coordinates.
/// </summary>
/// <param name="X">The block's X coordinate.</param>
/// <param name="Y">The block's Y coordinate.</param>
/// <returns>The BlockFile deserialized from disk.</returns>
public BlockFile LoadBlock(int X, int Y)
{
    // Block files are named by their row-major index: Y * BlockWidth + X.
    string fileName = Path.Combine(FileDirectory, FileBaseName + "_" + (Y * BlockWidth + X) + ".mdata");

    BlockFile result;
    using (Stream input = File.Open(fileName, FileMode.Open))
    {
        NetworkReader reader = new NetworkReader(input);
        result = new BlockFile();
        result.Read(reader);
    }

    return result;
}
/// <summary>
/// Callback invoked when one unzip worker finishes or times out; updates the
/// aggregate unzip progress (finished workers / total workers).
/// </summary>
/// <param name="_threadId">The worker's BlockFile state object.</param>
/// <param name="timeout">True when the worker timed out instead of completing.</param>
private void UpdateUnZipProgress(object _threadId, bool timeout)
{
    BlockFile blockFile = (BlockFile)_threadId;

    if (timeout)
    {
        // BUG FIX: the original logged the boolean flag (always true in this
        // branch) instead of identifying which worker timed out.
        Debug.LogError("One File UnZip Timeout:" + blockFile.threadId);
    }
    else
    {
        this.finishSize++;
        // Progress is the fraction of worker threads that have completed.
        this.unZipProgress = (float)this.finishSize / (float)this.threadNum;
    }
}
/// <summary>
/// Verifies that blocks written in reverse order can be read back correctly,
/// also stepping in reverse order.
/// </summary>
public void BlockFile_ReadWrite_Reverse()
{
    byte[] buf = new byte[cbBlock];
    byte[] test = new byte[cbBlock];
    // Path.Combine is safer than raw string concatenation for building the temp path.
    string path = Path.Combine(Path.GetTempPath(), Helper.NewGuid().ToString() + ".blocks");
    BlockFile bf = new BlockFile(path, cbBlock, 0, FileMode.Create);

    try
    {
        // Write blocks backwards, each filled with a pattern derived from its index.
        for (int i = cBlocks - 1; i >= 0; i--)
        {
            for (int j = 0; j < cbBlock; j++)
            {
                buf[j] = (byte)(j + i + 7);
            }

            bf.Write(i, buf, cbBlock);
        }

        // Read them back, stepping backwards as well, and verify each pattern.
        for (int i = cBlocks - 1; i >= 0; i--)
        {
            for (int j = 0; j < cbBlock; j++)
            {
                test[j] = (byte)(j + i + 7);
            }

            bf.Read(i, buf, cbBlock);
            CollectionAssert.AreEqual(test, buf);
        }
    }
    finally
    {
        // Always release the file handle and remove the temp file.
        if (bf != null)
        {
            bf.Close();
        }

        if (File.Exists(path))
        {
            File.Delete(path);
        }
    }
}
/// <summary>
/// Verifies read/write round-trips against a BlockFile whose blocks were
/// preallocated at creation time (cBlocks passed to the constructor).
/// </summary>
public void BlockFile_Preallocate()
{
    byte[] buf = new byte[cbBlock];
    byte[] test = new byte[cbBlock];
    // Path.Combine is safer than raw string concatenation for building the temp path.
    string path = Path.Combine(Path.GetTempPath(), Helper.NewGuid().ToString() + ".blocks");
    BlockFile bf = new BlockFile(path, cbBlock, cBlocks, FileMode.Create);

    try
    {
        // Write blocks forward from block 0, each filled with an index-derived pattern.
        for (int i = 0; i < cBlocks; i++)
        {
            for (int j = 0; j < cbBlock; j++)
            {
                buf[j] = (byte)(j + i);
            }

            bf.Write(i, buf, cbBlock);
        }

        // Read them back, stepping forward as well, and verify each pattern.
        for (int i = 0; i < cBlocks; i++)
        {
            for (int j = 0; j < cbBlock; j++)
            {
                test[j] = (byte)(j + i);
            }

            bf.Read(i, buf, cbBlock);
            CollectionAssert.AreEqual(test, buf);
        }
    }
    finally
    {
        // Always release the file handle and remove the temp file.
        if (bf != null)
        {
            bf.Close();
        }

        if (File.Exists(path))
        {
            File.Delete(path);
        }
    }
}
/// <summary>
/// Tries to parse a block file from the specified stream.
/// </summary>
/// <param name="stream">A BlockchainStream to read data from.</param>
/// <param name="parsedObject">A BlockFile that will be set to the parsed block file.</param>
/// <returns>A bool indicating whether the block file was parsed from the stream.</returns>
internal static bool TryParse(BlockchainStream stream, out BlockFile parsedObject)
    => _parserCore.TryParse(stream, out parsedObject); // Thin pass-through to the shared parser core.
// Creates a new map split into fixed-size blocks, writes each block plus a
// descriptor file into the temp map directory, then switches to the map scene.
// Width/Height are the full map dimensions in tiles. UpdateLoading receives
// (progress 0..1, status text). When DefaultTiles is supplied, every tile is
// copied from that grid; otherwise Tiles[0].Type is used everywhere.
public static void CreateMap(int Width, int Height, TileData[] Tiles, Action <double, string> UpdateLoading, ushort[,] DefaultTiles = null)
{
    UpdateLoading(0.1, $"Creating Map Descriptor...");
    // Heavy work runs on the I/O queue; only the final scene change is
    // dispatched back to the main queue.
    DispatchQueue.DispatchIO(() =>
    {
        if (!Directory.Exists(MapTempPath))
        {
            Directory.CreateDirectory(MapTempPath);
        }

        // Block counts per axis are rounded up so partial blocks at the
        // right/bottom edges are still covered.
        MapDescriptorFile MapFile = new MapDescriptorFile()
        {
            BlockWidth = (uint)Math.Ceiling((double)Width / MapDescriptorFile.Block_Size),
            BlockHeight = (uint)Math.Ceiling((double)Height / MapDescriptorFile.Block_Size)
        };

        int BlocksDone = 0;
        int Blocks = (int)MapFile.BlockHeight * (int)MapFile.BlockWidth;
        MapFile.Blocks = new string[MapFile.BlockWidth, MapFile.BlockHeight];

        for (int y = 0; y < MapFile.BlockHeight; y++)
        {
            for (int x = 0; x < MapFile.BlockWidth; x++)
            {
                // Block creation occupies the 0.1 .. 0.6 span of the progress bar.
                UpdateLoading(0.1 + 0.5 * (((double)BlocksDone + 1) / Blocks), $"Creating Block {BlocksDone + 1} / {Blocks}...");

                int StartX = x * MapDescriptorFile.Block_Size;
                int StartY = y * MapDescriptorFile.Block_Size;

                // Edge blocks are clamped to whatever remains of the map.
                BlockFile Block = new BlockFile()
                {
                    Width = (ushort)((Width - StartX < MapDescriptorFile.Block_Size) ? Width - StartX : MapDescriptorFile.Block_Size),
                    Height = (ushort)((Height - StartY < MapDescriptorFile.Block_Size) ? Height - StartY : MapDescriptorFile.Block_Size)
                };

                Block.Tiles = new ushort[Block.Width, Block.Height];
                for (int yy = 0; yy < Block.Height; yy++)
                {
                    for (int xx = 0; xx < Block.Width; xx++)
                    {
                        if (DefaultTiles == null)
                        {
                            // No source grid supplied: fill with the first tile type.
                            Block.Tiles[xx, yy] = Tiles[0].Type;
                        }
                        else
                        {
                            Block.Tiles[xx, yy] = DefaultTiles[StartX + xx, StartY + yy];
                        }
                    }
                }

                // Block files are named by their row-major index (y * BlockWidth + x);
                // "%" in the template is the substitution point.
                string FileName = MapDataTempName.Replace("%", $"_{y * MapFile.BlockWidth + x}");
                MapFile.Blocks[x, y] = FileName;

                // Serialize the block through a NetworkWriter and flush it to disk.
                using (MemoryStream Stream = new MemoryStream())
                {
                    NetworkWriter w = new NetworkWriter(Stream);
                    Block.Write(w);
                    File.WriteAllBytes(Path.Combine(MapTempPath, FileName), Stream.ToArray());
                }

                BlocksDone++;
            }
        }

        UpdateLoading(0.7, $"Saving Map Descriptor...");
        // Serialize the descriptor (which references every block file by name).
        using (MemoryStream Stream = new MemoryStream())
        {
            NetworkWriter w = new NetworkWriter(Stream);
            MapFile.Write(w);
            File.WriteAllBytes(Path.Combine(MapTempPath, MapTempName), Stream.ToArray());
        }

        UpdateLoading(0.8, $"Rasterizing First Block...");
        // Scene changes must happen on the main queue.
        DispatchQueue.DispatchMain(() =>
        {
            JuixelGame.Shared.ChangeScene(new MapScene(new MapDataLoader(Path.Combine(MapTempPath, MapTempName)), Tiles, JuixelGame.Shared.CurrentScene.Size));
        });
    });
}
// Parses one map block file into a BlockFile: reads the header, then walks the
// stream decoding landtile / static / delimiter records until fewer than 3
// bytes remain. A finished MapPoint is added to result.blocks only when it
// matches the configured filters (delimitercheck / staticcheck / tilecheck)
// and writeonlyfiles is off; dumps go to the optional output writer.
public BlockFile Parse()
{
    BlockFile result = new BlockFile();

    // Header: one facet byte followed by a 16-bit file id.
    result.header.facet=br.ReadByte();
    result.header.FileID = br.ReadUInt16();
    fileid = result.header.FileID;
    if (output != null)
        WriteFileOut(result.header.Dump());

    // First map point at offset (0,0); a landtile record follows the header.
    mp = new MapPoint();
    mp.offsetx = 0;
    mp.x = AdjustX(0);
    mp.offsety = 0;
    mp.y = AdjustY(0);
    mp.lt = ReadLandtile();

    ushort currentx=0;
    ushort currenty=0;
    byte type1,type2;

    // Each record starts with a two-byte type marker; stop once fewer than
    // 3 bytes remain (not enough for another record).
    while (br.BaseStream.Position < br.BaseStream.Length - 3)
    {
        type1 = br.ReadByte();
        type2 = br.ReadByte();
        if (type1 == 0 && type2 == 0) //Landtile
        {
            // Advance to the next cell: y runs 0..BLOCK_HEIGHT-1 within each x column.
            currenty++;
            if (currenty == BLOCK_HEIGHT)
            {
                currentx++;
                currenty = 0;
                if (currentx >= BLOCK_WIDTH)
                    WriteFileErr("FileID=" + fileid + ", ERROR X is too huge: " + currentx);
            }
            // Flush the just-finished map point if it passes the active filters.
            if ((mp.delimiter_list.Count > 0 && delimitercheck) || (mp.static_list.Count > 0 && staticcheck) || (mp.delimiter_list.Count == 0 && mp.static_list.Count == 0 && tilecheck))
            {
                if(!writeonlyfiles)
                    result.blocks.Add(mp);
                if(output!=null)
                    WriteFileOut(mp.Dump());
            }
            // Begin the next map point at the new coordinates.
            mp = new MapPoint();
            mp.offsetx = currentx;
            mp.x = AdjustX(currentx);
            mp.offsety = currenty;
            mp.y = AdjustY(currenty);
            mp.lt=ReadLandtile();
        }
        else if (type1 == 0 && type2 != 0) //Static
        {
            // type2 is the static count; entries after the first are preceded
            // by a 16-bit placeholder that must be zero.
            for (int i = 0; i < type2; i++)
            {
                if (i > 0)
                {
                    ushort zero = br.ReadUInt16();
                    if (zero != 0)
                        WriteFileErr("FileID=" + fileid + ", X=" + AdjustX(currentx) + " (offset " + currentx + "), Y=" + AdjustY(currenty) + " (offset " + currenty + "), error static header placeholder zero byte is not zero: " + zero);
                }
                mp.static_list.Add(ReadStatic());
            }
        }
        else if (type1 > 0) // Delimiter: type1 is the count.
        {
            // Rewind one byte so the byte just consumed as type2 is re-read
            // as part of the first delimiter record.
            br.BaseStream.Position--;
            for (int i = 0; i < type1; i++)
            {
                if (i > 0)
                {
                    byte zero = br.ReadByte();
                    if (zero != 0)
                        WriteFileErr("FileID=" + fileid + ", X=" + AdjustX(currentx) + " (offset " + currentx + "), Y=" + AdjustY(currenty) + " (offset " + currenty + "), error delimiter type1 placeholder zero byte is not zero: " + zero);
                }
                mp.delimiter_list.Add(ReadDelimiter());
            }
        }
        else
            WriteFileErr("FileID=" + fileid + ", ERROR Type undefined, type1:" + type1 + ", type2:" + type2);
    }

    // Flush the final map point — the loop only flushes on landtile records.
    if ((mp.delimiter_list.Count > 0 && delimitercheck) || (mp.static_list.Count > 0 && staticcheck) || (mp.delimiter_list.Count == 0 && mp.static_list.Count == 0 && tilecheck))
    {
        if (!writeonlyfiles)
            result.blocks.Add(mp);
        if (output != null)
            WriteFileOut(mp.Dump());
    }

    br.Close();
    return result;
}
/// <summary>
/// Worker-thread entry point: extracts the zip entries assigned to one
/// BlockFile range (startPosition..endPosition) into blockFile.tarDir,
/// then signals waitOneThread.
/// </summary>
/// <param name="_bf">The worker's BlockFile state object.</param>
private void UnZipBlockFiles(object _bf)
{
    DateTime now = DateTime.Now;
    BlockFile blockFile = (BlockFile)_bf;
    Debug.LogError(string.Concat(new object[] { "UnZipBlockFiles Start :", blockFile.threadId, ", dt:", now }));

    using (ZipInputStream zipInputStream = new ZipInputStream(File.OpenRead(this.zipFileName)))
    {
        for (int i = 1; i <= blockFile.endPosition; i++)
        {
            if (i < blockFile.startPosition)
            {
                // Entries before this worker's range are consumed but not extracted.
                zipInputStream.GetNextEntry();
                continue;
            }

            ZipEntry nextEntry = zipInputStream.GetNextEntry();
            if (nextEntry == null)
            {
                // Archive exhausted earlier than expected.
                break;
            }

            string targetDir = Path.Combine(blockFile.tarDir, Path.GetDirectoryName(nextEntry.Name));
            string fileName = Path.GetFileName(nextEntry.Name);

            // BUG FIX: the original tested string.IsNullOrEmpty(fileName), which
            // meant only directory entries (empty file name) reached the extract
            // branch while every real file was skipped — and File.Create was then
            // called with an empty name. Directory entries are skipped; real
            // files are extracted.
            if (string.IsNullOrEmpty(fileName))
            {
                continue;
            }

            if (File.Exists(Path.Combine(targetDir, fileName)) && !blockFile.overWrite)
            {
                // Respect the overwrite flag: leave the existing file in place.
                Debug.LogError(string.Concat(new object[] { "overWrite:", blockFile.overWrite, " ,File.Exists:", fileName }));
            }
            else
            {
                // Ensure the destination directory exists before creating the file.
                Directory.CreateDirectory(targetDir);
                using (FileStream fileStream = File.Create(Path.Combine(targetDir, fileName)))
                {
                    byte[] buffer = new byte[2048];
                    while (true)
                    {
                        int bytesRead = zipInputStream.Read(buffer, 0, buffer.Length);
                        if (bytesRead <= 0)
                        {
                            break;
                        }
                        fileStream.Write(buffer, 0, bytesRead);
                    }
                }
            }
        }
    }

    TimeSpan timeSpan = DateTime.Now - now;
    Debug.LogError(string.Concat(new object[] { "UnZipBlockFiles end :", blockFile.threadId, ", ts:", timeSpan }));
    this.waitOneThread.Set();
}
/// <summary>
/// Verifies asynchronous block I/O via BeginWrite/EndWrite and
/// BeginRead/EndRead, including reads initiated in reverse order.
/// </summary>
public void BlockFile_Async()
{
    byte[] test = new byte[cbBlock];
    IAsyncResult[] ars = new IAsyncResult[cBlocks];
    // Path.Combine is safer than raw string concatenation for building the temp path.
    string path = Path.Combine(Path.GetTempPath(), Helper.NewGuid().ToString() + ".blocks");
    BlockFile bf = new BlockFile(path, cbBlock, cBlocks, FileMode.Create);

    byte[][] bufs = new byte[cBlocks][];
    for (int i = 0; i < cBlocks; i++)
    {
        bufs[i] = new byte[cbBlock];
    }

    try
    {
        // Fill each block buffer with a pattern derived from its index.
        for (int i = 0; i < cBlocks; i++)
        {
            for (int j = 0; j < cbBlock; j++)
            {
                bufs[i][j] = (byte)(j + i);
            }
        }

        // Initiate asynchronous write operations for all of the blocks.
        for (int i = 0; i < cBlocks; i++)
        {
            ars[i] = bf.BeginWrite(i, bufs[i], cbBlock, null, null);
        }

        // Wait for all of the writes to complete.
        for (int i = 0; i < cBlocks; i++)
        {
            bf.EndWrite(ars[i]);
        }

        // Clear the buffers so the reads must repopulate them.
        for (int i = 0; i < cBlocks; i++)
        {
            for (int j = 0; j < cbBlock; j++)
            {
                bufs[i][j] = 0;
            }
        }

        // Initiate async reads in reverse order to exercise out-of-order completion.
        for (int i = cBlocks - 1; i >= 0; i--)
        {
            ars[i] = bf.BeginRead(i, bufs[i], cbBlock, null, null);
        }

        // Wait for all of the reads to complete.
        for (int i = 0; i < cBlocks; i++)
        {
            bf.EndRead(ars[i]);
        }

        // Verify the data read back matches what was written.
        for (int i = 0; i < cBlocks; i++)
        {
            for (int j = 0; j < cbBlock; j++)
            {
                test[j] = (byte)(j + i);
            }

            CollectionAssert.AreEqual(test, bufs[i]);
        }
    }
    finally
    {
        // Always release the file handle and remove the temp file.
        if (bf != null)
        {
            bf.Close();
        }

        if (File.Exists(path))
        {
            File.Delete(path);
        }
    }
}