public PackagedFileInfo WriteFile(FileInfo info)
{
    // Assume that all files are written uncompressed (worst-case) when calculating package sizes
    var size = info.Size();
    if (streams.Last().Position + size > MaxPackageSize)
    {
        // Start a new package file if the current one is full.
        var partPath = Package.MakePartFilename(path, streams.Count);
        var nextPart = new FileStream(partPath, FileMode.Create, FileAccess.Write);
        streams.Add(nextPart);
    }

    var stream = streams.Last();
    var packaged = new PackagedFileInfo();
    packaged.PackageStream = stream;
    packaged.Name = info.Name;
    packaged.UncompressedSize = size;
    packaged.ArchivePart = (UInt32)(streams.Count - 1);
    packaged.OffsetInFile = (UInt32)stream.Position;
    packaged.Flags = BinUtils.MakeCompressionFlags(Compression, CompressionLevel);

    var reader = info.MakeReader();
    var uncompressed = reader.ReadBytes((int)reader.BaseStream.Length);
    var compressed = BinUtils.Compress(uncompressed, Compression, CompressionLevel);
    stream.Write(compressed, 0, compressed.Length);
    reader.Dispose();

    packaged.SizeOnDisk = (UInt32)(stream.Position - packaged.OffsetInFile);
    packaged.Crc = Crc32.Compute(compressed);

    var padLength = PaddingLength();
    if (stream.Position % padLength > 0)
    {
        // Pad the file to a multiple of 64 bytes
        byte[] pad = new byte[padLength - (stream.Position % padLength)];
        for (int i = 0; i < pad.Length; i++)
        {
            pad[i] = 0xAD;
        }

        stream.Write(pad, 0, pad.Length);
    }

    return packaged;
}
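For reference, a minimal sketch of the padding arithmetic used above, assuming PaddingLength() returns 64 (the value named in the comment); the position value below is made up for illustration:

// Illustrative only: align a hypothetical stream position to the next 64-byte boundary.
long position = 1000;                                        // made-up current stream position
int padLength = 64;                                          // assumed return value of PaddingLength()
long remainder = position % padLength;                       // 1000 % 64 == 40
long padBytes = remainder > 0 ? padLength - remainder : 0;   // 64 - 40 == 24 bytes of 0xAD filler
// After writing padBytes filler bytes, the next file starts at offset 1024.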
public void Write(Resource resource)
{
    // Always write LSF payloads with LZ4 at maximum compression
    Compression = CompressionMethod.LZ4;
    CompressionLevel = CompressionLevel.MaxCompression;

    using (this.Writer = new BinaryWriter(Stream, Encoding.Default, true))
    using (this.NodeStream = new MemoryStream())
    using (this.NodeWriter = new BinaryWriter(NodeStream))
    using (this.AttributeStream = new MemoryStream())
    using (this.AttributeWriter = new BinaryWriter(AttributeStream))
    using (this.ValueStream = new MemoryStream())
    using (this.ValueWriter = new BinaryWriter(ValueStream))
    {
        NextNodeIndex = 0;
        NextAttributeIndex = 0;
        NodeIndices = new Dictionary<Node, int>();
        // Pre-populate the string hash map buckets
        StringHashMap = new List<List<string>>(StringHashMapSize);
        while (StringHashMap.Count < StringHashMapSize)
        {
            StringHashMap.Add(new List<string>());
        }

        // Serialize all regions into the in-memory node/attribute/value streams
        WriteRegions(resource);

        byte[] stringBuffer = null;
        using (var stringStream = new MemoryStream())
        using (var stringWriter = new BinaryWriter(stringStream))
        {
            WriteStaticStrings(stringWriter);
            stringBuffer = stringStream.ToArray();
        }

        var nodeBuffer = NodeStream.ToArray();
        var attributeBuffer = AttributeStream.ToArray();
        var valueBuffer = ValueStream.ToArray();

        var header = new Header();
        header.Magic = BitConverter.ToUInt32(Header.Signature, 0);
        header.Version = Version;
        header.EngineVersion = (resource.Metadata.majorVersion << 24) |
                               (resource.Metadata.minorVersion << 16) |
                               (resource.Metadata.revision << 8) |
                               resource.Metadata.buildNumber;

        // Newer file versions compress the node/attribute/value sections in chunks
        bool chunked = header.Version >= (ulong)FileVersion.VerChunkedCompress;
        byte[] stringsCompressed = BinUtils.Compress(stringBuffer, Compression, CompressionLevel);
        byte[] nodesCompressed = BinUtils.Compress(nodeBuffer, Compression, CompressionLevel, chunked);
        byte[] attributesCompressed = BinUtils.Compress(attributeBuffer, Compression, CompressionLevel, chunked);
        byte[] valuesCompressed = BinUtils.Compress(valueBuffer, Compression, CompressionLevel, chunked);

        header.StringsUncompressedSize = (UInt32)stringBuffer.Length;
        header.StringsSizeOnDisk = (UInt32)stringsCompressed.Length;
        header.NodesUncompressedSize = (UInt32)nodeBuffer.Length;
        header.NodesSizeOnDisk = (UInt32)nodesCompressed.Length;
        header.AttributesUncompressedSize = (UInt32)attributeBuffer.Length;
        header.AttributesSizeOnDisk = (UInt32)attributesCompressed.Length;
        header.ValuesUncompressedSize = (UInt32)valueBuffer.Length;
        header.ValuesSizeOnDisk = (UInt32)valuesCompressed.Length;
        header.CompressionFlags = BinUtils.MakeCompressionFlags(Compression, CompressionLevel);
        header.Unknown2 = 0;
        header.Unknown3 = 0;
        header.Extended = ExtendedNodes ? 1u : 0u;

        // Write the header, then the four compressed sections in order
        BinUtils.WriteStruct<Header>(Writer, ref header);
        Writer.Write(stringsCompressed, 0, stringsCompressed.Length);
        Writer.Write(nodesCompressed, 0, nodesCompressed.Length);
        Writer.Write(attributesCompressed, 0, attributesCompressed.Length);
        Writer.Write(valuesCompressed, 0, valuesCompressed.Length);
    }
}
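As a hedged illustration of the EngineVersion field above: the four metadata components are packed into one 32-bit value, one byte per component. The version numbers below are made up for the example:

// Illustrative only: packing major.minor.revision.build into a single UInt32.
uint major = 3, minor = 6, revision = 4, build = 1;                        // hypothetical values
uint engineVersion = (major << 24) | (minor << 16) | (revision << 8) | build;
// engineVersion == 0x03060401; unpacking reverses the shifts:
uint majorAgain = (engineVersion >> 24) & 0xFF;                            // 3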
public PackagedFileInfo WriteFile(AbstractFileInfo info)
{
    // Assume that all files are written uncompressed (worst-case) when calculating package sizes
    uint size = info.Size();
    if (_streams.Last().Position + size > MaxPackageSize)
    {
        // Start a new package file if the current one is full.
        string partPath = Package.MakePartFilename(_path, _streams.Count);
        var nextPart = File.Open(partPath, FileMode.Create, FileAccess.Write);
        _streams.Add(nextPart);
    }

    Stream stream = _streams.Last();
    var packaged = new PackagedFileInfo
    {
        PackageStream = stream,
        Name = info.Name,
        UncompressedSize = size,
        ArchivePart = (UInt32)(_streams.Count - 1),
        OffsetInFile = (UInt32)stream.Position,
        Flags = BinUtils.MakeCompressionFlags(Compression, CompressionLevel)
    };

    Stream packagedStream = info.MakeStream();
    byte[] compressed;
    try
    {
        using (var reader = new BinaryReader(packagedStream, Encoding.UTF8, true))
        {
            byte[] uncompressed = reader.ReadBytes((int)reader.BaseStream.Length);
            compressed = BinUtils.Compress(uncompressed, Compression, CompressionLevel);
            stream.Write(compressed, 0, compressed.Length);
        }
    }
    finally
    {
        info.ReleaseStream();
    }

    packaged.SizeOnDisk = (UInt32)(stream.Position - packaged.OffsetInFile);
    packaged.Crc = Crc32.Compute(compressed, 0);

    int padLength = PaddingLength();
    if (stream.Position % padLength <= 0)
    {
        return packaged;
    }

    if ((_package.Metadata.Flags & PackageFlags.Solid) == 0)
    {
        // Pad the file to a multiple of 64 bytes
        var pad = new byte[padLength - stream.Position % padLength];
        for (var i = 0; i < pad.Length; i++)
        {
            pad[i] = 0xAD;
        }

        stream.Write(pad, 0, pad.Length);
    }

    return packaged;
}
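A hedged usage sketch, assuming a caller on the same writer class that loops over the files to be packed; the filesToWrite collection below is hypothetical and not part of the source:

// Hypothetical caller: write each file and collect its table entry.
var writtenFiles = new List<PackagedFileInfo>();
foreach (AbstractFileInfo file in filesToWrite)   // filesToWrite is an assumed input collection
{
    writtenFiles.Add(WriteFile(file));
}
// The collected PackagedFileInfo entries would later form the package's file table.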