/// <summary>
/// Compresses and writes a single file into the current archive part,
/// starting a new part when the current one would exceed MaxPackageSize.
/// </summary>
/// <param name="info">Source file to package.</param>
/// <returns>Metadata describing where and how the file was stored.</returns>
public PackagedFileInfo WriteFile(FileInfo info)
{
    // Assume that all files are written uncompressed (worst-case) when calculating package sizes
    var size = info.Size();
    if (streams.Last().Position + size > MaxPackageSize)
    {
        // Start a new package file if the current one is full.
        var partPath = Package.MakePartFilename(path, streams.Count);
        var nextPart = new FileStream(partPath, FileMode.Create, FileAccess.Write);
        streams.Add(nextPart);
    }

    var stream = streams.Last();
    var packaged = new PackagedFileInfo
    {
        PackageStream = stream,
        Name = info.Name,
        UncompressedSize = size,
        ArchivePart = (UInt32)(streams.Count - 1),
        OffsetInFile = (UInt32)stream.Position,
        Flags = BinUtils.MakeCompressionFlags(Compression, CompressionLevel)
    };

    byte[] compressed;
    // FIX: dispose the reader even if compression or the write throws;
    // the original only disposed it on the success path.
    using (var reader = info.MakeReader())
    {
        var uncompressed = reader.ReadBytes((int)reader.BaseStream.Length);
        compressed = BinUtils.Compress(uncompressed, Compression, CompressionLevel);
        stream.Write(compressed, 0, compressed.Length);
    }

    packaged.SizeOnDisk = (UInt32)(stream.Position - packaged.OffsetInFile);
    packaged.Crc = Crc32.Compute(compressed);

    var padLength = PaddingLength();
    if (stream.Position % padLength > 0)
    {
        // Pad the stream so the next entry starts on a padLength boundary
        byte[] pad = new byte[padLength - (stream.Position % padLength)];
        for (int i = 0; i < pad.Length; i++)
        {
            pad[i] = 0xAD;
        }

        stream.Write(pad, 0, pad.Length);
    }

    return(packaged);
}
/// <summary>
/// Writes all package files followed by the LZ4-compressed V13 file list,
/// the package header and the trailing signature.
/// </summary>
/// <param name="mainStream">Stream of the primary archive part.</param>
public void WriteV13(FileStream mainStream)
{
    long totalSize = _package.Files.Sum(p => (long)p.Size());
    long bytesWritten = 0;
    var packagedFiles = new List<PackagedFileInfo>();
    foreach (AbstractFileInfo file in _package.Files)
    {
        WriteProgress(file, bytesWritten, totalSize);
        packagedFiles.Add(WriteFile(file));
        bytesWritten += file.Size();
    }

    using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true))
    {
        // The file list begins at the current position; remember it for the header.
        var header = new LSPKHeader13
        {
            Version = (uint)Version,
            FileListOffset = (UInt32)mainStream.Position
        };

        writer.Write((UInt32)packagedFiles.Count);

        // Serialize all file entries into a temporary buffer, then LZ4-compress it.
        byte[] entryBuffer;
        using (var entryStream = new MemoryStream())
        using (var entryWriter = new BinaryWriter(entryStream))
        {
            foreach (PackagedFileInfo file in packagedFiles)
            {
                FileEntry13 entry = file.MakeEntryV13();
                BinUtils.WriteStruct(entryWriter, ref entry);
            }

            entryBuffer = entryStream.ToArray();
        }

        byte[] compressedEntries = LZ4Codec.EncodeHC(entryBuffer, 0, entryBuffer.Length);
        writer.Write(compressedEntries);

        header.FileListSize = (UInt32)mainStream.Position - header.FileListOffset;
        header.NumParts = (UInt16)_streams.Count;
        header.Priority = _package.Metadata.Priority;
        header.Flags = (byte)_package.Metadata.Flags;
        header.Md5 = ComputeArchiveHash();
        BinUtils.WriteStruct(writer, ref header);

        // Trailer: offset back to the header start, then the package signature.
        writer.Write((UInt32)(8 + Marshal.SizeOf(typeof(LSPKHeader13))));
        writer.Write(Package.Signature);
    }
}
/// <summary>
/// Serializes a single attribute value in LSF format; string-like and
/// buffer types are handled inline, everything else is delegated to BinUtils.
/// </summary>
private void WriteAttributeValue(BinaryWriter writer, NodeAttribute attr)
{
    switch (attr.Type)
    {
        case NodeAttribute.DataType.DT_String:
        case NodeAttribute.DataType.DT_Path:
        case NodeAttribute.DataType.DT_FixedString:
        case NodeAttribute.DataType.DT_LSString:
        case NodeAttribute.DataType.DT_WString:
        case NodeAttribute.DataType.DT_LSWString:
            WriteString(writer, (string)attr.Value);
            break;

        case NodeAttribute.DataType.DT_TranslatedString:
        {
            var translated = (TranslatedString)attr.Value;
            // BG3-era files store the string-table version instead of the raw value.
            if (Version >= (uint)FileVersion.VerBG3)
            {
                writer.Write(translated.Version);
            }
            else
            {
                WriteStringWithLength(writer, translated.Value ?? "");
            }

            WriteStringWithLength(writer, translated.Handle);
            break;
        }

        case NodeAttribute.DataType.DT_TranslatedFSString:
            WriteTranslatedFSString(writer, (TranslatedFSString)attr.Value);
            break;

        case NodeAttribute.DataType.DT_ScratchBuffer:
            // Raw buffer, written without a length prefix.
            writer.Write((byte[])attr.Value);
            break;

        default:
            BinUtils.WriteAttribute(writer, attr);
            break;
    }
}
/// <summary>
/// Parses a V13 package header and its LZ4-compressed file list.
/// </summary>
/// <exception cref="InvalidDataException">
/// Thrown on an unsupported version or a corrupt file list.
/// </exception>
private Package ReadPackageV13(FileStream mainStream, BinaryReader reader)
{
    var package = new Package();
    var header = BinUtils.ReadStruct<LSPKHeader13>(reader);

    if (header.Version != (ulong)Package.CurrentVersion)
    {
        throw new InvalidDataException(
            $"Unsupported package version {header.Version}; this extractor only supports {Package.CurrentVersion}");
    }

    package.Metadata.Flags = (PackageFlags)header.Flags;
    package.Metadata.Priority = header.Priority;

    // In metadata-only mode we stop before touching the file list.
    if (_metadataOnly)
    {
        return package;
    }

    OpenStreams(mainStream, header.NumParts);
    mainStream.Seek(header.FileListOffset, SeekOrigin.Begin);

    int numFiles = reader.ReadInt32();
    int entriesSize = Marshal.SizeOf(typeof(FileEntry13)) * numFiles;
    // The stored list size includes the 4-byte file count we just consumed.
    byte[] compressedEntries = reader.ReadBytes((int)header.FileListSize - 4);

    var entryBuffer = new byte[entriesSize];
    int decodedSize = LZ4Codec.Decode(compressedEntries, 0, compressedEntries.Length,
        entryBuffer, 0, entriesSize, true);
    if (decodedSize != entriesSize)
    {
        throw new InvalidDataException(
            $"LZ4 compressor disagrees about the size of file headers; expected {entriesSize}, got {decodedSize}");
    }

    var entries = new FileEntry13[numFiles];
    BinUtils.ReadStructs(new BinaryReader(new MemoryStream(entryBuffer)), entries);

    foreach (var entry in entries)
    {
        package.Files.Add(PackagedFileInfo.CreateFromEntry(entry, _streams[entry.ArchivePart]));
    }

    return package;
}
/// <summary>
/// Reads the structure headers for the LSOF resource
/// </summary>
/// <param name="s">Stream to read the node headers from</param>
/// <param name="longNodes">Use the long (V3) on-disk node format</param>
private void ReadNodes(Stream s, bool longNodes)
{
#if DEBUG_LSF_SERIALIZATION
    Console.WriteLine(" ----- DUMP OF NODE TABLE -----");
#endif
    Nodes = new List <NodeInfo>();
    using (var reader = new BinaryReader(s))
    {
        Int32 index = 0;
        // Consume fixed-size node entries until the stream is exhausted.
        while (s.Position < s.Length)
        {
            var resolved = new NodeInfo();
#if DEBUG_LSF_SERIALIZATION
            var pos = s.Position;
#endif
            if (longNodes)
            {
                // Long (V3) node entry layout.
                var item = BinUtils.ReadStruct <NodeEntryV3>(reader);
                resolved.ParentIndex = item.ParentIndex;
                resolved.NameIndex = item.NameIndex;
                resolved.NameOffset = item.NameOffset;
                resolved.FirstAttributeIndex = item.FirstAttributeIndex;
            }
            else
            {
                // Short (V2) node entry layout; fields map 1:1 onto NodeInfo.
                var item = BinUtils.ReadStruct <NodeEntryV2>(reader);
                resolved.ParentIndex = item.ParentIndex;
                resolved.NameIndex = item.NameIndex;
                resolved.NameOffset = item.NameOffset;
                resolved.FirstAttributeIndex = item.FirstAttributeIndex;
            }

#if DEBUG_LSF_SERIALIZATION
            Console.WriteLine(String.Format(
                "{0}: {1} @ {2:X} (parent {3}, firstAttribute {4})",
                index, Names[resolved.NameIndex][resolved.NameOffset], pos,
                resolved.ParentIndex, resolved.FirstAttributeIndex
            ));
#endif
            Nodes.Add(resolved);
            index++;
        }
    }
}
/// <summary>
/// Serializes a single attribute value in LSB format; unhandled types are
/// delegated to BinUtils.WriteAttribute.
/// </summary>
private void WriteAttribute(NodeAttribute attr)
{
    switch (attr.Type)
    {
        case NodeAttribute.DataType.DT_String:
        case NodeAttribute.DataType.DT_Path:
        case NodeAttribute.DataType.DT_FixedString:
        case NodeAttribute.DataType.DT_LSString:
            WriteString((string)attr.Value, true);
            break;

        case NodeAttribute.DataType.DT_WString:
        case NodeAttribute.DataType.DT_LSWString:
            WriteWideString((string)attr.Value, true);
            break;

        case NodeAttribute.DataType.DT_TranslatedString:
        {
            var translated = (TranslatedString)attr.Value;
            // V4+ files with no literal value store the version number instead.
            bool versionOnly = Version >= 4 && translated.Value == null;
            if (versionOnly)
            {
                writer.Write(translated.Version);
            }
            else
            {
                WriteString(translated.Value ?? "", true);
            }

            WriteString(translated.Handle, true);
            break;
        }

        case NodeAttribute.DataType.DT_ScratchBuffer:
        {
            // Length-prefixed raw byte buffer.
            var bytes = (byte[])attr.Value;
            writer.Write((UInt32)bytes.Length);
            writer.Write(bytes);
            break;
        }

        // DT_TranslatedFSString not supported in LSB
        default:
            BinUtils.WriteAttribute(writer, attr);
            break;
    }
}
/// <summary>
/// Writes all package files followed by the compressed V13 file list,
/// header and trailing signature.
/// </summary>
/// <param name="mainStream">Stream of the primary archive part.</param>
public void WriteV13(FileStream mainStream)
{
    long totalSize = package.Files.Sum(p => (long)p.Size());
    long bytesDone = 0;
    var packagedFiles = new List<PackagedFileInfo>();
    foreach (var file in this.package.Files)
    {
        writeProgress(file, bytesDone, totalSize);
        packagedFiles.Add(WriteFile(file));
        bytesDone += file.Size();
    }

    using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true))
    {
        // The file list begins at the current position; remember it for the header.
        var header = new LSPKHeader13
        {
            Version = Version,
            FileListOffset = (UInt32)mainStream.Position
        };

        writer.Write((UInt32)packagedFiles.Count);

        // Serialize every file entry into a buffer, then LZ4-compress the whole list.
        byte[] entryBuffer;
        using (var entryStream = new MemoryStream())
        using (var entryWriter = new BinaryWriter(entryStream))
        {
            foreach (var file in packagedFiles)
            {
                var entry = file.MakeEntryV13();
                BinUtils.WriteStruct<FileEntry13>(entryWriter, ref entry);
            }

            entryBuffer = entryStream.ToArray();
        }

        var compressedEntries = LZ4Codec.EncodeHC(entryBuffer, 0, entryBuffer.Length);
        writer.Write(compressedEntries);

        header.FileListSize = (UInt32)mainStream.Position - header.FileListOffset;
        header.NumParts = (UInt16)streams.Count;
        header.SomePartVar = 0; // ???
        header.ArchiveGuid = Guid.NewGuid();
        BinUtils.WriteStruct<LSPKHeader13>(writer, ref header);

        // Trailer: offset back to the header start, then the package signature.
        writer.Write((UInt32)(8 + Marshal.SizeOf(typeof(LSPKHeader13))));
        writer.Write(Package.Signature);
    }
}
/// <summary>
/// Reads a single LSB attribute value of the given type; unhandled types
/// are delegated to BinUtils.ReadAttribute.
/// </summary>
private NodeAttribute ReadAttribute(NodeAttribute.DataType type)
{
    switch (type)
    {
        case NodeAttribute.DataType.DT_String:
        case NodeAttribute.DataType.DT_Path:
        case NodeAttribute.DataType.DT_FixedString:
        case NodeAttribute.DataType.DT_LSString:
        {
            var attr = new NodeAttribute(type);
            attr.Value = ReadString(true);
            return attr;
        }

        case NodeAttribute.DataType.DT_WString:
        case NodeAttribute.DataType.DT_LSWString:
        {
            var attr = new NodeAttribute(type);
            attr.Value = ReadWideString(true);
            return attr;
        }

        case NodeAttribute.DataType.DT_TranslatedString:
        {
            // Value is stored first, followed by the translation handle.
            var attr = new NodeAttribute(type);
            var translated = new TranslatedString
            {
                Value = ReadString(true),
                Handle = ReadString(true)
            };
            attr.Value = translated;
            return attr;
        }

        case NodeAttribute.DataType.DT_ScratchBuffer:
        {
            // Length-prefixed raw byte buffer.
            var attr = new NodeAttribute(type);
            int length = reader.ReadInt32();
            attr.Value = reader.ReadBytes(length);
            return attr;
        }

        // DT_TranslatedFSString not supported in LSB
        default:
            return BinUtils.ReadAttribute(type, reader);
    }
}
/// <summary>
/// Writes the values of every attribute of a node to the value stream and
/// appends a matching V2 attribute table entry for each.
/// </summary>
private void WriteNodeAttributesV2(Node node)
{
    UInt32 valueStart = (UInt32)ValueStream.Position;
    foreach (KeyValuePair <string, NodeAttribute> pair in node.Attributes)
    {
        WriteAttributeValue(ValueWriter, pair.Value);

        // Pack the type into the low 6 bits and the value's byte length above it.
        var valueLength = (UInt32)ValueStream.Position - valueStart;
        var entry = new AttributeEntryV2
        {
            TypeAndLength = (UInt32)pair.Value.Type | (valueLength << 6),
            NameHashTableIndex = AddStaticString(pair.Key),
            NodeIndex = NextNodeIndex
        };
        BinUtils.WriteStruct <AttributeEntryV2>(AttributeWriter, ref entry);
        NextAttributeIndex++;

        // Next attribute's value begins where this one ended.
        valueStart = (UInt32)ValueStream.Position;
    }
}
/// <summary>
/// Returns an uncompressed stream for this packaged file, reading (and
/// CRC-checking) the on-disk data on first use; the result is cached.
/// </summary>
/// <returns>Seekable in-memory stream with the uncompressed file data.</returns>
/// <exception cref="InvalidDataException">
/// Thrown on a short read or a CRC mismatch.
/// </exception>
public override Stream MakeStream()
{
    if (_uncompressedStream != null)
    {
        return(_uncompressedStream);
    }

    var compressed = new byte[SizeOnDisk];
    PackageStream.Seek(OffsetInFile, SeekOrigin.Begin);
    // FIX: Stream.Read may legally return fewer bytes than requested; loop
    // over partial reads instead of treating one short read as fatal.
    int readSize = ReadFully(PackageStream, compressed, (int)SizeOnDisk);
    if (readSize != SizeOnDisk)
    {
        string msg = $"Failed to read {SizeOnDisk} bytes from archive (only got {readSize})";
        throw new InvalidDataException(msg);
    }

    if (Crc != 0)
    {
        UInt32 computedCrc = Crc32.Compute(compressed, 0);
        if (computedCrc != Crc)
        {
            string msg = $"CRC check failed on file '{Name}', archive is possibly corrupted. Expected {Crc,8:X}, got {computedCrc,8:X}";
            throw new InvalidDataException(msg);
        }
    }

    if (Solid)
    {
        SolidStream.Seek(SolidOffset, SeekOrigin.Begin);
        byte[] uncompressed = new byte[UncompressedSize];
        // FIX: the original ignored the return value of Read here, which
        // could silently yield a truncated buffer on a short read.
        int solidRead = ReadFully(SolidStream, uncompressed, (int)UncompressedSize);
        if (solidRead != UncompressedSize)
        {
            throw new InvalidDataException(
                $"Failed to read {UncompressedSize} bytes from solid stream (only got {solidRead})");
        }

        _uncompressedStream = new MemoryStream(uncompressed);
    }
    else
    {
        byte[] uncompressed = BinUtils.Decompress(compressed, (int)Size(), (byte)Flags);
        _uncompressedStream = new MemoryStream(uncompressed);
    }

    return(_uncompressedStream);
}

// Reads up to count bytes into buffer, looping over partial reads.
// Returns the number of bytes actually read (less than count only at EOF).
private static int ReadFully(Stream stream, byte[] buffer, int count)
{
    int total = 0;
    while (total < count)
    {
        int read = stream.Read(buffer, total, count - total);
        if (read == 0)
        {
            break; // end of stream reached early
        }

        total += read;
    }

    return total;
}
/// <summary>
/// Parses a V7 package: header at the start of the file followed by an
/// uncompressed file entry table.
/// </summary>
private Package ReadPackageV7(FileStream mainStream, BinaryReader reader)
{
    var package = new Package();
    mainStream.Seek(0, SeekOrigin.Begin);
    var header = BinUtils.ReadStruct<LSPKHeader7>(reader);
    OpenStreams(mainStream, (int)header.NumParts);

    for (uint fileIndex = 0; fileIndex < header.NumFiles; fileIndex++)
    {
        var entry = BinUtils.ReadStruct<FileEntry7>(reader);
        // Offsets within the first part are relative to the data section.
        if (entry.ArchivePart == 0)
        {
            entry.OffsetInFile += header.DataOffset;
        }

        package.Files.Add(PackagedFileInfo.CreateFromEntry(entry, streams[entry.ArchivePart]));
    }

    return package;
}
/// <summary>
/// Writes the V15 file list: entry count, compressed list size, then the
/// LZ4-compressed entry table.
/// </summary>
private void WriteFileListV15(BinaryWriter metadataWriter, List <PackagedFileInfo> files)
{
    // Serialize every entry into a temporary buffer first.
    byte[] entryBuffer;
    using (var entryStream = new MemoryStream())
    using (var entryWriter = new BinaryWriter(entryStream))
    {
        foreach (PackagedFileInfo file in files)
        {
            FileEntry15 entry = file.MakeEntryV15();
            BinUtils.WriteStruct(entryWriter, ref entry);
        }

        entryBuffer = entryStream.ToArray();
    }

    byte[] compressedEntries = LZ4Codec.EncodeHC(entryBuffer, 0, entryBuffer.Length);
    metadataWriter.Write((UInt32)files.Count);
    metadataWriter.Write((UInt32)compressedEntries.Length);
    metadataWriter.Write(compressedEntries);
}
/// <summary>
/// Parses a V13 package header and its LZ4-compressed file list.
/// </summary>
/// <exception cref="InvalidDataException">
/// Thrown on an unsupported version or a corrupt file list.
/// </exception>
private Package ReadPackageV13(FileStream mainStream, BinaryReader reader)
{
    var package = new Package();
    var header = BinUtils.ReadStruct<LSPKHeader13>(reader);

    if (header.Version != Package.CurrentVersion)
    {
        throw new InvalidDataException(String.Format(
            "Unsupported package version {0}; this extractor only supports {1}",
            header.Version, Package.CurrentVersion));
    }

    OpenStreams(mainStream, header.NumParts);
    mainStream.Seek(header.FileListOffset, SeekOrigin.Begin);
    int numFiles = reader.ReadInt32();
    int entriesSize = Marshal.SizeOf(typeof(FileEntry13)) * numFiles;
    // FileListSize includes the 4-byte file count that was just consumed.
    byte[] compressedEntries = reader.ReadBytes((int)header.FileListSize - 4);

    var entryBuffer = new byte[entriesSize];
    var decodedSize = LZ4Codec.Decode(compressedEntries, 0, compressedEntries.Length,
        entryBuffer, 0, entriesSize, true);
    if (decodedSize != entriesSize)
    {
        throw new InvalidDataException(String.Format(
            "LZ4 compressor disagrees about the size of file headers; expected {0}, got {1}",
            entriesSize, decodedSize));
    }

    var entryReader = new BinaryReader(new MemoryStream(entryBuffer));
    for (int i = 0; i < numFiles; i++)
    {
        var entry = BinUtils.ReadStruct<FileEntry13>(entryReader);
        package.Files.Add(PackagedFileInfo.CreateFromEntry(entry, streams[entry.ArchivePart]));
    }

    return package;
}
/// <summary>
/// Parses a V10 package: 4-byte signature, header, then an uncompressed
/// file entry table using the V13 entry layout.
/// </summary>
private Package ReadPackageV10(FileStream mainStream, BinaryReader reader)
{
    var package = new Package();
    mainStream.Seek(4, SeekOrigin.Begin);
    var header = BinUtils.ReadStruct<LSPKHeader10>(reader);
    OpenStreams(mainStream, header.NumParts);

    for (uint fileIndex = 0; fileIndex < header.NumFiles; fileIndex++)
    {
        var entry = BinUtils.ReadStruct<FileEntry13>(reader);
        // Offsets within the first part are relative to the data section.
        if (entry.ArchivePart == 0)
        {
            entry.OffsetInFile += header.DataOffset;
        }

        // Add missing compression level flags
        entry.Flags = (entry.Flags & 0x0f) | 0x20;

        package.Files.Add(PackagedFileInfo.CreateFromEntry(entry, streams[entry.ArchivePart]));
    }

    return package;
}
/// <summary>
/// Reads the V3 attribute headers for the LSOF resource
/// </summary>
/// <param name="s">Stream to read the attribute headers from</param>
private void ReadAttributesV3(Stream s)
{
    Attributes = new List <AttributeInfo>();
    using (var reader = new BinaryReader(s))
    {
        // Consume fixed-size V3 attribute entries until the stream is exhausted.
        while (s.Position < s.Length)
        {
            var attribute = BinUtils.ReadStruct <AttributeEntryV3>(reader);
            var resolved = new AttributeInfo();
            resolved.NameIndex = attribute.NameIndex;
            resolved.NameOffset = attribute.NameOffset;
            resolved.TypeId = attribute.TypeId;
            resolved.Length = attribute.Length;
            // V3 entries store the value offset and next-attribute link
            // explicitly (unlike V2, where both must be reconstructed).
            resolved.DataOffset = attribute.Offset;
            resolved.NextAttributeIndex = attribute.NextAttributeIndex;
            Attributes.Add(resolved);
        }

#if DEBUG_LSF_SERIALIZATION
        Console.WriteLine(" ----- DUMP OF V3 ATTRIBUTE TABLE -----");
        for (int i = 0; i < Attributes.Count; i++)
        {
            var resolved = Attributes[i];
            var debug = String.Format(
                "{0}: {1} (offset {2:X}, typeId {3}, nextAttribute {4})",
                i, Names[resolved.NameIndex][resolved.NameOffset], resolved.DataOffset,
                resolved.TypeId, resolved.NextAttributeIndex
            );
            Console.WriteLine(debug);
        }
#endif
    }
}
/// <summary>
/// Builds a PackagedFileInfo from a V7 file entry; the entry name is a
/// fixed-size, zero-terminated UTF-8 byte array.
/// </summary>
internal static PackagedFileInfo CreateFromEntry(FileEntry7 entry, Stream dataStream)
{
    // The name field is zero-padded; take everything up to the first NUL.
    int nameLen = Array.IndexOf(entry.Name, (byte)0);
    if (nameLen < 0)
    {
        nameLen = entry.Name.Length;
    }

    var info = new PackagedFileInfo
    {
        PackageStream = dataStream,
        Name = Encoding.UTF8.GetString(entry.Name, 0, nameLen),
        OffsetInFile = entry.OffsetInFile,
        SizeOnDisk = entry.SizeOnDisk,
        UncompressedSize = entry.UncompressedSize,
        ArchivePart = entry.ArchivePart,
        Crc = 0
    };

    // V7 entries carry no compression flags; a nonzero uncompressed size
    // implies zlib-compressed data, otherwise the file is stored raw.
    info.Flags = entry.UncompressedSize > 0
        ? BinUtils.MakeCompressionFlags(CompressionMethod.Zlib, CompressionLevel.DefaultCompression)
        : (uint)0;

    return info;
}
/// <summary>
/// Serializes a resource to the output stream in LSF format: regions are
/// written into in-memory node/attribute/value streams, the string table is
/// collected, all four buffers are compressed, and finally the header plus
/// the buffers are emitted.
/// </summary>
/// <param name="resource">Resource to serialize</param>
public void Write(Resource resource)
{
    // LSF output always uses LZ4 at max compression.
    Compression = CompressionMethod.LZ4;
    CompressionLevel = CompressionLevel.MaxCompression;

    using (this.Writer = new BinaryWriter(Stream, Encoding.Default, true))
    using (this.NodeStream = new MemoryStream())
    using (this.NodeWriter = new BinaryWriter(NodeStream))
    using (this.AttributeStream = new MemoryStream())
    using (this.AttributeWriter = new BinaryWriter(AttributeStream))
    using (this.ValueStream = new MemoryStream())
    using (this.ValueWriter = new BinaryWriter(ValueStream))
    {
        NextNodeIndex = 0;
        NextAttributeIndex = 0;
        NodeIndices = new Dictionary <Node, int>();
        // Pre-populate the string hash table with empty buckets.
        StringHashMap = new List <List <string> >(StringHashMapSize);
        while (StringHashMap.Count < StringHashMapSize)
        {
            StringHashMap.Add(new List <string>());
        }

        // Fills the node/attribute/value streams and the string hash map.
        WriteRegions(resource);

        byte[] stringBuffer = null;
        using (var stringStream = new MemoryStream())
        using (var stringWriter = new BinaryWriter(stringStream))
        {
            WriteStaticStrings(stringWriter);
            stringBuffer = stringStream.ToArray();
        }

        var nodeBuffer = NodeStream.ToArray();
        var attributeBuffer = AttributeStream.ToArray();
        var valueBuffer = ValueStream.ToArray();

        var header = new Header();
        header.Magic = BitConverter.ToUInt32(Header.Signature, 0);
        header.Version = Version;
        // Engine version packed as major.minor.revision.build, one byte each.
        header.EngineVersion = (resource.Metadata.majorVersion << 24) |
                               (resource.Metadata.minorVersion << 16) |
                               (resource.Metadata.revision << 8) |
                               resource.Metadata.buildNumber;

        // Newer LSF versions compress each section in chunks.
        bool chunked = header.Version >= (ulong)FileVersion.VerChunkedCompress;
        byte[] stringsCompressed = BinUtils.Compress(stringBuffer, Compression, CompressionLevel);
        byte[] nodesCompressed = BinUtils.Compress(nodeBuffer, Compression, CompressionLevel, chunked);
        byte[] attributesCompressed = BinUtils.Compress(attributeBuffer, Compression, CompressionLevel, chunked);
        byte[] valuesCompressed = BinUtils.Compress(valueBuffer, Compression, CompressionLevel, chunked);

        header.StringsUncompressedSize = (UInt32)stringBuffer.Length;
        header.StringsSizeOnDisk = (UInt32)stringsCompressed.Length;
        header.NodesUncompressedSize = (UInt32)nodeBuffer.Length;
        header.NodesSizeOnDisk = (UInt32)nodesCompressed.Length;
        header.AttributesUncompressedSize = (UInt32)attributeBuffer.Length;
        header.AttributesSizeOnDisk = (UInt32)attributesCompressed.Length;
        header.ValuesUncompressedSize = (UInt32)valueBuffer.Length;
        header.ValuesSizeOnDisk = (UInt32)valuesCompressed.Length;
        header.CompressionFlags = BinUtils.MakeCompressionFlags(Compression, CompressionLevel);
        header.Unknown2 = 0;
        header.Unknown3 = 0;
        header.Extended = ExtendedNodes ? 1u : 0u;

        BinUtils.WriteStruct <Header>(Writer, ref header);
        // Section order must match the header: strings, nodes, attributes, values.
        Writer.Write(stringsCompressed, 0, stringsCompressed.Length);
        Writer.Write(nodesCompressed, 0, nodesCompressed.Length);
        Writer.Write(attributesCompressed, 0, attributesCompressed.Length);
        Writer.Write(valuesCompressed, 0, valuesCompressed.Length);
    }
}
/// <summary>
/// Reads a single LSB attribute value of the given type; unhandled types
/// are delegated to BinUtils.ReadAttribute.
/// </summary>
private NodeAttribute ReadAttribute(NodeAttribute.DataType type)
{
    switch (type)
    {
        case NodeAttribute.DataType.DT_String:
        case NodeAttribute.DataType.DT_Path:
        case NodeAttribute.DataType.DT_FixedString:
        case NodeAttribute.DataType.DT_LSString:
        {
            var attr = new NodeAttribute(type);
            attr.Value = ReadString(true);
            return(attr);
        }

        case NodeAttribute.DataType.DT_WString:
        case NodeAttribute.DataType.DT_LSWString:
        {
            var attr = new NodeAttribute(type);
            attr.Value = ReadWideString(true);
            return(attr);
        }

        case NodeAttribute.DataType.DT_TranslatedString:
        {
            var attr = new NodeAttribute(type);
            var str = new TranslatedString();
            if (IsBG3)
            {
                str.Version = reader.ReadUInt16();

                // Sometimes BG3 string keys still contain the value?
                // Weird heuristic to find these cases
                var test = reader.ReadUInt16();
                if (test == 0)
                {
                    // Looks like an inline value: rewind past both UInt16
                    // probes and re-read the field as a version-0 string.
                    stream.Seek(-4, SeekOrigin.Current);
                    str.Version = 0;
                    str.Value = ReadString(true);
                }
                else
                {
                    // No inline value; rewind only the probe read.
                    stream.Seek(-2, SeekOrigin.Current);
                    str.Value = null;
                }
            }
            else
            {
                str.Version = 0;
                str.Value = ReadString(true);
            }

            str.Handle = ReadString(true);
            attr.Value = str;
            return(attr);
        }

        case NodeAttribute.DataType.DT_ScratchBuffer:
        {
            // Length-prefixed raw byte buffer.
            var attr = new NodeAttribute(type);
            var bufferLength = reader.ReadInt32();
            attr.Value = reader.ReadBytes(bufferLength);
            return(attr);
        }

        // DT_TranslatedFSString not supported in LSB
        default:
            return(BinUtils.ReadAttribute(type, reader));
    }
}
/// <summary>
/// Reads an LSF resource from the input stream: validates the header, then
/// decompresses and parses the name, node, attribute and value sections in
/// on-disk order.
/// </summary>
/// <returns>The deserialized resource</returns>
/// <exception cref="InvalidDataException">
/// Thrown on a bad signature or an unsupported LSF version.
/// </exception>
public Resource Read()
{
    using (var reader = new BinaryReader(Stream))
    {
        var hdr = BinUtils.ReadStruct <Header>(reader);
        if (hdr.Magic != BitConverter.ToUInt32(Header.Signature, 0))
        {
            var msg = String.Format(
                "Invalid LSF signature; expected {0,8:X}, got {1,8:X}",
                BitConverter.ToUInt32(Header.Signature, 0), hdr.Magic
            );
            throw new InvalidDataException(msg);
        }

        if (hdr.Version < (ulong)FileVersion.VerInitial ||
            hdr.Version > (ulong)FileVersion.CurrentVersion)
        {
            var msg = String.Format("LSF version {0} is not supported", hdr.Version);
            throw new InvalidDataException(msg);
        }

        // When not compressed, the on-disk size of each section equals its
        // uncompressed size.
        bool isCompressed = BinUtils.CompressionFlagsToMethod(hdr.CompressionFlags) != CompressionMethod.None;

        // --- Name (string) table section ---
        if (hdr.StringsSizeOnDisk > 0 || hdr.StringsUncompressedSize > 0)
        {
            uint onDiskSize = isCompressed ? hdr.StringsSizeOnDisk : hdr.StringsUncompressedSize;
            byte[] compressed = reader.ReadBytes((int)onDiskSize);
            byte[] uncompressed;
            if (isCompressed)
            {
                uncompressed = BinUtils.Decompress(compressed, (int)hdr.StringsUncompressedSize, hdr.CompressionFlags);
            }
            else
            {
                uncompressed = compressed;
            }

#if DUMP_LSF_SERIALIZATION
            using (var nodesFile = new FileStream("names.bin", FileMode.Create, FileAccess.Write))
            {
                nodesFile.Write(uncompressed, 0, uncompressed.Length);
            }
#endif

            using (var namesStream = new MemoryStream(uncompressed))
            {
                ReadNames(namesStream);
            }
        }

        // --- Node table section ---
        if (hdr.NodesSizeOnDisk > 0 || hdr.NodesUncompressedSize > 0)
        {
            uint onDiskSize = isCompressed ? hdr.NodesSizeOnDisk : hdr.NodesUncompressedSize;
            var uncompressed = Decompress(reader, onDiskSize, hdr.NodesUncompressedSize, hdr);

#if DUMP_LSF_SERIALIZATION
            using (var nodesFile = new FileStream("nodes.bin", FileMode.Create, FileAccess.Write))
            {
                nodesFile.Write(uncompressed, 0, uncompressed.Length);
            }
#endif

            using (var nodesStream = new MemoryStream(uncompressed))
            {
                // Long (V3) node entries are used when the header's Extended
                // flag is set on a sufficiently new version.
                var longNodes = hdr.Version >= (ulong)FileVersion.VerExtendedNodes
                    && hdr.Extended == 1;
                ReadNodes(nodesStream, longNodes);
            }
        }

        // --- Attribute table section ---
        if (hdr.AttributesSizeOnDisk > 0 || hdr.AttributesUncompressedSize > 0)
        {
            uint onDiskSize = isCompressed ? hdr.AttributesSizeOnDisk : hdr.AttributesUncompressedSize;
            var uncompressed = Decompress(reader, onDiskSize, hdr.AttributesUncompressedSize, hdr);

#if DUMP_LSF_SERIALIZATION
            using (var attributesFile = new FileStream("attributes.bin", FileMode.Create, FileAccess.Write))
            {
                attributesFile.Write(uncompressed, 0, uncompressed.Length);
            }
#endif

            using (var attributesStream = new MemoryStream(uncompressed))
            {
                // Same extended-format check as the node table.
                var longAttributes = hdr.Version >= (ulong)FileVersion.VerExtendedNodes
                    && hdr.Extended == 1;
                if (longAttributes)
                {
                    ReadAttributesV3(attributesStream);
                }
                else
                {
                    ReadAttributesV2(attributesStream);
                }
            }
        }

        // --- Value blob section ---
        if (hdr.ValuesSizeOnDisk > 0 || hdr.ValuesUncompressedSize > 0)
        {
            uint onDiskSize = isCompressed ? hdr.ValuesSizeOnDisk : hdr.ValuesUncompressedSize;
            var uncompressed = Decompress(reader, onDiskSize, hdr.ValuesUncompressedSize, hdr);
            var valueStream = new MemoryStream(uncompressed);
            // Stored on the instance; deliberately not disposed here.
            this.Values = valueStream;

#if DUMP_LSF_SERIALIZATION
            using (var valuesFile = new FileStream("values.bin", FileMode.Create, FileAccess.Write))
            {
                valuesFile.Write(uncompressed, 0, uncompressed.Length);
            }
#endif
        }
        else
        {
            this.Values = new MemoryStream();
        }

        Resource resource = new Resource();
        ReadRegions(resource);

        // Unpack the engine version: one byte each of major.minor.revision.build.
        resource.Metadata.majorVersion = (hdr.EngineVersion & 0xff000000) >> 24;
        resource.Metadata.minorVersion = (hdr.EngineVersion & 0xff0000) >> 16;
        resource.Metadata.revision = (hdr.EngineVersion & 0xff00) >> 8;
        resource.Metadata.buildNumber = (hdr.EngineVersion & 0xff);

        return(resource);
    }
}
/// <summary>
/// Reads the V2 attribute headers for the LSOF resource
/// </summary>
/// <param name="s">Stream to read the attribute headers from</param>
private void ReadAttributesV2(Stream s)
{
    Attributes = new List <AttributeInfo>();
    using (var reader = new BinaryReader(s))
    {
#if DEBUG_LSF_SERIALIZATION
        var rawAttributes = new List <AttributeEntryV2>();
#endif

        // V2 entries store neither a value offset nor a next-attribute link;
        // both are reconstructed here. prevAttributeRefs[n] holds the index
        // of the most recently seen attribute for node slot n, so each
        // node's attributes can be chained via NextAttributeIndex.
        var prevAttributeRefs = new List <Int32>();
        UInt32 dataOffset = 0;
        Int32 index = 0;
        while (s.Position < s.Length)
        {
            var attribute = BinUtils.ReadStruct <AttributeEntryV2>(reader);
            var resolved = new AttributeInfo();
            resolved.NameIndex = attribute.NameIndex;
            resolved.NameOffset = attribute.NameOffset;
            resolved.TypeId = attribute.TypeId;
            resolved.Length = attribute.Length;
            // Values are laid out back-to-back, so the offset is a running sum.
            resolved.DataOffset = dataOffset;
            resolved.NextAttributeIndex = -1;

            // NOTE(review): the +1 presumably shifts a NodeIndex of -1 into
            // slot 0 — confirm against the V2 writer.
            var nodeIndex = attribute.NodeIndex + 1;
            if (prevAttributeRefs.Count > nodeIndex)
            {
                // Chain the node's previous attribute to this one.
                if (prevAttributeRefs[nodeIndex] != -1)
                {
                    Attributes[prevAttributeRefs[nodeIndex]].NextAttributeIndex = index;
                }

                prevAttributeRefs[nodeIndex] = index;
            }
            else
            {
                // First attribute for this node slot; grow the list as needed.
                while (prevAttributeRefs.Count < nodeIndex)
                {
                    prevAttributeRefs.Add(-1);
                }

                prevAttributeRefs.Add(index);
            }

#if DEBUG_LSF_SERIALIZATION
            rawAttributes.Add(attribute);
#endif

            dataOffset += resolved.Length;
            Attributes.Add(resolved);
            index++;
        }

#if DEBUG_LSF_SERIALIZATION
        Console.WriteLine(" ----- DUMP OF ATTRIBUTE REFERENCES -----");
        for (int i = 0; i < prevAttributeRefs.Count; i++)
        {
            Console.WriteLine(String.Format("Node {0}: last attribute {1}", i, prevAttributeRefs[i]));
        }

        Console.WriteLine(" ----- DUMP OF V2 ATTRIBUTE TABLE -----");
        for (int i = 0; i < Attributes.Count; i++)
        {
            var resolved = Attributes[i];
            var attribute = rawAttributes[i];
            var debug = String.Format(
                "{0}: {1} (offset {2:X}, typeId {3}, nextAttribute {4}, node {5})",
                i, Names[resolved.NameIndex][resolved.NameOffset], resolved.DataOffset,
                resolved.TypeId, resolved.NextAttributeIndex, attribute.NodeIndex
            );
            Console.WriteLine(debug);
        }
#endif
    }
}
/// <summary>
/// Compresses and writes a single file into the current archive part,
/// starting a new part when the current one would exceed MaxPackageSize.
/// </summary>
/// <param name="info">Source file to package</param>
/// <returns>Metadata describing where and how the file was stored</returns>
public PackagedFileInfo WriteFile(AbstractFileInfo info)
{
    // Assume that all files are written uncompressed (worst-case) when calculating package sizes
    uint size = info.Size();
    if (_streams.Last().Position + size > MaxPackageSize)
    {
        // Start a new package file if the current one is full.
        string partPath = Package.MakePartFilename(_path, _streams.Count);
        var nextPart = File.Open(partPath, FileMode.Create, FileAccess.Write);
        _streams.Add(nextPart);
    }

    Stream stream = _streams.Last();
    var packaged = new PackagedFileInfo
    {
        PackageStream = stream,
        Name = info.Name,
        UncompressedSize = size,
        ArchivePart = (UInt32)(_streams.Count - 1),
        OffsetInFile = (UInt32)stream.Position,
        Flags = BinUtils.MakeCompressionFlags(Compression, CompressionLevel)
    };

    Stream packagedStream = info.MakeStream();
    byte[] compressed;
    try
    {
        using (var reader = new BinaryReader(packagedStream, Encoding.UTF8, true))
        {
            byte[] uncompressed = reader.ReadBytes((int)reader.BaseStream.Length);
            compressed = BinUtils.Compress(uncompressed, Compression, CompressionLevel);
            stream.Write(compressed, 0, compressed.Length);
        }
    }
    finally
    {
        // Always release the source stream, even if compression/writing fails.
        info.ReleaseStream();
    }

    packaged.SizeOnDisk = (UInt32)(stream.Position - packaged.OffsetInFile);
    packaged.Crc = Crc32.Compute(compressed, 0);

    int padLength = PaddingLength();
    // Position % padLength is never negative, so "<= 0" triggers exactly
    // when the stream is already aligned on a padLength boundary.
    if (stream.Position % padLength <= 0)
    {
        return(packaged);
    }

    // No padding for solid archives — presumably because their data is
    // re-compressed as a single contiguous frame; confirm against the
    // solid reader.
    if ((_package.Metadata.Flags & PackageFlags.Solid) == 0)
    {
        // Pad the file so the next entry starts on a padLength boundary
        var pad = new byte[padLength - stream.Position % padLength];
        for (var i = 0; i < pad.Length; i++)
        {
            pad[i] = 0xAD;
        }

        stream.Write(pad, 0, pad.Length);
    }

    return(packaged);
}
/// <summary>
/// Parses a V13 package header and file list, including support for
/// "solid" archives whose file data is compressed as a single LZ4 frame.
/// </summary>
/// <exception cref="InvalidDataException">
/// Thrown on an unsupported version, a corrupt file list, or an
/// inconsistent solid archive layout.
/// </exception>
private Package ReadPackageV13(FileStream mainStream, BinaryReader reader)
{
    var package = new Package();
    var header = BinUtils.ReadStruct <LSPKHeader13>(reader);

    if (header.Version != (ulong)PackageVersion.V13)
    {
        string msg = $"Unsupported package version {header.Version}; this package layout is only supported for {PackageVersion.V13}";
        throw new InvalidDataException(msg);
    }

    package.Metadata.Flags = (PackageFlags)header.Flags;
    package.Metadata.Priority = header.Priority;
    package.Version = PackageVersion.V13;

    // Metadata-only mode stops before reading the file list.
    if (_metadataOnly)
    {
        return(package);
    }

    OpenStreams(mainStream, header.NumParts);
    mainStream.Seek(header.FileListOffset, SeekOrigin.Begin);
    int numFiles = reader.ReadInt32();
    int fileBufferSize = Marshal.SizeOf(typeof(FileEntry13)) * numFiles;
    // FileListSize includes the 4-byte file count read above.
    byte[] compressedFileList = reader.ReadBytes((int)header.FileListSize - 4);

    var uncompressedList = new byte[fileBufferSize];
    int uncompressedSize = LZ4Codec.Decode(compressedFileList, 0, compressedFileList.Length, uncompressedList, 0, fileBufferSize, true);
    if (uncompressedSize != fileBufferSize)
    {
        string msg = $"LZ4 compressor disagrees about the size of file headers; expected {fileBufferSize}, got {uncompressedSize}";
        throw new InvalidDataException(msg);
    }

    var ms = new MemoryStream(uncompressedList);
    var msr = new BinaryReader(ms);
    var entries = new FileEntry13[numFiles];
    BinUtils.ReadStructs(msr, entries);

    if ((package.Metadata.Flags & PackageFlags.Solid) == PackageFlags.Solid && numFiles > 0)
    {
        // Calculate compressed frame offset and bounds
        uint totalUncompressedSize = 0;
        uint totalSizeOnDisk = 0;
        uint firstOffset = 0xffffffff;
        uint lastOffset = 0;

        foreach (var entry in entries)
        {
            totalUncompressedSize += entry.UncompressedSize;
            totalSizeOnDisk += entry.SizeOnDisk;
            if (entry.OffsetInFile < firstOffset)
            {
                firstOffset = entry.OffsetInFile;
            }

            if (entry.OffsetInFile + entry.SizeOnDisk > lastOffset)
            {
                lastOffset = entry.OffsetInFile + entry.SizeOnDisk;
            }
        }

        // Sanity check: files must start right after the frame prologue and
        // be contiguous. NOTE(review): 7 appears to be the fixed size of the
        // LZ4 frame header preceding the first file — confirm against the
        // writer.
        if (firstOffset != 7 || lastOffset - firstOffset != totalSizeOnDisk)
        {
            string msg = $"Incorrectly compressed solid archive; offsets {firstOffset}/{lastOffset}, bytes {totalSizeOnDisk}";
            throw new InvalidDataException(msg);
        }

        // Decompress all files as a single frame (solid)
        byte[] frame = new byte[lastOffset];
        mainStream.Seek(0, SeekOrigin.Begin);
        mainStream.Read(frame, 0, (int)lastOffset);

        byte[] decompressed = Native.LZ4FrameCompressor.Decompress(frame);
        var decompressedStream = new MemoryStream(decompressed);

        // Update offsets to point to the decompressed chunk
        uint offset = 7;
        uint compressedOffset = 0;
        foreach (var entry in entries)
        {
            if (entry.OffsetInFile != offset)
            {
                throw new InvalidDataException("File list in solid archive not contiguous");
            }

            var file = PackagedFileInfo.CreateSolidFromEntry(entry, _streams[entry.ArchivePart], compressedOffset, decompressedStream);
            package.Files.Add(file);

            offset += entry.SizeOnDisk;
            compressedOffset += entry.UncompressedSize;
        }
    }
    else
    {
        foreach (var entry in entries)
        {
            package.Files.Add(PackagedFileInfo.CreateFromEntry(entry, _streams[entry.ArchivePart]));
        }
    }

    return(package);
}