/// <summary>
/// Determines whether this is a compressed archive and returns true or false.
/// Does not advance stream.
/// </summary>
/// <param name="reader">The stream reader.</param>
public static bool IsCompressed(EndianStreamReader reader)
{
    // Remember where we were so the caller's position is untouched.
    var originalPosition = reader.Position();

    // An archive is considered compressed when it starts with the magic signature.
    bool hasSignature = reader.Read<uint>() == _signature;

    // Rewind to the saved position before returning.
    reader.Seek(originalPosition, SeekOrigin.Begin);
    return hasSignature;
}
/// <summary>
/// Reads an archive from a stream.
/// </summary>
/// <param name="stream">Stream pointing to the start of the archive.</param>
/// <param name="archiveSize">Size of the archive file.</param>
/// <param name="bigEndian">True if big endian.</param>
public ArchiveReader(Stream stream, int archiveSize, bool bigEndian)
{
    _stream = stream;

    // Extract Data.
    using var streamReader = new BufferedStreamReader(stream, 2048);
    using EndianStreamReader endianStreamReader = bigEndian
        ? (EndianStreamReader) new BigEndianStreamReader(streamReader)
        : new LittleEndianStreamReader(streamReader);

    // Texture Count (2 bytes), followed by 2 bytes we skip over.
    endianStreamReader.Read(out short texCount);
    endianStreamReader.Seek(2, SeekOrigin.Current);
    Files = new UnpackTextureFile[texCount];

    // Fix: an empty archive has no offset/size/flag/name tables to read;
    // without this guard the last-file size calculation below would index
    // Files[-1] and throw IndexOutOfRangeException.
    if (texCount < 1)
        return;

    // Get Texture Offsets
    for (int x = 0; x < texCount; x++)
    {
        Files[x].Offset = endianStreamReader.Read<int>();
    }

    // Get texture Sizes
    // Note: We are actually reading some extra bytes as files are padded to 32 bytes.
    for (int x = 0; x < texCount - 1; x++)
    {
        Files[x].Size = Files[x + 1].Offset - Files[x].Offset;
    }

    // The last file's size runs from its offset to the end of the archive.
    Files[texCount - 1].Size = archiveSize - Files[texCount - 1].Offset;

    // Read Texture Flags
    for (int x = 0; x < texCount; x++)
    {
        Files[x].PadFlag = endianStreamReader.Read<byte>();
    }

    // Read Texture Names
    // NOTE(review): string encoding/termination is whatever ReadString() implements;
    // not visible from this file — confirm against BufferedStreamReader.
    for (int x = 0; x < texCount; x++)
    {
        Files[x].Name = streamReader.ReadString();
    }
}
/// <summary>
/// Reads an archive from a stream.
/// </summary>
/// <param name="stream">Stream pointing to the start of the archive.</param>
/// <param name="archiveSize">Size of the archive file.</param>
/// <param name="bigEndian">True if big endian.</param>
public ArchiveReader(Stream stream, int archiveSize, bool bigEndian)
{
    _stream = stream;
    _startPos = stream.Position;

    // Extract Data.
    using var streamReader = new BufferedStreamReader(stream, 2048);
    using EndianStreamReader endianStreamReader = bigEndian ? (EndianStreamReader) new BigEndianStreamReader(streamReader) : new LittleEndianStreamReader(streamReader);

    // Number of groups (bins) in the archive header.
    endianStreamReader.Read(out int binCount);
    Groups = new Group[binCount];

    // Get group item counts. One byte per group = number of files in that group.
    for (int x = 0; x < Groups.Length; x++)
    {
        Groups[x].Files = new Structs.Parser.File[endianStreamReader.Read <byte>()];
    }

    // Alignment: skip forward to the next 4-byte boundary after the count bytes.
    endianStreamReader.Seek(Utilities.Utilities.RoundUp((int)endianStreamReader.Position(), 4) - endianStreamReader.Position(), SeekOrigin.Current);

    // Skip section containing first item for each group. (one short per group, unused here)
    endianStreamReader.Seek(sizeof(short) * Groups.Length, SeekOrigin.Current);

    // Populate IDs: one ushort identifier per group.
    for (int x = 0; x < Groups.Length; x++)
    {
        Groups[x].Id = endianStreamReader.Read <ushort>();
    }

    // Populate offsets. One int file offset per file, across all groups, in group order.
    int[] offsets = new int[Groups.Select(x => x.Files.Length).Sum()];
    for (int x = 0; x < offsets.Length; x++)
    {
        offsets[x] = endianStreamReader.Read <int>();
    }

    // Walk the flat offset table in lockstep with the per-group file arrays,
    // deriving each file's size as (next non-zero offset - its own offset).
    int offsetIndex = 0;
    for (int x = 0; x < Groups.Length; x++)
    {
        var fileCount = Groups[x].Files.Length;
        for (int y = 0; y < fileCount; y++)
        {
            // Do not fill if no more elements left.
            if (offsetIndex >= offsets.Length)
            {
                break;
            }

            var offset = (int)offsets[offsetIndex];
            int nextOffsetIndex = offsetIndex;
            offsetIndex += 1;

            // Find next non-zero value within array; if not found, use archive size..
            // (zero entries presumably mark absent/empty files — TODO confirm against format spec)
            do
            {
                nextOffsetIndex += 1;
            }while (nextOffsetIndex < offsets.Length && offsets[nextOffsetIndex] == 0);

            var nextOffset = nextOffsetIndex < offsets.Length ? offsets[nextOffsetIndex] : archiveSize;

            // Set offsets
            Groups[x].Files[y].Offset = offset;
            Groups[x].Files[y].Size = nextOffset - offset;
        }
    }
}