/// <summary>
/// Extracts and decompresses a page, copying it to an output stream.
/// </summary>
/// <param name="page">The page to decompress and extract.</param>
/// <param name="inStream">The Stream open on the extractor's cache file to read the page from.</param>
/// <param name="outStream">The Stream to write the extracted page to.</param>
/// <exception cref="NotSupportedException">Thrown if the page uses an unrecognized compression method.</exception>
public void ExtractPage(ResourcePage page, Stream inStream, Stream outStream)
{
    inStream.Position = _rawTable.Offset + page.Offset;
    switch (page.CompressionMethod)
    {
        case ResourcePageCompression.None:
            StreamUtil.Copy(inStream, outStream, page.UncompressedSize);
            break;
        case ResourcePageCompression.Deflate:
            // Dispose the DeflateStream so its internal buffers are released;
            // leaveOpen: true keeps the caller's cache stream open.
            using (var deflate = new DeflateStream(inStream, CompressionMode.Decompress, true))
                StreamUtil.Copy(deflate, outStream, page.UncompressedSize);
            break;
        default:
            throw new NotSupportedException("Unsupported compression method");
    }
}
/// <summary>
/// Extracts and decompresses a page, copying it to an output stream.
/// </summary>
/// <param name="page">The page to decompress and extract.</param>
/// <param name="inStream">The Stream open on the extractor's cache file to read the page from.</param>
/// <param name="outStream">The Stream to write the extracted page to.</param>
/// <exception cref="NotSupportedException">Thrown if the page uses an unrecognized compression method.</exception>
public void ExtractDecompressPage(ResourcePage page, Stream inStream, Stream outStream)
{
    inStream.Position = _rawTable.Offset + page.Offset;
    switch (page.CompressionMethod)
    {
        case ResourcePageCompression.None:
            StreamUtil.Copy(inStream, outStream, page.UncompressedSize);
            break;
        case ResourcePageCompression.Deflate:
            // Dispose the DeflateStream so its internal buffers are released;
            // leaveOpen: true keeps the caller's cache stream open.
            using (var deflate = new DeflateStream(inStream, CompressionMode.Decompress, true))
                StreamUtil.Copy(deflate, outStream, page.UncompressedSize);
            break;
        default:
            throw new NotSupportedException("Unsupported compression method");
    }
}
/// <summary>
/// Injects a raw page into the cache file's raw table, expanding the table to make room for it.
/// </summary>
/// <param name="cacheStream">The stream open on the cache file to write to.</param>
/// <param name="page">The page descriptor; its CompressedSize determines how much room is reserved.</param>
/// <param name="data">The raw page data to write.</param>
/// <returns>The offset of the injected page relative to the start of the raw table.</returns>
public int InjectPage(IStream cacheStream, ResourcePage page, byte[] data)
{
    const int pageSize = 0x1000;

    // Calculate how many pages we need to add to the raw table.
    // Integer ceiling division replaces the original Math.Ceiling float round-trip.
    var pagesNeeded = (page.CompressedSize + pageSize - 1) / pageSize;

    // Calculate sizes and positions, then resize the raw table.
    var injectSize = pagesNeeded * pageSize;
    var offsetInCache = _rawTable.Offset + _rawTable.Size;
    var offsetInRaw = _rawTable.Size;
    _rawTable.Resize(_rawTable.Size + injectSize, cacheStream);

    // Write the raw page into the table.
    // NOTE(review): assumes data.Length fits within the reserved injectSize
    // (i.e. data.Length <= CompressedSize rounded up) — confirm with callers.
    cacheStream.SeekTo(offsetInCache);
    cacheStream.WriteBlock(data);

    return offsetInRaw;
}
/// <summary>
/// Injects a raw page into the cache file's raw table, expanding the table to make room for it.
/// </summary>
/// <param name="cacheStream">The stream open on the cache file to write to.</param>
/// <param name="page">The page descriptor; its CompressedSize determines how much room is reserved.</param>
/// <param name="data">The raw page data to write.</param>
/// <returns>The offset of the injected page relative to the start of the raw table.</returns>
public int InjectPage(IStream cacheStream, ResourcePage page, byte[] data)
{
    const int pageSize = 0x1000;

    // Calculate how many pages we need to add to the raw table.
    // Integer ceiling division replaces the original Math.Ceiling float round-trip.
    var pagesNeeded = (page.CompressedSize + pageSize - 1) / pageSize;

    // Calculate sizes and positions, then resize the raw table.
    var injectSize = pagesNeeded * pageSize;
    var offsetInCache = _rawTable.Offset + _rawTable.Size;
    var offsetInRaw = _rawTable.Size;
    _rawTable.Resize(_rawTable.Size + injectSize, cacheStream);

    // Write the raw page into the table.
    // NOTE(review): assumes data.Length fits within the reserved injectSize
    // (i.e. data.Length <= CompressedSize rounded up) — confirm with callers.
    cacheStream.SeekTo(offsetInCache);
    cacheStream.WriteBlock(data);

    return offsetInRaw;
}
/// <summary>
/// Injects an extracted resource page's raw data into the cache file.
/// </summary>
/// <param name="resourcePage">The page descriptor for the data being injected.</param>
/// <param name="extractedPage">The extracted page holding the raw data to inject.</param>
/// <param name="stream">The stream open on the cache file to write to.</param>
/// <returns>The offset of the injected data relative to the start of the raw table.</returns>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="extractedPage"/> is null.</exception>
public int InjectExtractedResourcePage(ResourcePage resourcePage, ExtractedPage extractedPage, IStream stream)
{
    if (extractedPage == null)
        throw new ArgumentNullException(nameof(extractedPage)); // nameof is refactor-safe

    var injector = new ResourcePageInjector(_cacheFile);
    var rawOffset = injector.InjectPage(stream, resourcePage, extractedPage.ExtractedPageData);

    // Remember which resource page index this extracted page was injected for.
    _extractedResourcePages[extractedPage] = extractedPage.ResourcePageIndex;

    return rawOffset;
}
/// <summary>
/// Injects a resource page into the cache file's resource table, re-using any
/// previously-injected copy of the same page.
/// </summary>
/// <param name="page">The page to inject.</param>
/// <param name="stream">The stream open on the cache file to read/write.</param>
/// <returns>The page's new index in the resource table, or -1 if there is no resource table.</returns>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="page"/> is null.</exception>
public int InjectResourcePage(ResourcePage page, IStream stream)
{
    if (_resources == null)
        return -1;
    if (page == null)
        throw new ArgumentNullException(nameof(page)); // first ctor arg is the parameter name, not a message

    // Don't inject the page if it's already been injected
    int newIndex;
    if (_pageIndices.TryGetValue(page, out newIndex))
        return newIndex;

    // Add the page and associate its new index with it
    var extractedRaw = _container.FindExtractedResourcePage(page.Index);
    newIndex = _resources.Pages.Count;
    page.Index = newIndex;

    // haxhaxhax, oh aaron
    LoadResourceTable(stream);

    // Inject the raw data if we have an extracted copy of it
    if (extractedRaw != null)
    {
        var rawOffset = InjectExtractedResourcePage(page, extractedRaw, stream);
        page.Offset = rawOffset;
        page.FilePath = null;
    }

    _resources.Pages.Add(page);
    _pageIndices[page] = newIndex;
    return newIndex;
}
/// <summary>
/// Deserializes a resource page from an "rspg" block. Field read order must match
/// the serialized layout exactly.
/// </summary>
/// <param name="reader">The reader positioned at the start of the page record.</param>
/// <param name="version">The "rspg" block version (0 or 1; version 1 adds the salt field).</param>
/// <returns>The deserialized page.</returns>
/// <exception cref="InvalidOperationException">Thrown if the block version is unrecognized.</exception>
private static ResourcePage ReadResourcePage(IReader reader, byte version)
{
    if (version > 1)
        throw new InvalidOperationException("Unrecognized \"rspg\" block version");

    var result = new ResourcePage();
    result.Index = reader.ReadInt32();
    if (version > 0)
        result.Salt = reader.ReadUInt16(); // salt only exists in version 1+
    result.Flags = reader.ReadByte();

    // An empty path string means the page lives in this cache file.
    var path = reader.ReadAscii();
    result.FilePath = (path.Length > 0) ? path : null;

    result.Offset = reader.ReadInt32();
    result.UncompressedSize = reader.ReadInt32();
    result.CompressionMethod = (ResourcePageCompression) reader.ReadByte();
    result.CompressedSize = reader.ReadInt32();
    result.Checksum = reader.ReadUInt32();
    result.Hash1 = ReadByteArray(reader);
    result.Hash2 = ReadByteArray(reader);
    result.Hash3 = ReadByteArray(reader);
    result.Unknown1 = reader.ReadInt32();
    result.Unknown2 = reader.ReadInt32();
    result.Unknown3 = reader.ReadInt32();
    return result;
}
/// <summary>
/// Extracts a page, copying it to an output stream without decompressing it.
/// </summary>
/// <param name="page">The page to extract.</param>
/// <param name="inStream">The Stream open on the extractor's cache file to read the page from.</param>
/// <param name="outStream">The Stream to write the extracted page to.</param>
public void ExtractPage(ResourcePage page, Stream inStream, Stream outStream)
{
    // A page offset of -1 means "start of the data region".
    var pageOffset = (page.Offset == -1) ? 0 : page.Offset;

    // When offsets are absolute file offsets, no base adjustment is needed;
    // otherwise they are relative to the raw table.
    var baseOffset = _fileOffsets ? 0 : _rawTable.Offset;

    inStream.Position = pageOffset + baseOffset;
    StreamUtil.Copy(inStream, outStream, page.CompressedSize);
}
/// <summary>
/// Injects a resource page into the resource table, re-using any
/// previously-injected copy of the same page.
/// </summary>
/// <param name="page">The page to inject.</param>
/// <param name="reader">The reader open on the cache file.</param>
/// <returns>The page's new index in the resource table.</returns>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="page"/> is null.</exception>
public int InjectResourcePage(ResourcePage page, IReader reader)
{
    if (page == null)
        throw new ArgumentNullException(nameof(page)); // first ctor arg is the parameter name, not a message

    // Don't inject the page if it's already been injected
    int newIndex;
    if (_pageIndices.TryGetValue(page, out newIndex))
        return newIndex;

    // Add the page and associate its new index with it
    newIndex = _resources.Pages.Count;
    page.Index = newIndex;

    // haxhaxhax
    LoadResourceTable(reader);

    _resources.Pages.Add(page);
    _pageIndices[page] = newIndex;
    return newIndex;
}
/// <summary>
/// Serializes a resource page into a structure value collection.
/// </summary>
/// <param name="page">The page to serialize.</param>
/// <param name="externalFiles">The shared cache file references used to resolve the page's file path to an index.</param>
/// <returns>The serialized values for the page.</returns>
private StructureValueCollection SerializePage(ResourcePage page, FourthGenCacheFileReference[] externalFiles)
{
    // 0xFFFFFFFF (-1) marks "no codec" / "no shared file".
    var codecIndex = (page.CompressionMethod != ResourcePageCompression.None) ? 0 : 0xFFFFFFFF;
    var sharedFileIndex = (page.FilePath != null)
        ? (uint) FindExternalFile(externalFiles, page.FilePath)
        : 0xFFFFFFFF;

    var result = new StructureValueCollection();
    result.SetInteger("salt", page.Salt);
    result.SetInteger("flags", page.Flags);
    result.SetInteger("compression codec index", codecIndex);
    result.SetInteger("shared cache file index", sharedFileIndex);
    result.SetInteger("unknown 1", (uint) page.Unknown1);
    result.SetInteger("compressed block offset", (uint) page.Offset);
    result.SetInteger("compressed block size", (uint) page.CompressedSize);
    result.SetInteger("uncompressed block size", (uint) page.UncompressedSize);
    result.SetInteger("checksum", page.Checksum);
    result.SetRaw("hash 1", page.Hash1);
    result.SetRaw("hash 2", page.Hash2);
    result.SetRaw("hash 3", page.Hash3);
    result.SetInteger("unknown 2", (uint) page.Unknown2);
    result.SetInteger("unknown 3", (uint) page.Unknown3);
    return result;
}
/// <summary>
/// Deserializes a resource page from a structure value collection.
/// </summary>
/// <param name="values">The serialized page values to read from.</param>
/// <param name="index">The page's index in the page table.</param>
/// <param name="externalFiles">The shared cache file references used to resolve a file index to a path.</param>
/// <returns>The deserialized page.</returns>
private ResourcePage LoadPage(StructureValueCollection values, int index, FourthGenCacheFileReference[] externalFiles)
{
    var page = new ResourcePage();
    page.Index = index;
    page.Salt = (ushort) values.GetInteger("salt");
    page.Flags = (byte) values.GetInteger("flags");

    // FIXME: hax/laziness — any codec index other than -1 is treated as Deflate.
    var codecIndex = (int) values.GetInteger("compression codec index");
    page.CompressionMethod = (codecIndex == -1)
        ? ResourcePageCompression.None
        : ResourcePageCompression.Deflate;

    // -1 means the page lives in this cache file rather than a shared one.
    var sharedFileIndex = (int) values.GetInteger("shared cache file index");
    page.FilePath = (sharedFileIndex == -1) ? null : externalFiles[sharedFileIndex].Path;

    page.Unknown1 = (int) values.GetIntegerOrDefault("unknown 1", 0);
    page.Offset = (int) values.GetInteger("compressed block offset");
    page.CompressedSize = (int) values.GetInteger("compressed block size");
    page.UncompressedSize = (int) values.GetInteger("uncompressed block size");
    page.Checksum = values.GetInteger("checksum");
    page.Hash1 = values.GetRaw("hash 1");
    page.Hash2 = values.GetRaw("hash 2");
    page.Hash3 = values.GetRaw("hash 3");
    page.Unknown2 = (int) values.GetIntegerOrDefault("unknown 2", 0);
    page.Unknown3 = (int) values.GetIntegerOrDefault("unknown 3", 0);
    return page;
}
/// <summary>
/// Adds information about a resource page to the container. A page with the same
/// index as a previously-added page replaces it.
/// </summary>
/// <param name="page">The page to add.</param>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="page"/> is null.</exception>
public void AddResourcePage(ResourcePage page)
{
    // Guard against a null page, which would otherwise surface as an opaque
    // NullReferenceException on the Index read below.
    if (page == null)
        throw new ArgumentNullException(nameof(page));

    _pagesByIndex[page.Index] = page;
}
/// <summary>
/// Extracts a page, copying it to an output stream without decompressing it.
/// </summary>
/// <param name="page">The page to extract.</param>
/// <param name="inStream">The Stream open on the extractor's cache file to read the page from.</param>
/// <param name="outStream">The Stream to write the extracted page to.</param>
public void ExtractPage(ResourcePage page, Stream inStream, Stream outStream)
{
    // Page offsets are relative to the start of the raw table.
    var position = _rawTable.Offset + page.Offset;
    inStream.Position = position;
    StreamUtil.Copy(inStream, outStream, page.CompressedSize);
}