/// <summary>
/// Verifies that a DirectoryHeader whose DirectoryId was tampered with after
/// creation is rejected by DirectoryHeader.Load with a HelixException.
/// </summary>
public void DirectoryHeader_ThrowsExceptionWhenLoading()
{
    var newHeader = DirectoryHeader.New();

    // Corrupt the header via reflection (DirectoryId is not settable directly).
    newHeader.GetType().GetProperty(nameof(newHeader.DirectoryId)).SetValue(newHeader, "currupt**");
    newHeader.Save("header.hx", DerivedBytesProvider.FromPassword("password"), HelixFileVersion.UnitTest);

    try
    {
        DirectoryHeader.Load("header.hx", DerivedBytesProvider.FromPassword("password"));
        Assert.Fail("Did not detect corruption");
    }
    catch (HelixException)
    {
        // Expected: the corrupted DirectoryId must be rejected on load.
    }
    finally
    {
        // Clean up the temp header file even when the test fails.
        File.Delete("header.hx");
    }
}
/// <summary>
/// Round-trips a freshly created DirectoryHeader through Save/Load and
/// verifies that the file-name key and directory id survive intact.
/// </summary>
public void DirectoryHeader_NewSaveLoad()
{
    var newHeader = DirectoryHeader.New();
    newHeader.Save("header.hx", DerivedBytesProvider.FromPassword("password"), HelixFileVersion.UnitTest);

    try
    {
        var loadHeader = DirectoryHeader.Load("header.hx", DerivedBytesProvider.FromPassword("password"));

        Assert.AreEqual(newHeader.FileNameKey.ToHex(), loadHeader.FileNameKey.ToHex());
        Assert.AreEqual(newHeader.DirectoryId, loadHeader.DirectoryId);
    }
    finally
    {
        // Delete the temp file even when an assertion above fails.
        File.Delete("header.hx");
    }
}
/// <summary>
/// Parses a FILETABLE.BIN image from <paramref name="input"/>: detects byte
/// order from the magic value, reads the name, directory and batch tables,
/// builds the per-directory file lists, and commits the result to this
/// instance only after the whole table parsed successfully.
/// </summary>
/// <param name="input">Stream positioned at the start of the file table.</param>
/// <exception cref="FormatException">A structural field has an unexpected value.</exception>
/// <exception cref="InvalidOperationException">Computed offsets or sizes are inconsistent.</exception>
/// <exception cref="NotSupportedException">A batch uses an unsupported entry format.</exception>
public void Deserialize(Stream input)
{
    // The magic doubles as a byte-order mark: matching only after a swap
    // means the file was written with the opposite endianness.
    var magic = input.ReadValueU16(Endian.Little);
    if (magic != Signature && magic.Swap() != Signature)
    {
        throw new FormatException();
    }
    var endian = magic == Signature ? Endian.Little : Endian.Big;

    var nameCount = input.ReadValueU16(endian);
    var directoryCount = input.ReadValueU16(endian);
    var unknown06 = input.ReadValueU16(endian);
    if (unknown06 != 4)
    {
        throw new FormatException();
    }

    var fileTableOffset = input.ReadValueU32(endian);
    var totalSize = input.ReadValueU32(endian); // size of FILETABLE.BIN
    var titleId1 = input.ReadString(16, true, Encoding.ASCII);
    var titleId2 = input.ReadString(16, true, Encoding.ASCII);
    var unknown30 = input.ReadValueU16(endian);
    var unknown32 = input.ReadValueU8();
    var parentalLevel = input.ReadValueU8();
    if (unknown30 != 0 || (unknown32 != 0 && unknown32 != 1))
    {
        throw new FormatException();
    }
    var installDataCryptoKey = input.ReadBytes(16);

    // Name table: entries are matched later by (directory id, file id).
    var nameHeaders = new NameHeader[nameCount];
    for (int i = 0; i < nameCount; i++)
    {
        nameHeaders[i] = NameHeader.Read(input, endian);
    }

    var directoryHeaders = new DirectoryHeader[directoryCount];
    for (int i = 0; i < directoryCount; i++)
    {
        var directoryHeader = directoryHeaders[i] = DirectoryHeader.Read(input, endian);
        if (directoryHeader.Unknown02 != 0 || directoryHeader.Unknown08 != 0)
        {
            throw new FormatException();
        }
    }

    // Each batch header is 8 bytes; the batch table must end exactly where
    // the file table begins, otherwise the counts read above are bogus.
    var totalBatchCount = directoryHeaders.Sum(s => s.BatchCount);
    if (input.Position + (totalBatchCount * 8) != fileTableOffset)
    {
        throw new InvalidOperationException();
    }
    var batchHeaders = new BatchHeader[totalBatchCount];
    for (int i = 0; i < totalBatchCount; i++)
    {
        batchHeaders[i] = BatchHeader.Read(input, endian);
    }

    // Sum the per-batch entry sizes (entry format selected by flags) and
    // verify they account for every remaining byte of the file table.
    // Note: the original also computed a total file count here, but it was
    // never used, so it has been removed.
    int fileTableSize = 0;
    foreach (var batchHeader in batchHeaders)
    {
        var fileTableEntrySizeIndex = (int)batchHeader.Flags;
        if (fileTableEntrySizeIndex == 1 || fileTableEntrySizeIndex > 5)
        {
            throw new NotSupportedException();
        }
        fileTableSize += batchHeader.FileCount * _FileTableEntrySizes[fileTableEntrySizeIndex];
    }
    if (totalSize - fileTableOffset != fileTableSize)
    {
        throw new InvalidOperationException();
    }

    var directories = new List<DirectoryEntry>();
    using (var data = input.ReadToMemoryStream(fileTableSize))
    {
        foreach (var directoryHeader in directoryHeaders)
        {
            var batchIndexBase = directoryHeader.BatchTableOffset / 8;
            var directory = new DirectoryEntry()
            {
                Id = directoryHeader.Id,
                DataBlockSize = directoryHeader.DataBlockSize,
                DataBaseOffset = directoryHeader.DataBaseOffset,
                IsInInstallData = directoryHeader.IsInInstallData,
            };
            for (int i = 0; i < directoryHeader.BatchCount; i++)
            {
                // The batch table offset must address whole 8-byte entries.
                if ((directoryHeader.BatchTableOffset % 8) != 0)
                {
                    throw new FormatException();
                }
                var batchHeader = batchHeaders[batchIndexBase + i];
                var readDataHeader = _FileTableEntryReaders[(int)batchHeader.Flags];
                if (readDataHeader == null)
                {
                    throw new NotSupportedException();
                }
                data.Position = batchHeader.FileTableOffset;
                ushort fileId = batchHeader.BaseFileId;
                for (int j = 0; j < batchHeader.FileCount; j++, fileId++)
                {
                    var fileHeader = readDataHeader(data, endian);

                    // Resolve the optional name hash from this directory's
                    // slice of the name table.
                    uint? nameHash = null;
                    if (directoryHeader.NameTableCount > 0)
                    {
                        if (directoryHeader.NameTableIndex == 0xFFFF)
                        {
                            throw new InvalidOperationException();
                        }
                        var nameIndex = Array.FindIndex(
                            nameHeaders,
                            directoryHeader.NameTableIndex,
                            directoryHeader.NameTableCount,
                            nte => nte.DirectoryId == directoryHeader.Id && nte.FileId == fileId);
                        if (nameIndex >= 0)
                        {
                            nameHash = nameHeaders[nameIndex].NameHash;
                        }
                    }

                    FileEntry file;
                    file.Id = fileId;
                    file.NameHash = nameHash;
                    file.DataBlockOffset = fileHeader.DataBlockOffset;
                    file.DataSize = fileHeader.DataSize;
                    directory.Files.Add(file);
                }
            }
            directories.Add(directory);
        }

        // Commit parsed state only after everything validated.
        this.Endian = endian;
        this.TitleId1 = titleId1;
        this.TitleId2 = titleId2;
        this.Unknown32 = unknown32;
        this.ParentalLevel = parentalLevel;
        this.InstallDataCryptoKey = installDataCryptoKey;
        this.Directories.Clear();
        this.Directories.AddRange(directories);
    }
}
/// <summary>
/// QicStream v2 extractor entry point. Pass 1 copies the input to a temp
/// file while stripping the last 0x402 bytes of every 0x8000-byte block
/// (presumably parity data — see comment below); pass 2 walks the resulting
/// stream's control codes and extracts directories and files to disk.
/// </summary>
/// <param name="args">-f &lt;file&gt; [-d &lt;output dir&gt;] [--offset &lt;n&gt;]</param>
static void Main(string[] args)
{
    string inFileName = "";
    string tempFileName;
    string baseDirectory = "out";
    long customOffset = 0;

    // Minimal flag parsing; each option consumes the following argument.
    // NOTE(review): args[i + 1] is not bounds-checked — a trailing bare flag
    // would throw IndexOutOfRangeException.
    for (int i = 0; i < args.Length; i++)
    {
        if (args[i] == "-f") { inFileName = args[i + 1]; }
        if (args[i] == "-d") { baseDirectory = args[i + 1]; }
        if (args[i] == "--offset") { customOffset = Convert.ToInt64(args[i + 1]); }
    }
    if (inFileName.Length == 0 || !File.Exists(inFileName))
    {
        Console.WriteLine("Usage: qicstreamv2 -f <file name> [-d <output directory>]");
        return;
    }

    byte[] bytes = new byte[65536];
    tempFileName = inFileName + ".tmp";

    // Pass 1: remove unused bytes (parity?) from the original file, and write to temporary file
    using (var stream = new FileStream(inFileName, FileMode.Open, FileAccess.Read))
    {
        using (var outStream = new FileStream(tempFileName, FileMode.Create, FileAccess.Write))
        {
            while (stream.Position < stream.Length)
            {
                // NOTE(review): Read's return value is ignored here (and
                // throughout) — a short read would process stale buffer data.
                stream.Read(bytes, 0, 0x8000);
                // Each block of 0x8000 bytes ends with 0x402 bytes of something (perhaps for parity checking)
                // We'll just remove it and write the good bytes to the temporary file.
                outStream.Write(bytes, 0, 0x8000 - 0x402);
            }
        }
    }

    // Pass 2: extract files.
    try
    {
        using (var stream = new FileStream(tempFileName, FileMode.Open, FileAccess.Read))
        {
            if (customOffset == 0)
            {
                // Read the volume header, which doesn't really contain much vital information.
                stream.Read(bytes, 0, 0x3e);
                string magic = Encoding.ASCII.GetString(bytes, 4, 4);
                if (magic != "FSET")
                {
                    throw new ApplicationException("Incorrect magic value.");
                }

                // The volume header continues with a dynamically-sized volume label:
                int volNameLen = BitConverter.ToUInt16(bytes, 0x3C);
                stream.Read(bytes, 0, volNameLen);
                string volName = Encoding.ASCII.GetString(bytes, 0, volNameLen);
                Console.WriteLine("Backup label: " + volName);

                // ...followed by a dynamically-sized drive name:
                // (which must be aligned on a 2-byte boundary)
                if (stream.Position % 2 == 1)
                {
                    stream.ReadByte();
                }
                stream.Read(bytes, 0, 2);
                int driveNameLen = BitConverter.ToUInt16(bytes, 0);
                stream.Read(bytes, 0, driveNameLen);
                string driveName = Encoding.ASCII.GetString(bytes, 0, driveNameLen);
                Console.WriteLine("Drive name: " + driveName);
            }
            else
            {
                // adjust offset to account for removed bytes
                customOffset -= ((customOffset / 0x8000) * 0x402);
                stream.Seek(customOffset, SeekOrigin.Begin);
            }

            // Maintain a list of subdirectories into which we'll descend and un-descend.
            List<string> currentDirList = new List<string>();
            Directory.CreateDirectory(baseDirectory);
            string currentDirectory = baseDirectory;
            bool isCatalogRegion = false;

            // And now begins the main sequence of the backup, which consists of a control code,
            // followed by the data (if any) that the control code represents.
            while (stream.Position < stream.Length)
            {
                ControlCode code = (ControlCode)stream.ReadByte();
                if (code == ControlCode.CatalogStart)
                {
                    // Catalog region: headers are parsed but nothing is written to disk.
                    isCatalogRegion = true;
                }
                else if (code == ControlCode.ContentsStart)
                {
                    isCatalogRegion = false;
                }
                else if (code == ControlCode.ParentDirectory && !isCatalogRegion)
                {
                    // Go "up" to the parent directory
                    if (currentDirList.Count > 0)
                    {
                        currentDirList.RemoveAt(currentDirList.Count - 1);
                    }
                }
                else if (code == ControlCode.Directory)
                {
                    // This control code is followed by a directory header which tells us the name
                    // of the directory that we're descending into.
                    var header = new DirectoryHeader(stream);
                    if (!isCatalogRegion)
                    {
                        currentDirList.Add(header.Name);
                        // Rebuild the full output path from the accumulated chain.
                        currentDirectory = baseDirectory;
                        for (int i = 0; i < currentDirList.Count; i++)
                        {
                            currentDirectory = Path.Combine(currentDirectory, currentDirList[i]);
                        }
                        Directory.CreateDirectory(currentDirectory);
                        Directory.SetCreationTime(currentDirectory, header.DateTime);
                        Directory.SetLastWriteTime(currentDirectory, header.DateTime);
                        Console.WriteLine(stream.Position.ToString("X") + ": New directory - " + currentDirectory + " - " + header.DateTime.ToShortDateString());
                    }
                }
                else if (code == ControlCode.File)
                {
                    // This control code is followed by a file header which tells us all the details
                    // about the file, followed by the actual file contents.
                    var header = new FileHeader(stream);
                    if (!header.Valid)
                    {
                        Console.WriteLine("Invalid file header, probably end of archive.");
                        break;
                    }
                    if (!isCatalogRegion)
                    {
                        string fileName = Path.Combine(currentDirectory, header.Name);
                        using (var f = new FileStream(Path.Combine(currentDirectory, header.Name), FileMode.Create, FileAccess.Write))
                        {
                            int bytesLeft = header.Size;
                            while (bytesLeft > 0)
                            {
                                // Scan forward for the next DataChunk control code;
                                // bail out of Main entirely at end of stream.
                                do
                                {
                                    if (stream.Position >= stream.Length) { return; }
                                    code = (ControlCode)stream.ReadByte();
                                }
                                while (code != ControlCode.DataChunk);

                                // Chunk size is a 16-bit value at offset 1 of the
                                // 3-byte chunk header.
                                stream.Read(bytes, 0, 3);
                                int chunkSize = BitConverter.ToUInt16(bytes, 1);
                                stream.Read(bytes, 0, chunkSize);
                                f.Write(bytes, 0, chunkSize);

                                // On the first chunk only, sanity-check content
                                // against the file name.
                                if (bytesLeft == header.Size)
                                {
                                    if (!VerifyFileFormat(header.Name, bytes))
                                    {
                                        // NOTE(review): ReadKey blocks; this tool is
                                        // interactive-only when a mismatch occurs.
                                        Console.WriteLine(stream.Position.ToString("X") + " -- Warning: file format doesn't match: " + fileName);
                                        Console.ReadKey();
                                    }
                                }
                                bytesLeft -= chunkSize;
                            }
                        }
                        File.SetCreationTime(fileName, header.DateTime);
                        File.SetLastWriteTime(fileName, header.DateTime);
                        File.SetAttributes(fileName, header.Attributes);
                        Console.WriteLine(stream.Position.ToString("X") + ": " + fileName + " - " + header.Size.ToString() + " bytes - " + header.DateTime.ToShortDateString());
                    }
                }
            }
        }
    }
    catch (Exception e)
    {
        Console.WriteLine("Error: " + e.Message);
    }
    finally
    {
        // Always remove the pass-1 temp file.
        File.Delete(tempFileName);
    }
}
/// <summary>
/// Writes <paramref name="files"/> into <paramref name="zipStream"/> as a ZIP
/// archive using the Store (no compression) method: local headers plus file
/// data first, then the central directory, then the Zip64 end records when
/// any count, size, or offset overflows its legacy 16/32-bit field, and
/// finally the end-of-central-directory record.
/// </summary>
/// <param name="zipStream">Destination stream, positioned where the archive starts.</param>
/// <param name="files">Pairs of (archive-relative name, filesystem path) — Key is the entry name, Value the source file.</param>
private static void StoreFiles(Stream zipStream, IReadOnlyList<KeyValuePair<string, string>> files)
{
    var isZip64 = false;
    var count = files.Count;
    // GetLegacyUInt16/32 report whether a value fits the legacy field;
    // any overflow anywhere flips the archive into Zip64 mode.
    var isZip64Count = !GetLegacyUInt16(count, out var legacyCount);
    isZip64 = isZip64 || isZip64Count;

    // Per-file metadata captured while writing local entries, replayed
    // verbatim when writing the central directory below.
    var relativeNames = new byte[count][];
    var offsets = new long[count];
    var methods = new CompressionMethod[count];
    var times = new ushort[count];
    var dates = new ushort[count];
    var crc32s = new int[count];
    var compressedLengths = new long[count];
    var uncompressedLengths = new long[count];

    //local entries and actual data
    Console.Write("Writing files.");
    for (int i = 0; i < count; i++)
    {
        Console.Write($"\rWriting files: {i + 1}/{count}");
        offsets[i] = zipStream.Position;
        var location = files[i].Value;
        var relativeName = files[i].Key;
        var relativeNameBytes = relativeNames[i] = MiscHelpers.Encoding.GetBytes(relativeName);
        using (var file = File.OpenRead(location))
        {
            var dateTime = File.GetLastWriteTime(file.Name);
            // Store method: compressed size equals uncompressed size.
            var uncompressedLength = uncompressedLengths[i] = file.Length;
            var compressedLength = compressedLengths[i] = file.Length;
            var isZip64CompressedLength = !GetLegacyUInt32(compressedLength, out var legacyCompressedLength);
            var isZip64UncompressedLength = !GetLegacyUInt32(uncompressedLength, out var legacyUncompressedLength);
            isZip64 = isZip64 || isZip64CompressedLength || isZip64UncompressedLength;
            var header = new LocalHeader
            {
                Signature = LocalHeader.DefaultSignature,
                VersionNeeded = ZipVersion,
                Flags = 0,
                Compression = methods[i] = CompressionMethod.Store,
                Time = times[i] = MiscHelpers.GetDosTime(dateTime),
                Date = dates[i] = MiscHelpers.GetDosDate(dateTime),
                Crc32 = crc32s[i] = file.ComputeCrc32(uncompressedLength),
                CompressedSize = legacyCompressedLength,
                UncompressedSize = legacyUncompressedLength,
                NameLength = MiscHelpers.EnsureFitsUInt16(relativeNameBytes.Length),
                ExtraLength = 0,
            };
            // Zip64 extra field: 4 bytes of tag/size header plus 8 bytes per
            // overflowing size field.
            if (isZip64CompressedLength || isZip64UncompressedLength)
            {
                header.ExtraLength = 4;
                if (isZip64CompressedLength)
                {
                    header.ExtraLength += 8;
                }
                if (isZip64UncompressedLength)
                {
                    header.ExtraLength += 8;
                }
            }
            zipStream.WriteValue(header);
            zipStream.Write(relativeNameBytes, 0, relativeNameBytes.Length);
            //zip64 extra
            if (isZip64CompressedLength || isZip64UncompressedLength)
            {
                WriteExtraZip64Header(zipStream, header.ExtraLength);
                if (isZip64UncompressedLength)
                {
                    zipStream.WriteValue(uncompressedLengths[i]);
                }
                if (isZip64CompressedLength)
                {
                    zipStream.WriteValue(compressedLengths[i]);
                }
            }
            // Rewind before copying the data (ComputeCrc32 presumably
            // advanced the stream — TODO confirm against its implementation).
            file.Position = 0;
            file.CopyTo(zipStream);
        }
    }
    Console.WriteLine();

    var directoryStart = zipStream.Position;
    var isZip64DirectoryStart = !GetLegacyUInt32(directoryStart, out var legacyDirectoryStart);
    isZip64 = isZip64 || isZip64DirectoryStart;

    //central directory entries
    Console.Write("Writing central directory.");
    for (int i = 0; i < count; i++)
    {
        Console.Write($"\rWriting central directory: {i + 1}/{count}");
        // Re-derive the legacy/Zip64 decisions from the values recorded in
        // the first pass.
        var isZip64CompressedLength = !GetLegacyUInt32(compressedLengths[i], out var legacyCompressedLength);
        var isZip64UncompressedLength = !GetLegacyUInt32(uncompressedLengths[i], out var legacyUncompressedLength);
        var isZip64LocalOffset = !GetLegacyUInt32(offsets[i], out var legacyOffset);
        isZip64 = isZip64 || isZip64CompressedLength || isZip64UncompressedLength || isZip64LocalOffset;
        var relativeName = relativeNames[i];
        var header = new DirectoryHeader
        {
            Signature = DirectoryHeader.DefaultSignature,
            VersionMadeBy = ZipVersion,
            VersionNeeded = ZipVersion,
            Flags = 0,
            Compression = methods[i],
            Time = times[i],
            Date = dates[i],
            Crc32 = crc32s[i],
            CompressedSize = legacyCompressedLength,
            UncompressedSize = legacyUncompressedLength,
            NameLength = (ushort)relativeName.Length, //gets checked when writing local headers
            ExtraLength = 0,
            CommentLength = 0,
            DiskNumber = 0,
            InternalAttributes = 0,
            ExternalAttributes = 0,
            LocalHeaderOffset = legacyOffset,
        };
        // Central-directory Zip64 extra may also carry the local header offset.
        if (isZip64CompressedLength || isZip64UncompressedLength || isZip64LocalOffset)
        {
            header.ExtraLength = 4;
            if (isZip64CompressedLength)
            {
                header.ExtraLength += 8;
            }
            if (isZip64UncompressedLength)
            {
                header.ExtraLength += 8;
            }
            if (isZip64LocalOffset)
            {
                header.ExtraLength += 8;
            }
        }
        zipStream.WriteValue(header);
        zipStream.Write(relativeName, 0, relativeName.Length);
        //zip64 extra
        if (header.ExtraLength > 0)
        {
            WriteExtraZip64Header(zipStream, header.ExtraLength);
            if (isZip64UncompressedLength)
            {
                zipStream.WriteValue(uncompressedLengths[i]);
            }
            if (isZip64CompressedLength)
            {
                zipStream.WriteValue(compressedLengths[i]);
            }
            if (isZip64LocalOffset)
            {
                zipStream.WriteValue(offsets[i]);
            }
        }
    }
    Console.WriteLine();

    var directoryEnd = zipStream.Position;
    var directorySize = directoryEnd - directoryStart;
    var isZip64DirectorySize = !GetLegacyUInt32(directorySize, out var legacyDirectorySize);
    isZip64 = isZip64 || isZip64DirectorySize;

    // Zip64 end-of-central-directory record plus locator, only when needed.
    if (isZip64)
    {
        var eod64 = new EndOfDirectory64
        {
            Signature = EndOfDirectory64.DefaultSignature,
            // Record size excludes the signature (4) and the size field itself (8).
            Size = (ulong)(MiscHelpers.SizeOf<EndOfDirectory64>() - 4 - 8),
            VersionMadeBy = ZipVersion,
            VersionNeeded = ZipVersion,
            DiskNumber = 0,
            DirectoryDiskNumber = 0,
            DiskRecords = (ulong)count,
            TotalRecords = (ulong)count,
            DirectorySize = (ulong)directorySize,
            DirectoryOffset = (ulong)directoryStart,
        };
        zipStream.WriteValue(eod64);
        var locator64 = new EndOfDirectory64Locator
        {
            Signature = EndOfDirectory64Locator.DefaultSignature,
            EndOfDirectory64DiskNumber = 0,
            EndOfDirectory64Offset = (ulong)directoryEnd,
            TotalDisks = 1,
        };
        zipStream.WriteValue(locator64);
    }

    //central directory end
    var eod = new EndOfDirectory
    {
        Signature = EndOfDirectory.DefaultSignature,
        DiskNumber = 0,
        DirectoryDiskNumber = 0,
        DiskRecords = legacyCount,
        TotalRecords = legacyCount,
        DirectorySize = legacyDirectorySize,
        DirectoryOffset = legacyDirectoryStart,
        CommentLength = 0,
    };
    zipStream.WriteValue(eod);
}
/// <summary>
/// Test fixture setup: wires the mock graph used by directory/file tests
/// (index, caches, factories, block streams, deletion helpers) and seeds the
/// directory with four sample entries — two directories, one file, and one
/// deleted file.
/// </summary>
public void SetUp()
{
    index = new Mock<IIndex<DirectoryItem>>();
    directoryCache = new Mock<IDirectoryCache>();
    allocationManager = new Mock<IAllocationManager>();
    directoryCache.SetupGet(x => x.AllocationManager).Returns(allocationManager.Object);
    header = new DirectoryHeader();

    // Index-block provider factory always hands out the shared provider mock.
    indexBlockProvider = new Mock<IIndexBlockProvider>();
    indexBlockProviderFactory = new Mock<IFactory<IIndexBlockProvider, int, ICommonAccessParameters>>();
    indexBlockProviderFactory.Setup(x => x.Create(It.IsAny<int>(), It.IsAny<ICommonAccessParameters>()))
        .Returns(indexBlockProvider.Object);

    // Name index (block size 10) and its factory.
    nameIndex = new Mock<IIndex<short>>();
    nameIndex.SetupGet(x => x.BlockSize).Returns(10);
    indexFactory = new Mock<IFactory<IIndex<short>, IIndexBlockProvider, ICommonAccessParameters>>();
    indexFactory.Setup(x => x.Create(It.IsAny<IIndexBlockProvider>(), It.IsAny<ICommonAccessParameters>()))
        .Returns(nameIndex.Object);

    // Block stream backing the name index.
    nameBlockStream = new Mock<IBlockStream<short>>();
    blockStreamFactory = new Mock<IFactory<IBlockStream<short>, IBlockProvider<short>>>();
    blockStreamFactory.Setup(x => x.Create(It.IsAny<IBlockProvider<short>>())).Returns(nameBlockStream.Object);

    // Directory index and its factory.
    directoryIndex = new Mock<IIndex<DirectoryItem>>();
    directoryIndexFactory = new Mock<IFactory<IIndex<DirectoryItem>, IIndexBlockProvider, ICommonAccessParameters>>();
    directoryIndexFactory
        .Setup(x => x.Create(It.IsAny<IIndexBlockProvider>(), It.IsAny<ICommonAccessParameters>()))
        .Returns(directoryIndex.Object);

    // Directory and file factories resolve to the shared mocks regardless of input.
    directory = new Mock<IDirectory>();
    directoryFactory = new Mock<IFactory<IDirectory, IIndex<DirectoryItem>, IDirectoryCache, DirectoryHeader>>();
    directoryFactory
        .Setup(x => x.Create(It.IsAny<IIndex<DirectoryItem>>(), It.IsAny<IDirectoryCache>(), It.IsAny<DirectoryHeader>())).Returns(directory.Object);

    file = new Mock<IFile>();
    fileFactory = new Mock<IFactory<IFile, IFileParameters, IDirectoryCache>>();
    fileFactory.Setup(x => x.Create(It.IsAny<IFileParameters>(), It.IsAny<IDirectoryCache>()))
        .Returns(file.Object);

    // Block stream backing the directory index.
    directoryBlockStream = new Mock<IBlockStream<DirectoryItem>>();
    directoryBlockStreamFactory = new Mock<IFactory<IBlockStream<DirectoryItem>, IBlockProvider<DirectoryItem>>>();
    directoryBlockStreamFactory.Setup(x => x.Create(It.IsAny<IBlockProvider<DirectoryItem>>()))
        .Returns(directoryBlockStream.Object);

    // Deletion-related mocks and factories.
    deletionFile = new Mock<IDeletionFile>();
    deletionFileFactory = new Mock<IFactory<IDeletionFile, IFileParameters, IDirectoryCache>>();
    deletionFileFactory.Setup(x => x.Create(It.IsAny<IFileParameters>(), It.IsAny<IDirectoryCache>()))
        .Returns(deletionFile.Object);

    deletionDirectory = new Mock<IDeletionDirectory>();
    deletionDirectoryFactory = new Mock<IFactory<IDeletionDirectory, int, IDirectoryCache>>();
    deletionDirectoryFactory.Setup(x => x.Create(It.IsAny<int>(), It.IsAny<IDirectoryCache>()))
        .Returns(deletionDirectory.Object);

    // Seed the directory contents: note the deleted file shares BlockIndex 3
    // with "File 1" and carries the Deleted flag.
    SetupDirectoryEntries(new[]
    {
        new Entry { BlockIndex = 1, Created = new DateTime(2019, 4, 15), Updated = new DateTime(2019, 4, 15), Size = 1, Flags = DirectoryFlags.Directory, Name = "Dir 1" },
        new Entry { BlockIndex = 2, Created = new DateTime(2019, 4, 14), Updated = new DateTime(2019, 4, 14), Size = 2, Flags = DirectoryFlags.Directory, Name = "Dir 2" },
        new Entry { BlockIndex = 3, Created = new DateTime(2019, 4, 13), Updated = new DateTime(2019, 4, 11), Size = 2, Flags = DirectoryFlags.File, Name = "File 1" },
        new Entry { BlockIndex = 3, Created = new DateTime(2019, 4, 13), Updated = new DateTime(2019, 4, 11), Size = 2, Flags = DirectoryFlags.File | DirectoryFlags.Deleted, Name = "Deleted File 1" }
    });
}