/// <summary>
/// Reads an optional number for each of <paramref name="numFiles"/> entries and
/// reports it through <paramref name="action"/> (null when the entry has no value).
/// A leading optional bit vector marks which entries are defined; the numbers
/// themselves may live in an external data stream selected via the stream switch.
/// </summary>
private void ReadNumberVector(List<byte[]> dataVector, int numFiles, Action<int, long?> action)
{
    var present = ReadOptionalBitVector(numFiles);
    using (var switcher = new CStreamSwitch())
    {
        switcher.Set(this, dataVector);
        for (int index = 0; index < numFiles; index++)
        {
            if (!present[index])
            {
                action(index, null);
                continue;
            }
            // checked: a stored value above long.MaxValue must not wrap silently.
            action(index, checked((long)ReadUInt64()));
        }
    }
}
/// <summary>
/// Parses the decoded 7z header block into <paramref name="db"/>: optional archive
/// properties, optional additional (metadata) streams, the main streams info, and
/// the FilesInfo section with its per-file property records (names, attributes,
/// timestamps, empty-stream/empty-file/anti flags, start positions).
/// </summary>
/// <param name="db">Database populated in place; its Files list is rebuilt.</param>
/// <param name="getTextPassword">Password provider forwarded to packed-stream decoding.</param>
/// <exception cref="InvalidDataException">On malformed or inconsistent header data.</exception>
private void ReadHeader(CArchiveDatabaseEx db, IPasswordProvider getTextPassword)
{
    Log.WriteLine("-- ReadHeader --");
    Log.PushIndent();
    try
    {
        BlockType? type = ReadId();

        // Optional archive-level properties precede everything else.
        if (type == BlockType.ArchiveProperties)
        {
            ReadArchiveProperties();
            type = ReadId();
        }

        // Additional streams carry packed metadata (e.g. compressed file names);
        // later property records may switch reading over to them.
        List<byte[]> dataVector = null;
        if (type == BlockType.AdditionalStreamsInfo)
        {
            dataVector = ReadAndDecodePackedStreams(db.StartPositionAfterHeader, getTextPassword);
            type = ReadId();
        }

        List<long> unpackSizes;
        List<uint?> digests;

        if (type == BlockType.MainStreamsInfo)
        {
            ReadStreamsInfo(dataVector,
                out db.DataStartPosition,
                out db.PackSizes,
                out db.PackCRCs,
                out db.Folders,
                out db.NumUnpackStreamsVector,
                out unpackSizes,
                out digests);

            // Stream offsets in the header are relative to the end of the signature header.
            db.DataStartPosition += db.StartPositionAfterHeader;
            type = ReadId();
        }
        else
        {
            // No main streams info: synthesize one unpack stream per folder,
            // taking size and CRC from the folder itself.
            unpackSizes = new List<long>(db.Folders.Count);
            digests = new List<uint?>(db.Folders.Count);
            db.NumUnpackStreamsVector = new List<int>(db.Folders.Count);
            for (int i = 0; i < db.Folders.Count; i++)
            {
                var folder = db.Folders[i];
                unpackSizes.Add(folder.GetUnpackSize());
                digests.Add(folder.UnpackCRC);
                db.NumUnpackStreamsVector.Add(1);
            }
        }

        db.Files.Clear();

        if (type == BlockType.End)
            return;

        if (type != BlockType.FilesInfo)
            throw new InvalidDataException();

        int numFiles = ReadNum();
        Log.WriteLine("NumFiles: " + numFiles);

        db.Files = new List<CFileItem>(numFiles);
        for (int i = 0; i < numFiles; i++)
            db.Files.Add(new CFileItem());

        BitVector emptyStreamVector = new BitVector(numFiles);
        BitVector emptyFileVector = null;
        BitVector antiFileVector = null;
        int numEmptyStreams = 0;

        // Property records: each is <id><size><payload>, terminated by End.
        for (;;)
        {
            type = ReadId();
            if (type == BlockType.End)
                break;

            // FIX (was TODO): record sizes are stored unsigned; reject values that
            // do not fit a non-negative long with InvalidDataException instead of
            // letting the previous checked cast surface an OverflowException.
            ulong rawSize = ReadNumber();
            if (rawSize > long.MaxValue)
                throw new InvalidDataException();
            long size = (long)rawSize;

            int oldPos = _currentReader.Offset;
            switch (type)
            {
                case BlockType.Name:
                    // Names may be stored in an additional (decoded) stream.
                    using (var streamSwitch = new CStreamSwitch())
                    {
                        streamSwitch.Set(this, dataVector);
                        Log.Write("FileNames:");
                        for (int i = 0; i < db.Files.Count; i++)
                        {
                            db.Files[i].Name = _currentReader.ReadString();
                            Log.Write(" " + db.Files[i].Name);
                        }
                        Log.WriteLine();
                    }
                    break;
                case BlockType.WinAttributes:
                    Log.Write("WinAttributes:");
                    ReadAttributeVector(dataVector, numFiles, delegate (int i, uint? attr)
                    {
                        db.Files[i].Attrib = attr;
                        Log.Write(" " + (attr.HasValue ? attr.Value.ToString("x8") : "n/a"));
                    });
                    Log.WriteLine();
                    break;
                case BlockType.EmptyStream:
                    emptyStreamVector = ReadBitVector(numFiles);
                    Log.Write("EmptyStream: ");
                    for (int i = 0; i < emptyStreamVector.Length; i++)
                    {
                        if (emptyStreamVector[i])
                        {
                            Log.Write("x");
                            numEmptyStreams++;
                        }
                        else
                        {
                            Log.Write(".");
                        }
                    }
                    Log.WriteLine();
                    // Empty-file / anti vectors are indexed by empty-stream ordinal.
                    emptyFileVector = new BitVector(numEmptyStreams);
                    antiFileVector = new BitVector(numEmptyStreams);
                    break;
                case BlockType.EmptyFile:
                    emptyFileVector = ReadBitVector(numEmptyStreams);
                    Log.Write("EmptyFile: ");
                    for (int i = 0; i < numEmptyStreams; i++)
                        Log.Write(emptyFileVector[i] ? "x" : ".");
                    Log.WriteLine();
                    break;
                case BlockType.Anti:
                    antiFileVector = ReadBitVector(numEmptyStreams);
                    Log.Write("Anti: ");
                    for (int i = 0; i < numEmptyStreams; i++)
                        Log.Write(antiFileVector[i] ? "x" : ".");
                    Log.WriteLine();
                    break;
                case BlockType.StartPos:
                    Log.Write("StartPos:");
                    ReadNumberVector(dataVector, numFiles, delegate (int i, long? startPos)
                    {
                        db.Files[i].StartPos = startPos;
                        Log.Write(" " + (startPos.HasValue ? startPos.Value.ToString() : "n/a"));
                    });
                    Log.WriteLine();
                    break;
                case BlockType.CTime:
                    Log.Write("CTime:");
                    ReadDateTimeVector(dataVector, numFiles, delegate (int i, DateTime? time)
                    {
                        db.Files[i].CTime = time;
                        Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
                    });
                    Log.WriteLine();
                    break;
                case BlockType.ATime:
                    Log.Write("ATime:");
                    ReadDateTimeVector(dataVector, numFiles, delegate (int i, DateTime? time)
                    {
                        db.Files[i].ATime = time;
                        Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
                    });
                    Log.WriteLine();
                    break;
                case BlockType.MTime:
                    Log.Write("MTime:");
                    ReadDateTimeVector(dataVector, numFiles, delegate (int i, DateTime? time)
                    {
                        db.Files[i].MTime = time;
                        Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
                    });
                    Log.WriteLine();
                    break;
                case BlockType.Dummy:
                    // Padding record: every byte must be zero.
                    Log.Write("Dummy: " + size);
                    for (long j = 0; j < size; j++)
                        if (ReadByte() != 0)
                            throw new InvalidDataException();
                    break;
                default:
                    SkipData(size);
                    break;
            }

            // since 0.3 record sizes must be correct
            bool checkRecordsSize = (db.MajorVersion > 0 || db.MinorVersion > 2);
            if (checkRecordsSize && _currentReader.Offset - oldPos != size)
                throw new InvalidDataException();
        }

        // Distribute stream sizes/CRCs and empty-entry flags onto the file items.
        int emptyFileIndex = 0;
        int sizeIndex = 0;
        for (int i = 0; i < numFiles; i++)
        {
            CFileItem file = db.Files[i];
            file.HasStream = !emptyStreamVector[i];
            if (file.HasStream)
            {
                file.IsDir = false;
                file.IsAnti = false;
                file.Size = unpackSizes[sizeIndex];
                file.Crc = digests[sizeIndex];
                sizeIndex++;
            }
            else
            {
                // Empty stream: either a directory, an empty file, or an anti-file.
                file.IsDir = !emptyFileVector[emptyFileIndex];
                file.IsAnti = antiFileVector[emptyFileIndex];
                emptyFileIndex++;
                file.Size = 0;
                file.Crc = null;
            }
        }
    }
    finally
    {
        Log.PopIndent();
    }
}
/// <summary>
/// Reads the archive database: validates the signature-header fields in
/// <c>_header</c>, locates and CRC-checks the "next header", then parses it
/// (decoding it first if it is stored as an EncodedHeader block).
/// </summary>
/// <exception cref="InvalidDataException">On version mismatch, CRC failure, or malformed header.</exception>
public void ReadDatabase(CArchiveDatabaseEx db, IPasswordProvider pass)
{
    db.Clear();

    // Format version bytes follow the 6-byte signature in the 0x20-byte start header.
    db.MajorVersion = _header[6];
    db.MinorVersion = _header[7];
    if (db.MajorVersion != 0)
        throw new InvalidDataException();

    uint crcFromArchive = DataReader.Get32(_header, 8);
    long nextHeaderOffset = (long)DataReader.Get64(_header, 0xC);
    long nextHeaderSize = (long)DataReader.Get64(_header, 0x14);
    uint nextHeaderCrc = DataReader.Get32(_header, 0x1C);

    // The stored CRC covers offset, size and CRC of the next header,
    // in exactly this field order.
    uint crc = CRC.kInitCRC;
    crc = CRC.Update(crc, nextHeaderOffset);
    crc = CRC.Update(crc, nextHeaderSize);
    crc = CRC.Update(crc, nextHeaderCrc);
    crc = CRC.Finish(crc);

    if (crc != crcFromArchive)
        throw new InvalidDataException();

    db.StartPositionAfterHeader = _streamOrigin + 0x20;

    // empty header is ok
    if (nextHeaderSize == 0)
        return;

    if (nextHeaderOffset < 0 || nextHeaderSize < 0 || nextHeaderSize > Int32.MaxValue)
        throw new InvalidDataException();

    // The next header must start within the stream.
    if (nextHeaderOffset > _streamEnding - db.StartPositionAfterHeader)
        throw new InvalidDataException();

    // NOTE(review): assumes the stream is currently positioned at
    // StartPositionAfterHeader, since the seek is relative — TODO confirm at callers.
    _stream.Seek(nextHeaderOffset, SeekOrigin.Current);

    byte[] header = new byte[nextHeaderSize];
    _stream.ReadExact(header, 0, header.Length);

    if (CRC.Finish(CRC.Update(CRC.kInitCRC, header, 0, header.Length)) != nextHeaderCrc)
        throw new InvalidDataException();

    using (CStreamSwitch streamSwitch = new CStreamSwitch())
    {
        // Parse the next header from the in-memory buffer.
        streamSwitch.Set(this, header);

        BlockType? type = ReadId();
        if (type != BlockType.Header)
        {
            // Not a plain header: must be an EncodedHeader that we decode first.
            if (type != BlockType.EncodedHeader)
                throw new InvalidDataException();

            var dataVector = ReadAndDecodePackedStreams(db.StartPositionAfterHeader, pass);

            // compressed header without content is odd but ok
            if (dataVector.Count == 0)
                return;

            if (dataVector.Count != 1)
                throw new InvalidDataException();

            streamSwitch.Set(this, dataVector[0]);

            if (ReadId() != BlockType.Header)
                throw new InvalidDataException();
        }

        ReadHeader(db, pass);
    }
}
/// <summary>
/// Reads an optional 32-bit attribute for each of <paramref name="numFiles"/> entries
/// and reports it through <paramref name="action"/> (null when undefined). A leading
/// optional bit vector marks defined entries; values may come from an external stream.
/// </summary>
private void ReadAttributeVector(List<byte[]> dataVector, int numFiles, Action<int, uint?> action)
{
    var defined = ReadOptionalBitVector(numFiles);
    using (var switcher = new CStreamSwitch())
    {
        switcher.Set(this, dataVector);
        for (int index = 0; index < numFiles; index++)
            action(index, defined[index] ? ReadUInt32() : (uint?)null);
    }
}
/// <summary>
/// Parses the UnpackInfo section: the folder (coder-graph) definitions, the
/// per-output-stream unpack sizes, and an optional CRC record, terminated by End.
/// </summary>
/// <param name="dataVector">Optional external data streams the folder definitions may live in.</param>
/// <param name="folders">Receives the freshly parsed folder list.</param>
private void ReadUnpackInfo(List<byte[]> dataVector, out List<CFolder> folders)
{
    Log.WriteLine("-- ReadUnpackInfo --");
    Log.PushIndent();
    try
    {
        WaitAttribute(BlockType.Folder);
        int numFolders = ReadNum();
        Log.WriteLine("NumFolders: {0}", numFolders);

        using (var switcher = new CStreamSwitch())
        {
            switcher.Set(this, dataVector);

            folders = new List<CFolder>(numFolders);
            // Running total of pack streams assigns each folder its first pack stream id.
            int packStreamId = 0;
            for (int folderIndex = 0; folderIndex < numFolders; folderIndex++)
            {
                var folder = new CFolder { FirstPackStreamId = packStreamId };
                folders.Add(folder);
                GetNextFolderItem(folder);
                packStreamId += folder.PackStreams.Count;
            }
        }

        WaitAttribute(BlockType.CodersUnpackSize);
        Log.WriteLine("UnpackSizes:");
        for (int folderIndex = 0; folderIndex < numFolders; folderIndex++)
        {
            var folder = folders[folderIndex];
            Log.Write(" #" + folderIndex + ":");
            // One unpack size per coder output stream of the folder.
            int outStreamCount = folder.GetNumOutStreams();
            for (int streamIndex = 0; streamIndex < outStreamCount; streamIndex++)
            {
                long size = checked((long)ReadNumber());
                Log.Write(" " + size);
                folder.UnpackSizes.Add(size);
            }
            Log.WriteLine();
        }

        // Trailing records: pick up CRCs if present, skip anything unknown.
        while (true)
        {
            BlockType? recordType = ReadId();
            if (recordType == BlockType.End)
                return;

            if (recordType == BlockType.CRC)
            {
                var crcs = ReadHashDigests(numFolders);
                for (int folderIndex = 0; folderIndex < numFolders; folderIndex++)
                    folders[folderIndex].UnpackCRC = crcs[folderIndex];
            }
            else
            {
                SkipData();
            }
        }
    }
    finally
    {
        Log.PopIndent();
    }
}