/// <summary>
/// Reads an optional 64-bit number per file and feeds each (index, value) pair to
/// <paramref name="action"/>; files absent from the presence bit vector get null.
/// </summary>
/// <param name="dataVector">Optional external data streams the reader may switch to.</param>
/// <param name="numFiles">Number of entries to read.</param>
/// <param name="action">Callback invoked once per file index with the value or null.</param>
private void ReadNumberVector(List<byte[]> dataVector, int numFiles, Action<int, long?> action)
{
    BitVector presence = ReadOptionalBitVector(numFiles);
    using (var switcher = new CStreamSwitch())
    {
        switcher.Set(this, dataVector);
        for (int index = 0; index < numFiles; index++)
        {
            // checked: a stored value above long.MaxValue is invalid data, not a silent wrap.
            long? value = presence[index] ? checked((long)ReadUInt64()) : (long?)null;
            action(index, value);
        }
    }
}
/// <summary>
/// Reads an optional 32-bit attribute per file and feeds each (index, value) pair to
/// <paramref name="action"/>; files absent from the presence bit vector get null.
/// </summary>
/// <param name="dataVector">Optional external data streams the reader may switch to.</param>
/// <param name="numFiles">Number of entries to read.</param>
/// <param name="action">Callback invoked once per file index with the attribute or null.</param>
private void ReadAttributeVector(List<byte[]> dataVector, int numFiles, Action<int, uint?> action)
{
    BitVector presence = ReadOptionalBitVector(numFiles);
    using (var switcher = new CStreamSwitch())
    {
        switcher.Set(this, dataVector);
        for (int index = 0; index < numFiles; index++)
        {
            uint? value = presence[index] ? ReadUInt32() : (uint?)null;
            action(index, value);
        }
    }
}
/// <summary>
/// Parses the 7z header: optional archive properties, optional additional streams,
/// the main streams info, and the per-file metadata records (names, attributes,
/// empty-stream/empty-file/anti flags, timestamps, start positions), then fills
/// each CFileItem in <paramref name="db"/> from the collected vectors.
/// </summary>
/// <param name="db">Archive database being populated; several members are assigned directly.</param>
/// <param name="getTextPassword">Password provider used when packed header streams must be decoded.</param>
/// <exception cref="InvalidOperationException">On unexpected block ids, bad record sizes, or nonzero Dummy padding.</exception>
private void ReadHeader(ArchiveDatabase db, IPasswordProvider getTextPassword)
{
#if DEBUG
    Log.WriteLine("-- ReadHeader --");
    Log.PushIndent();
#endif
    try
    {
        BlockType? type = ReadId();
        // Optional archive-properties block comes first, if present.
        if (type == BlockType.ArchiveProperties)
        {
            ReadArchiveProperties();
            type = ReadId();
        }
        List<byte[]> dataVector = null;
        // Additional streams hold externally packed metadata (e.g. file names).
        if (type == BlockType.AdditionalStreamsInfo)
        {
            dataVector = ReadAndDecodePackedStreams(db.StartPositionAfterHeader, getTextPassword);
            type = ReadId();
        }
        List<long> unpackSizes;
        List<uint?> digests;
        if (type == BlockType.MainStreamsInfo)
        {
            ReadStreamsInfo(dataVector, out db.DataStartPosition, out db.PackSizes, out db.PackCRCs,
                out db.Folders, out db.NumUnpackStreamsVector, out unpackSizes, out digests);
            db.DataStartPosition += db.StartPositionAfterHeader;
            type = ReadId();
        }
        else
        {
            // No main streams info block: assume one unpack stream per folder and
            // derive sizes/CRCs from the folders themselves.
            unpackSizes = new List<long>(db.Folders.Count);
            digests = new List<uint?>(db.Folders.Count);
            db.NumUnpackStreamsVector = new List<int>(db.Folders.Count);
            for (int i = 0; i < db.Folders.Count; i++)
            {
                var folder = db.Folders[i];
                unpackSizes.Add(folder.GetUnpackSize());
                digests.Add(folder.UnpackCRC);
                db.NumUnpackStreamsVector.Add(1);
            }
        }
        db.Files.Clear();
        if (type == BlockType.End)
            return;
        if (type != BlockType.FilesInfo)
            throw new InvalidOperationException();
        int numFiles = ReadNum();
#if DEBUG
        Log.WriteLine("NumFiles: " + numFiles);
#endif
        db.Files = new List<CFileItem>(numFiles);
        for (int i = 0; i < numFiles; i++)
            db.Files.Add(new CFileItem());
        // Default: every file has a stream until an EmptyStream record says otherwise.
        BitVector emptyStreamVector = new BitVector(numFiles);
        BitVector emptyFileVector = null;
        BitVector antiFileVector = null;
        int numEmptyStreams = 0;
        // Property-record loop: each record is (id, size, payload) until End.
        for (; ; )
        {
            type = ReadId();
            if (type == BlockType.End)
                break;
            long size = checked((long)ReadNumber()); // TODO: throw invalid data on negative
            int oldPos = _currentReader.Offset;
            switch (type)
            {
                case BlockType.Name:
                    // Names may live in an external (additional) stream.
                    using (var streamSwitch = new CStreamSwitch())
                    {
                        streamSwitch.Set(this, dataVector);
#if DEBUG
                        Log.Write("FileNames:");
#endif
                        for (int i = 0; i < db.Files.Count; i++)
                        {
                            db.Files[i].Name = _currentReader.ReadString();
#if DEBUG
                            Log.Write(" " + db.Files[i].Name);
#endif
                        }
#if DEBUG
                        Log.WriteLine();
#endif
                    }
                    break;
                case BlockType.WinAttributes:
#if DEBUG
                    Log.Write("WinAttributes:");
#endif
                    ReadAttributeVector(dataVector, numFiles, delegate(int i, uint? attr)
                    {
                        db.Files[i].Attrib = attr;
#if DEBUG
                        Log.Write(" " + (attr.HasValue ? attr.Value.ToString("x8") : "n/a"));
#endif
                    });
#if DEBUG
                    Log.WriteLine();
#endif
                    break;
                case BlockType.EmptyStream:
                    emptyStreamVector = ReadBitVector(numFiles);
#if DEBUG
                    Log.Write("EmptyStream: ");
#endif
                    // Count empty streams; empty-file/anti vectors are indexed by this count.
                    for (int i = 0; i < emptyStreamVector.Length; i++)
                    {
                        if (emptyStreamVector[i])
                        {
#if DEBUG
                            Log.Write("x");
#endif
                            numEmptyStreams++;
                        }
                        else
                        {
#if DEBUG
                            Log.Write(".");
#endif
                        }
                    }
#if DEBUG
                    Log.WriteLine();
#endif
                    emptyFileVector = new BitVector(numEmptyStreams);
                    antiFileVector = new BitVector(numEmptyStreams);
                    break;
                case BlockType.EmptyFile:
                    emptyFileVector = ReadBitVector(numEmptyStreams);
#if DEBUG
                    Log.Write("EmptyFile: ");
                    for (int i = 0; i < numEmptyStreams; i++)
                        Log.Write(emptyFileVector[i] ? "x" : ".");
                    Log.WriteLine();
#endif
                    break;
                case BlockType.Anti:
                    antiFileVector = ReadBitVector(numEmptyStreams);
#if DEBUG
                    Log.Write("Anti: ");
                    for (int i = 0; i < numEmptyStreams; i++)
                        Log.Write(antiFileVector[i] ? "x" : ".");
                    Log.WriteLine();
#endif
                    break;
                case BlockType.StartPos:
#if DEBUG
                    Log.Write("StartPos:");
#endif
                    ReadNumberVector(dataVector, numFiles, delegate(int i, long? startPos)
                    {
                        db.Files[i].StartPos = startPos;
#if DEBUG
                        Log.Write(" " + (startPos.HasValue ? startPos.Value.ToString() : "n/a"));
#endif
                    });
#if DEBUG
                    Log.WriteLine();
#endif
                    break;
                case BlockType.CTime:
#if DEBUG
                    Log.Write("CTime:");
#endif
                    ReadDateTimeVector(dataVector, numFiles, delegate(int i, DateTime? time)
                    {
                        db.Files[i].CTime = time;
#if DEBUG
                        Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
#endif
                    });
#if DEBUG
                    Log.WriteLine();
#endif
                    break;
                case BlockType.ATime:
#if DEBUG
                    Log.Write("ATime:");
#endif
                    ReadDateTimeVector(dataVector, numFiles, delegate(int i, DateTime? time)
                    {
                        db.Files[i].ATime = time;
#if DEBUG
                        Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
#endif
                    });
#if DEBUG
                    Log.WriteLine();
#endif
                    break;
                case BlockType.MTime:
#if DEBUG
                    Log.Write("MTime:");
#endif
                    ReadDateTimeVector(dataVector, numFiles, delegate(int i, DateTime? time)
                    {
                        db.Files[i].MTime = time;
#if DEBUG
                        Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
#endif
                    });
#if DEBUG
                    Log.WriteLine();
#endif
                    break;
                case BlockType.Dummy:
#if DEBUG
                    Log.Write("Dummy: " + size);
#endif
                    // Dummy padding must be all zero bytes.
                    for (long j = 0; j < size; j++)
                        if (ReadByte() != 0)
                            throw new InvalidOperationException();
                    break;
                default:
                    // Unknown record: skip its declared payload.
                    SkipData(size);
                    break;
            }
            // since 0.3 record sizes must be correct
            bool checkRecordsSize = (db.MajorVersion > 0 || db.MinorVersion > 2);
            if (checkRecordsSize && _currentReader.Offset - oldPos != size)
                throw new InvalidOperationException();
        }
        // Distribute sizes/CRCs to files with streams; empty-stream files get
        // their dir/anti flags from the empty-file/anti vectors.
        int emptyFileIndex = 0;
        int sizeIndex = 0;
        for (int i = 0; i < numFiles; i++)
        {
            CFileItem file = db.Files[i];
            file.HasStream = !emptyStreamVector[i];
            if (file.HasStream)
            {
                file.IsDir = false;
                file.IsAnti = false;
                file.Size = unpackSizes[sizeIndex];
                file.Crc = digests[sizeIndex];
                sizeIndex++;
            }
            else
            {
                file.IsDir = !emptyFileVector[emptyFileIndex];
                file.IsAnti = antiFileVector[emptyFileIndex];
                emptyFileIndex++;
                file.Size = 0;
                file.Crc = null;
            }
        }
    }
    finally
    {
#if DEBUG
        Log.PopIndent();
#endif
    }
}
/// <summary>
/// Reads the UnpackInfo section: the folder definitions, each folder's output
/// stream sizes, and (optionally) per-folder unpack CRCs.
/// </summary>
/// <param name="dataVector">Optional external data streams the folder table may live in.</param>
/// <param name="folders">Receives the newly built folder list.</param>
private void ReadUnpackInfo(List<byte[]> dataVector, out List<CFolder> folders)
{
#if DEBUG
    Log.WriteLine("-- ReadUnpackInfo --");
    Log.PushIndent();
#endif
    try
    {
        WaitAttribute(BlockType.Folder);
        int numFolders = ReadNum();
#if DEBUG
        Log.WriteLine("NumFolders: {0}", numFolders);
#endif
        using (CStreamSwitch streamSwitch = new CStreamSwitch())
        {
            streamSwitch.Set(this, dataVector);
            folders = new List<CFolder>(numFolders);
            // Pack streams are laid out consecutively; each folder records the
            // index of its first one, advanced by how many streams it consumes.
            int index = 0;
            for (int i = 0; i < numFolders; i++)
            {
                var f = new CFolder { FirstPackStreamId = index };
                folders.Add(f);
                GetNextFolderItem(f);
                index += f.PackStreams.Count;
            }
        }
        WaitAttribute(BlockType.CodersUnpackSize);
#if DEBUG
        Log.WriteLine("UnpackSizes:");
#endif
        // One unpack size per coder output stream of each folder.
        for (int i = 0; i < numFolders; i++)
        {
            CFolder folder = folders[i];
#if DEBUG
            Log.Write(" #" + i + ":");
#endif
            int numOutStreams = folder.GetNumOutStreams();
            for (int j = 0; j < numOutStreams; j++)
            {
                long size = checked((long)ReadNumber());
#if DEBUG
                Log.Write(" " + size);
#endif
                folder.UnpackSizes.Add(size);
            }
#if DEBUG
            Log.WriteLine();
#endif
        }
        // Trailing optional records: only CRC is understood, others are skipped.
        for (; ; )
        {
            BlockType? type = ReadId();
            if (type == BlockType.End)
                return;
            if (type == BlockType.CRC)
            {
                List<uint?> crcs = ReadHashDigests(numFolders);
                for (int i = 0; i < numFolders; i++)
                    folders[i].UnpackCRC = crcs[i];
                continue;
            }
            SkipData();
        }
    }
    finally
    {
#if DEBUG
        Log.PopIndent();
#endif
    }
}
/// <summary>
/// Reads and validates the 7z signature header (already buffered in _header),
/// locates the next-header area, verifies both CRCs, decodes an encoded header
/// if necessary, and parses the archive database.
/// </summary>
/// <param name="pass">Password provider used when the header itself is encrypted/packed.</param>
/// <returns>The populated archive database.</returns>
/// <exception cref="InvalidOperationException">On version/CRC/structure mismatch.</exception>
/// <exception cref="IndexOutOfRangeException">If the next-header offset points past the stream end.</exception>
public ArchiveDatabase ReadDatabase(IPasswordProvider pass)
{
    var db = new ArchiveDatabase();
    db.Clear();
    // Signature header layout: [6]=major, [7]=minor, then start-header fields below.
    db.MajorVersion = _header[6];
    db.MinorVersion = _header[7];
    if (db.MajorVersion != 0)
        throw new InvalidOperationException();
    uint crcFromArchive = DataReader.Get32(_header, 8);
    long nextHeaderOffset = (long)DataReader.Get64(_header, 0xC);
    long nextHeaderSize = (long)DataReader.Get64(_header, 0x14);
    uint nextHeaderCrc = DataReader.Get32(_header, 0x1C);
    // The start-header CRC covers offset, size, and the next-header CRC fields.
    uint crc = CRC.kInitCRC;
    crc = CRC.Update(crc, nextHeaderOffset);
    crc = CRC.Update(crc, nextHeaderSize);
    crc = CRC.Update(crc, nextHeaderCrc);
    crc = CRC.Finish(crc);
    if (crc != crcFromArchive)
        throw new InvalidOperationException();
    db.StartPositionAfterHeader = _streamOrigin + 0x20;
    // empty header is ok
    if (nextHeaderSize == 0)
    {
        db.Fill();
        return db;
    }
    if (nextHeaderOffset < 0 || nextHeaderSize < 0 || nextHeaderSize > Int32.MaxValue)
        throw new InvalidOperationException();
    if (nextHeaderOffset > _streamEnding - db.StartPositionAfterHeader)
        throw new IndexOutOfRangeException();
    _stream.Seek(nextHeaderOffset, SeekOrigin.Current);
    byte[] header = new byte[nextHeaderSize];
    _stream.ReadExact(header, 0, header.Length);
    // Verify the next header's own CRC before parsing it.
    if (CRC.Finish(CRC.Update(CRC.kInitCRC, header, 0, header.Length)) != nextHeaderCrc)
        throw new InvalidOperationException();
    using (CStreamSwitch streamSwitch = new CStreamSwitch())
    {
        streamSwitch.Set(this, header);
        BlockType? type = ReadId();
        if (type != BlockType.Header)
        {
            // Not a plain header: must be an encoded (packed/encrypted) header.
            if (type != BlockType.EncodedHeader)
                throw new InvalidOperationException();
            var dataVector = ReadAndDecodePackedStreams(db.StartPositionAfterHeader, pass);
            // compressed header without content is odd but ok
            if (dataVector.Count == 0)
            {
                db.Fill();
                return db;
            }
            if (dataVector.Count != 1)
                throw new InvalidOperationException();
            streamSwitch.Set(this, dataVector[0]);
            if (ReadId() != BlockType.Header)
                throw new InvalidOperationException();
        }
        ReadHeader(db, pass);
    }
    db.Fill();
    return db;
}
/// <summary>
/// Reads an optional 64-bit number per file and invokes <paramref name="action"/>
/// with the value, or with null for files absent from the presence bit vector.
/// </summary>
private void ReadNumberVector(List<byte[]> dataVector, int numFiles, Action<int, long?> action)
{
    BitVector defined = this.ReadOptionalBitVector(numFiles);
    using (var streamSwitch = new CStreamSwitch())
    {
        streamSwitch.Set(this, dataVector);
        for (int fileIndex = 0; fileIndex < numFiles; fileIndex++)
        {
            if (defined[fileIndex])
            {
                // NOTE(review): unchecked ulong->long cast, matching the original;
                // values above long.MaxValue wrap rather than throw.
                action(fileIndex, (long?)(long)this.ReadUInt64());
            }
            else
            {
                action(fileIndex, null);
            }
        }
    }
}
/// <summary>
/// Parses the 7z header: optional archive properties, optional additional
/// streams, the main streams info, then the per-file metadata records
/// (names, attributes, empty-stream/empty-file/anti flags, start positions,
/// timestamps), finally distributing sizes and CRCs onto each CFileItem.
/// Structured rewrite of the decompiled goto-based control flow; behavior
/// (including unconditional logging and the unchecked size cast) is unchanged.
/// </summary>
private void ReadHeader(ArchiveDatabase db, IPasswordProvider getTextPassword)
{
    SharpCompress.Compressor.LZMA.Log.WriteLine("-- ReadHeader --");
    SharpCompress.Compressor.LZMA.Log.PushIndent(" ");
    try
    {
        BlockType? blockId = this.ReadId();
        if (blockId == BlockType.ArchiveProperties)
        {
            this.ReadArchiveProperties();
            blockId = this.ReadId();
        }
        List<byte[]> dataVector = null;
        if (blockId == BlockType.AdditionalStreamsInfo)
        {
            dataVector = this.ReadAndDecodePackedStreams(db.StartPositionAfterHeader, getTextPassword);
            blockId = this.ReadId();
        }
        List<long> unpackSizes;
        List<uint?> digests;
        if (blockId == BlockType.MainStreamsInfo)
        {
            this.ReadStreamsInfo(dataVector, out db.DataStartPosition, out db.PackSizes, out db.PackCRCs,
                out db.Folders, out db.NumUnpackStreamsVector, out unpackSizes, out digests);
            db.DataStartPosition += db.StartPositionAfterHeader;
            blockId = this.ReadId();
        }
        else
        {
            // No main streams info: assume one unpack stream per folder.
            unpackSizes = new List<long>(db.Folders.Count);
            digests = new List<uint?>(db.Folders.Count);
            db.NumUnpackStreamsVector = new List<int>(db.Folders.Count);
            for (int i = 0; i < db.Folders.Count; i++)
            {
                CFolder folder = db.Folders[i];
                unpackSizes.Add(folder.GetUnpackSize());
                digests.Add(folder.UnpackCRC);
                db.NumUnpackStreamsVector.Add(1);
            }
        }
        db.Files.Clear();
        if (blockId == BlockType.End)
        {
            return;
        }
        if (blockId != BlockType.FilesInfo)
        {
            throw new InvalidOperationException();
        }
        int numFiles = this.ReadNum();
        SharpCompress.Compressor.LZMA.Log.WriteLine("NumFiles: " + numFiles);
        db.Files = new List<CFileItem>(numFiles);
        for (int i = 0; i < numFiles; i++)
        {
            db.Files.Add(new CFileItem());
        }
        // Default: every file has a stream until an EmptyStream record says otherwise.
        BitVector emptyStreamVector = new BitVector(numFiles);
        BitVector emptyFileVector = null;
        BitVector antiFileVector = null;
        int numEmptyStreams = 0;
        // Property-record loop: each record is (id, size, payload) until End.
        for (;;)
        {
            blockId = this.ReadId();
            if (blockId == BlockType.End)
            {
                break;
            }
            // NOTE(review): unchecked narrowing cast, as in the original.
            long size = (long)this.ReadNumber();
            int oldPos = this._currentReader.Offset;
            if (!blockId.HasValue)
            {
                this.SkipData(size);
            }
            else
            {
                switch (blockId.Value)
                {
                    case BlockType.Name:
                        using (CStreamSwitch streamSwitch = new CStreamSwitch())
                        {
                            streamSwitch.Set(this, dataVector);
                            SharpCompress.Compressor.LZMA.Log.Write("FileNames:");
                            for (int i = 0; i < db.Files.Count; i++)
                            {
                                db.Files[i].Name = this._currentReader.ReadString();
                                SharpCompress.Compressor.LZMA.Log.Write(" " + db.Files[i].Name);
                            }
                            SharpCompress.Compressor.LZMA.Log.WriteLine();
                        }
                        break;
                    case BlockType.WinAttributes:
                        SharpCompress.Compressor.LZMA.Log.Write("WinAttributes:");
                        this.ReadAttributeVector(dataVector, numFiles, delegate(int i, uint? attr)
                        {
                            db.Files[i].Attrib = attr;
                            SharpCompress.Compressor.LZMA.Log.Write(" " + (attr.HasValue ? attr.Value.ToString("x8") : "n/a"));
                        });
                        SharpCompress.Compressor.LZMA.Log.WriteLine();
                        break;
                    case BlockType.EmptyStream:
                        emptyStreamVector = this.ReadBitVector(numFiles);
                        SharpCompress.Compressor.LZMA.Log.Write("EmptyStream: ");
                        // Count empty streams; empty-file/anti vectors use this count.
                        for (int i = 0; i < emptyStreamVector.Length; i++)
                        {
                            if (emptyStreamVector[i])
                            {
                                SharpCompress.Compressor.LZMA.Log.Write("x");
                                numEmptyStreams++;
                            }
                            else
                            {
                                SharpCompress.Compressor.LZMA.Log.Write(".");
                            }
                        }
                        SharpCompress.Compressor.LZMA.Log.WriteLine();
                        emptyFileVector = new BitVector(numEmptyStreams);
                        antiFileVector = new BitVector(numEmptyStreams);
                        break;
                    case BlockType.EmptyFile:
                        emptyFileVector = this.ReadBitVector(numEmptyStreams);
                        SharpCompress.Compressor.LZMA.Log.Write("EmptyFile: ");
                        for (int i = 0; i < numEmptyStreams; i++)
                        {
                            SharpCompress.Compressor.LZMA.Log.Write(emptyFileVector[i] ? "x" : ".");
                        }
                        SharpCompress.Compressor.LZMA.Log.WriteLine();
                        break;
                    case BlockType.Anti:
                        antiFileVector = this.ReadBitVector(numEmptyStreams);
                        SharpCompress.Compressor.LZMA.Log.Write("Anti: ");
                        for (int i = 0; i < numEmptyStreams; i++)
                        {
                            SharpCompress.Compressor.LZMA.Log.Write(antiFileVector[i] ? "x" : ".");
                        }
                        SharpCompress.Compressor.LZMA.Log.WriteLine();
                        break;
                    case BlockType.StartPos:
                        SharpCompress.Compressor.LZMA.Log.Write("StartPos:");
                        this.ReadNumberVector(dataVector, numFiles, delegate(int i, long? startPos)
                        {
                            db.Files[i].StartPos = startPos;
                            SharpCompress.Compressor.LZMA.Log.Write(" " + (startPos.HasValue ? startPos.Value.ToString() : "n/a"));
                        });
                        SharpCompress.Compressor.LZMA.Log.WriteLine();
                        break;
                    case BlockType.CTime:
                        SharpCompress.Compressor.LZMA.Log.Write("CTime:");
                        this.ReadDateTimeVector(dataVector, numFiles, delegate(int i, DateTime? time)
                        {
                            db.Files[i].CTime = time;
                            SharpCompress.Compressor.LZMA.Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
                        });
                        SharpCompress.Compressor.LZMA.Log.WriteLine();
                        break;
                    case BlockType.ATime:
                        SharpCompress.Compressor.LZMA.Log.Write("ATime:");
                        this.ReadDateTimeVector(dataVector, numFiles, delegate(int i, DateTime? time)
                        {
                            db.Files[i].ATime = time;
                            SharpCompress.Compressor.LZMA.Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
                        });
                        SharpCompress.Compressor.LZMA.Log.WriteLine();
                        break;
                    case BlockType.MTime:
                        SharpCompress.Compressor.LZMA.Log.Write("MTime:");
                        this.ReadDateTimeVector(dataVector, numFiles, delegate(int i, DateTime? time)
                        {
                            db.Files[i].MTime = time;
                            SharpCompress.Compressor.LZMA.Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
                        });
                        SharpCompress.Compressor.LZMA.Log.WriteLine();
                        break;
                    case BlockType.Dummy:
                        SharpCompress.Compressor.LZMA.Log.Write("Dummy: " + size);
                        // Dummy padding must be all zero bytes.
                        for (long j = 0; j < size; j++)
                        {
                            if (this.ReadByte() != 0)
                            {
                                throw new InvalidOperationException();
                            }
                        }
                        break;
                    default:
                        this.SkipData(size);
                        break;
                }
            }
            // Since format 0.3 every record's declared size must match what was consumed.
            if ((db.MajorVersion > 0 || db.MinorVersion > 2) && this._currentReader.Offset - oldPos != size)
            {
                throw new InvalidOperationException();
            }
        }
        // Distribute sizes/CRCs to files with streams; empty-stream entries get
        // their dir/anti flags from the empty-file/anti vectors instead.
        int emptyFileIndex = 0;
        int sizeIndex = 0;
        for (int i = 0; i < numFiles; i++)
        {
            CFileItem file = db.Files[i];
            file.HasStream = !emptyStreamVector[i];
            if (file.HasStream)
            {
                file.IsDir = false;
                file.IsAnti = false;
                file.Size = unpackSizes[sizeIndex];
                file.Crc = digests[sizeIndex];
                sizeIndex++;
            }
            else
            {
                file.IsDir = !emptyFileVector[emptyFileIndex];
                file.IsAnti = antiFileVector[emptyFileIndex];
                emptyFileIndex++;
                file.Size = 0L;
                file.Crc = null;
            }
        }
    }
    finally
    {
        SharpCompress.Compressor.LZMA.Log.PopIndent();
    }
}
/// <summary>
/// Reads an optional 32-bit attribute per file and invokes <paramref name="action"/>
/// with the value, or with null for files absent from the presence bit vector.
/// </summary>
private void ReadAttributeVector(List<byte[]> dataVector, int numFiles, Action<int, uint?> action)
{
    BitVector defined = this.ReadOptionalBitVector(numFiles);
    using (var streamSwitch = new CStreamSwitch())
    {
        streamSwitch.Set(this, dataVector);
        for (int fileIndex = 0; fileIndex < numFiles; fileIndex++)
        {
            action(fileIndex, defined[fileIndex] ? (uint?)this.ReadUInt32() : null);
        }
    }
}
/// <summary>
/// Reads and validates the buffered 7z signature header, verifies both start-header
/// and next-header CRCs, decodes an encoded header when present, and parses the
/// archive database. Behavior is identical to the decompiled original; locals are
/// renamed and the nested CRC chain unrolled for readability.
/// </summary>
public ArchiveDatabase ReadDatabase(IPasswordProvider pass)
{
    ArchiveDatabase db = new ArchiveDatabase();
    db.Clear();
    // Signature header layout: [6]=major, [7]=minor, then the start-header fields.
    db.MajorVersion = this._header[6];
    db.MinorVersion = this._header[7];
    if (db.MajorVersion != 0)
    {
        throw new InvalidOperationException();
    }
    uint crcFromArchive = DataReader.Get32(this._header, 8);
    long nextHeaderOffset = (long)DataReader.Get64(this._header, 12);
    long nextHeaderSize = (long)DataReader.Get64(this._header, 20);
    uint nextHeaderCrc = DataReader.Get32(this._header, 0x1c);
    // Start-header CRC covers offset, size, and the next-header CRC fields.
    uint crc = uint.MaxValue;
    crc = CRC.Update(crc, nextHeaderOffset);
    crc = CRC.Update(crc, nextHeaderSize);
    crc = CRC.Update(crc, nextHeaderCrc);
    if (CRC.Finish(crc) != crcFromArchive)
    {
        throw new InvalidOperationException();
    }
    db.StartPositionAfterHeader = this._streamOrigin + 0x20L;
    // An empty next header is a valid (empty) archive.
    if (nextHeaderSize == 0L)
    {
        db.Fill();
        return db;
    }
    if (nextHeaderOffset < 0L || nextHeaderSize < 0L || nextHeaderSize > 0x7fffffffL)
    {
        throw new InvalidOperationException();
    }
    if (nextHeaderOffset > this._streamEnding - db.StartPositionAfterHeader)
    {
        throw new IndexOutOfRangeException();
    }
    this._stream.Seek(nextHeaderOffset, SeekOrigin.Current);
    byte[] header = new byte[nextHeaderSize];
    Utils.ReadExact(this._stream, header, 0, header.Length);
    // Verify the next header's own CRC before parsing it.
    if (CRC.Finish(CRC.Update(uint.MaxValue, header, 0, header.Length)) != nextHeaderCrc)
    {
        throw new InvalidOperationException();
    }
    using (CStreamSwitch streamSwitch = new CStreamSwitch())
    {
        streamSwitch.Set(this, header);
        BlockType? blockId = this.ReadId();
        if (blockId != BlockType.Header)
        {
            // Not a plain header: must be an encoded (packed/encrypted) header.
            if (blockId != BlockType.EncodedHeader)
            {
                throw new InvalidOperationException();
            }
            List<byte[]> dataVector = this.ReadAndDecodePackedStreams(db.StartPositionAfterHeader, pass);
            // A compressed header without content is odd but acceptable.
            if (dataVector.Count == 0)
            {
                db.Fill();
                return db;
            }
            if (dataVector.Count != 1)
            {
                throw new InvalidOperationException();
            }
            streamSwitch.Set(this, dataVector[0]);
            if (this.ReadId() != BlockType.Header)
            {
                throw new InvalidOperationException();
            }
        }
        this.ReadHeader(db, pass);
    }
    db.Fill();
    return db;
}
/// <summary>
/// Reads the UnpackInfo section: folder definitions, each folder's output stream
/// sizes, and optional per-folder unpack CRCs. Structured rewrite of the
/// decompiled goto-based loop; behavior (including unconditional logging and the
/// unchecked size cast) is unchanged.
/// </summary>
private void ReadUnpackInfo(List<byte[]> dataVector, out List<CFolder> folders)
{
    SharpCompress.Compressor.LZMA.Log.WriteLine("-- ReadUnpackInfo --");
    SharpCompress.Compressor.LZMA.Log.PushIndent(" ");
    try
    {
        this.WaitAttribute(BlockType.Folder);
        int numFolders = this.ReadNum();
        SharpCompress.Compressor.LZMA.Log.WriteLine("NumFolders: {0}", new object[] { numFolders });
        using (CStreamSwitch streamSwitch = new CStreamSwitch())
        {
            streamSwitch.Set(this, dataVector);
            folders = new List<CFolder>(numFolders);
            // Pack streams are consecutive; each folder notes the index of its
            // first one, advanced by how many streams it consumes.
            int firstPackStreamId = 0;
            for (int i = 0; i < numFolders; i++)
            {
                CFolder folder = new CFolder();
                folder.FirstPackStreamId = firstPackStreamId;
                folders.Add(folder);
                this.GetNextFolderItem(folder);
                firstPackStreamId += folder.PackStreams.Count;
            }
        }
        this.WaitAttribute(BlockType.CodersUnpackSize);
        SharpCompress.Compressor.LZMA.Log.WriteLine("UnpackSizes:");
        // One unpack size per coder output stream of each folder.
        for (int i = 0; i < numFolders; i++)
        {
            CFolder folder = folders[i];
            SharpCompress.Compressor.LZMA.Log.Write(" #" + i + ":");
            int numOutStreams = folder.GetNumOutStreams();
            for (int j = 0; j < numOutStreams; j++)
            {
                // NOTE(review): unchecked narrowing cast, as in the original.
                long unpackSize = (long)this.ReadNumber();
                SharpCompress.Compressor.LZMA.Log.Write(" " + unpackSize);
                folder.UnpackSizes.Add(unpackSize);
            }
            SharpCompress.Compressor.LZMA.Log.WriteLine();
        }
        // Trailing optional records: only CRC is understood, others are skipped.
        for (;;)
        {
            BlockType? blockId = this.ReadId();
            if (blockId == BlockType.End)
            {
                return;
            }
            if (blockId == BlockType.CRC)
            {
                List<uint?> crcs = this.ReadHashDigests(numFolders);
                for (int i = 0; i < numFolders; i++)
                {
                    folders[i].UnpackCRC = crcs[i];
                }
            }
            else
            {
                this.SkipData();
            }
        }
    }
    finally
    {
        SharpCompress.Compressor.LZMA.Log.PopIndent();
    }
}