/// <summary>
/// Wraps a single 7z archive entry; when the entry owns packed data, resolves
/// the folder that contains that data via the file-to-folder index map.
/// </summary>
internal SevenZipFilePart(Stream stream, ArchiveDatabase database, int index, CFileItem fileEntry)
{
    this.stream = stream;
    this.database = database;
    Index = index;
    Header = fileEntry;

    // Entries without a stream (directories, empty files) have no backing folder.
    if (fileEntry.HasStream)
    {
        int folderIndex = database.FileIndexToFolderIndexMap[index];
        Folder = database.Folders[folderIndex];
    }
}
/// <summary>
/// Wraps a single 7z archive entry, forwarding the archive encoding to the
/// base part; when the entry owns packed data, resolves its containing folder.
/// </summary>
internal SevenZipFilePart(Stream stream, ArchiveDatabase database, int index, CFileItem fileEntry, ArchiveEncoding archiveEncoding)
    : base(archiveEncoding)
{
    _stream = stream;
    _database = database;
    Index = index;
    Header = fileEntry;

    // Entries without a stream (directories, empty files) have no backing folder.
    if (fileEntry.HasStream)
    {
        int folderIndex = database._fileIndexToFolderIndexMap[index];
        Folder = database._folders[folderIndex];
    }
}
/// <summary>
/// Parses the 7z "Header" block into <paramref name="db"/>: the optional
/// archive-properties / additional-streams / main-streams sections, then the
/// FilesInfo property records (names, attributes, empty-stream/empty-file/anti
/// bit vectors, timestamps, start positions, dummy padding). Finally back-fills
/// per-file HasStream/IsDir/IsAnti/Size/Crc from the collected vectors.
/// </summary>
/// <param name="db">Database being populated in place.</param>
/// <param name="getTextPassword">Password source used when header data itself is packed.</param>
/// <exception cref="InvalidOperationException">On malformed or inconsistent header data.</exception>
private void ReadHeader(ArchiveDatabase db, IPasswordProvider getTextPassword)
{
#if DEBUG
    Log.WriteLine("-- ReadHeader --");
    Log.PushIndent();
#endif
    try
    {
        BlockType? type = ReadId();

        if (type == BlockType.ArchiveProperties)
        {
            ReadArchiveProperties();
            type = ReadId();
        }

        List<byte[]> dataVector = null;
        if (type == BlockType.AdditionalStreamsInfo)
        {
            dataVector = ReadAndDecodePackedStreams(db.StartPositionAfterHeader, getTextPassword);
            type = ReadId();
        }

        List<long> unpackSizes;
        List<uint?> digests;

        if (type == BlockType.MainStreamsInfo)
        {
            ReadStreamsInfo(dataVector, out db.DataStartPosition, out db.PackSizes, out db.PackCRCs,
                            out db.Folders, out db.NumUnpackStreamsVector, out unpackSizes, out digests);

            db.DataStartPosition += db.StartPositionAfterHeader;
            type = ReadId();
        }
        else
        {
            // No main streams info: synthesize exactly one unpacked stream per folder.
            unpackSizes = new List<long>(db.Folders.Count);
            digests = new List<uint?>(db.Folders.Count);
            db.NumUnpackStreamsVector = new List<int>(db.Folders.Count);
            for (int i = 0; i < db.Folders.Count; i++)
            {
                var folder = db.Folders[i];
                unpackSizes.Add(folder.GetUnpackSize());
                digests.Add(folder.UnpackCRC);
                db.NumUnpackStreamsVector.Add(1);
            }
        }

        db.Files.Clear();

        if (type == BlockType.End)
            return;
        if (type != BlockType.FilesInfo)
            throw new InvalidOperationException();

        int numFiles = ReadNum();
#if DEBUG
        Log.WriteLine("NumFiles: " + numFiles);
#endif
        db.Files = new List<CFileItem>(numFiles);
        for (int i = 0; i < numFiles; i++)
            db.Files.Add(new CFileItem());

        BitVector emptyStreamVector = new BitVector(numFiles);
        BitVector emptyFileVector = null;
        BitVector antiFileVector = null;
        int numEmptyStreams = 0;

        // Property-record loop: each record carries its own id and byte size.
        for (; ; )
        {
            type = ReadId();
            if (type == BlockType.End)
                break;

            long size = checked((long)ReadNumber());

            // FIX (was a TODO): a negative record size is invalid data; previously it
            // propagated into SkipData and the record-size consistency check below.
            if (size < 0)
                throw new InvalidOperationException();

            int oldPos = _currentReader.Offset;
            switch (type)
            {
                case BlockType.Name:
                    // Names may live in an external (additional) data stream.
                    using (var streamSwitch = new CStreamSwitch())
                    {
                        streamSwitch.Set(this, dataVector);
#if DEBUG
                        Log.Write("FileNames:");
#endif
                        for (int i = 0; i < db.Files.Count; i++)
                        {
                            db.Files[i].Name = _currentReader.ReadString();
#if DEBUG
                            Log.Write(" " + db.Files[i].Name);
#endif
                        }
#if DEBUG
                        Log.WriteLine();
#endif
                    }
                    break;
                case BlockType.WinAttributes:
#if DEBUG
                    Log.Write("WinAttributes:");
#endif
                    ReadAttributeVector(dataVector, numFiles, delegate(int i, uint? attr)
                    {
                        db.Files[i].Attrib = attr;
#if DEBUG
                        Log.Write(" " + (attr.HasValue ? attr.Value.ToString("x8") : "n/a"));
#endif
                    });
#if DEBUG
                    Log.WriteLine();
#endif
                    break;
                case BlockType.EmptyStream:
                    emptyStreamVector = ReadBitVector(numFiles);
#if DEBUG
                    Log.Write("EmptyStream: ");
#endif
                    for (int i = 0; i < emptyStreamVector.Length; i++)
                    {
                        if (emptyStreamVector[i])
                        {
#if DEBUG
                            Log.Write("x");
#endif
                            numEmptyStreams++;
                        }
                        else
                        {
#if DEBUG
                            Log.Write(".");
#endif
                        }
                    }
#if DEBUG
                    Log.WriteLine();
#endif
                    // EmptyFile/Anti vectors are indexed by empty-stream ordinal.
                    emptyFileVector = new BitVector(numEmptyStreams);
                    antiFileVector = new BitVector(numEmptyStreams);
                    break;
                case BlockType.EmptyFile:
                    emptyFileVector = ReadBitVector(numEmptyStreams);
#if DEBUG
                    Log.Write("EmptyFile: ");
                    for (int i = 0; i < numEmptyStreams; i++)
                        Log.Write(emptyFileVector[i] ? "x" : ".");
                    Log.WriteLine();
#endif
                    break;
                case BlockType.Anti:
                    antiFileVector = ReadBitVector(numEmptyStreams);
#if DEBUG
                    Log.Write("Anti: ");
                    for (int i = 0; i < numEmptyStreams; i++)
                        Log.Write(antiFileVector[i] ? "x" : ".");
                    Log.WriteLine();
#endif
                    break;
                case BlockType.StartPos:
#if DEBUG
                    Log.Write("StartPos:");
#endif
                    ReadNumberVector(dataVector, numFiles, delegate(int i, long? startPos)
                    {
                        db.Files[i].StartPos = startPos;
#if DEBUG
                        Log.Write(" " + (startPos.HasValue ? startPos.Value.ToString() : "n/a"));
#endif
                    });
#if DEBUG
                    Log.WriteLine();
#endif
                    break;
                case BlockType.CTime:
#if DEBUG
                    Log.Write("CTime:");
#endif
                    ReadDateTimeVector(dataVector, numFiles, delegate(int i, DateTime? time)
                    {
                        db.Files[i].CTime = time;
#if DEBUG
                        Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
#endif
                    });
#if DEBUG
                    Log.WriteLine();
#endif
                    break;
                case BlockType.ATime:
#if DEBUG
                    Log.Write("ATime:");
#endif
                    ReadDateTimeVector(dataVector, numFiles, delegate(int i, DateTime? time)
                    {
                        db.Files[i].ATime = time;
#if DEBUG
                        Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
#endif
                    });
#if DEBUG
                    Log.WriteLine();
#endif
                    break;
                case BlockType.MTime:
#if DEBUG
                    Log.Write("MTime:");
#endif
                    ReadDateTimeVector(dataVector, numFiles, delegate(int i, DateTime? time)
                    {
                        db.Files[i].MTime = time;
#if DEBUG
                        Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
#endif
                    });
#if DEBUG
                    Log.WriteLine();
#endif
                    break;
                case BlockType.Dummy:
#if DEBUG
                    Log.Write("Dummy: " + size);
#endif
                    // Padding record: every byte must be zero.
                    for (long j = 0; j < size; j++)
                        if (ReadByte() != 0)
                            throw new InvalidOperationException();
                    break;
                default:
                    SkipData(size);
                    break;
            }

            // since 0.3 record sizes must be correct
            bool checkRecordsSize = (db.MajorVersion > 0 || db.MinorVersion > 2);
            if (checkRecordsSize && _currentReader.Offset - oldPos != size)
                throw new InvalidOperationException();
        }

        // Back-fill per-file data: stream-bearing files consume the next unpack
        // size/digest; stream-less entries consume the next empty-file/anti bits.
        int emptyFileIndex = 0;
        int sizeIndex = 0;
        for (int i = 0; i < numFiles; i++)
        {
            CFileItem file = db.Files[i];
            file.HasStream = !emptyStreamVector[i];
            if (file.HasStream)
            {
                file.IsDir = false;
                file.IsAnti = false;
                file.Size = unpackSizes[sizeIndex];
                file.Crc = digests[sizeIndex];
                sizeIndex++;
            }
            else
            {
                file.IsDir = !emptyFileVector[emptyFileIndex];
                file.IsAnti = antiFileVector[emptyFileIndex];
                emptyFileIndex++;
                file.Size = 0;
                file.Crc = null;
            }
        }
    }
    finally
    {
#if DEBUG
        Log.PopIndent();
#endif
    }
}
/// <summary>
/// Decodes whole folders and writes the selected entries out through a
/// <c>FolderUnpackStream</c>. Passing <c>null</c> for <paramref name="indices"/>
/// means "extract every file".
/// </summary>
public void Extract(ArchiveDatabase _db, int[] indices, IPasswordProvider pw)
{
    bool allFilesMode = indices == null;
    int numItems = allFilesMode ? _db.Files.Count : indices.Length;
    if (numItems == 0)
    {
        return;
    }

    // Group the requested files by the folder that holds them; for each folder
    // record which of its member files should actually be emitted.
    var extractFolderInfoVector = new List<CExtractFolderInfo>();
    for (int item = 0; item < numItems; item++)
    {
        int fileIndex = allFilesMode ? item : indices[item];
        int folderIndex = _db.FileIndexToFolderIndexMap[fileIndex];
        if (folderIndex == -1)
        {
            // Entry without a stream (empty file / directory / anti-file).
            extractFolderInfoVector.Add(new CExtractFolderInfo(fileIndex, -1));
            continue;
        }

        bool startsNewFolder = extractFolderInfoVector.Count == 0
                               || folderIndex != extractFolderInfoVector.Last().FolderIndex;
        if (startsNewFolder)
        {
            extractFolderInfoVector.Add(new CExtractFolderInfo(-1, folderIndex));
        }

        CExtractFolderInfo efi = extractFolderInfoVector.Last();
        int firstFileInFolder = _db.FolderStartFileIndex[folderIndex];
        for (int k = efi.ExtractStatuses.Count; k <= fileIndex - firstFileInFolder; k++)
        {
            efi.ExtractStatuses.Add(k == fileIndex - firstFileInFolder);
        }
    }

    foreach (CExtractFolderInfo efi in extractFolderInfoVector)
    {
        int startIndex = efi.FileIndex != -1
            ? efi.FileIndex
            : _db.FolderStartFileIndex[efi.FolderIndex];

        // NOTE(review): constructed even for stream-less entries that are skipped
        // just below; the visible constructor only stores fields, so this appears
        // to be wasted work — confirm before reordering.
        var outStream = new FolderUnpackStream(_db, 0, startIndex, efi.ExtractStatuses);
        if (efi.FileIndex != -1)
        {
            continue;
        }

        int folderIndex = efi.FolderIndex;
        CFolder folderInfo = _db.Folders[folderIndex];
        int packStreamIndex = _db.Folders[folderIndex].FirstPackStreamId;
        long folderStartPackPos = _db.GetFolderStreamPos(folderInfo, 0);

        var packSizes = new List<long>();
        for (int j = 0; j < folderInfo.PackStreams.Count; j++)
        {
            packSizes.Add(_db.PackSizes[packStreamIndex + j]);
        }

        // TODO: If the decoding fails the last file may be extracted incompletely. Delete it?
        Stream decoded = DecoderStreamHelper.CreateDecoderStream(
            _stream, folderStartPackPos, packSizes.ToArray(), folderInfo, pw);

        // Pump the decoded folder through the unpack stream in 4 KiB chunks.
        var buffer = new byte[4 << 10];
        while (true)
        {
            int read = decoded.Read(buffer, 0, buffer.Length);
            if (read == 0)
            {
                break;
            }
            outStream.Write(buffer, 0, read);
        }
    }
}
/// <summary>
/// Returns the decoder stream for a folder, creating and caching it on first
/// request so that multiple files in the same folder share one decode pass.
/// </summary>
private Stream GetCachedDecoderStream(ArchiveDatabase _db, int folderIndex, IPasswordProvider pw)
{
    Stream s;
    if (_cachedStreams.TryGetValue(folderIndex, out s))
    {
        return s;
    }

    CFolder folderInfo = _db.Folders[folderIndex];
    int packStreamIndex = folderInfo.FirstPackStreamId;
    long folderStartPackPos = _db.GetFolderStreamPos(folderInfo, 0);

    // Collect the sizes of every packed stream that feeds this folder.
    var packSizes = new List<long>();
    for (int j = 0; j < folderInfo.PackStreams.Count; j++)
    {
        packSizes.Add(_db.PackSizes[packStreamIndex + j]);
    }

    s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes.ToArray(), folderInfo, pw);
    _cachedStreams.Add(folderIndex, s);
    return s;
}
/// <summary>
/// Reads the 7z signature-header trailer, CRC-validates both the trailer and
/// the "next header", transparently unpacks an encoded header, and parses the
/// result into a fresh <see cref="ArchiveDatabase"/>.
/// </summary>
public ArchiveDatabase ReadDatabase(IPasswordProvider pass)
{
    var db = new ArchiveDatabase();
    db.Clear();

    db.MajorVersion = _header[6];
    db.MinorVersion = _header[7];
    if (db.MajorVersion != 0)
    {
        throw new InvalidOperationException();
    }

    uint crcFromArchive = DataReader.Get32(_header, 8);
    long nextHeaderOffset = (long)DataReader.Get64(_header, 0xC);
    long nextHeaderSize = (long)DataReader.Get64(_header, 0x14);
    uint nextHeaderCrc = DataReader.Get32(_header, 0x1C);

    // The three "next header" fields are themselves CRC-protected.
    uint computedCrc = CRC.kInitCRC;
    computedCrc = CRC.Update(computedCrc, nextHeaderOffset);
    computedCrc = CRC.Update(computedCrc, nextHeaderSize);
    computedCrc = CRC.Update(computedCrc, nextHeaderCrc);
    computedCrc = CRC.Finish(computedCrc);
    if (computedCrc != crcFromArchive)
    {
        throw new InvalidOperationException();
    }

    db.StartPositionAfterHeader = _streamOrigin + 0x20;

    // empty header is ok
    if (nextHeaderSize == 0)
    {
        db.Fill();
        return db;
    }

    if (nextHeaderOffset < 0 || nextHeaderSize < 0 || nextHeaderSize > Int32.MaxValue)
    {
        throw new InvalidOperationException();
    }
    if (nextHeaderOffset > _streamEnding - db.StartPositionAfterHeader)
    {
        throw new IndexOutOfRangeException();
    }

    _stream.Seek(nextHeaderOffset, SeekOrigin.Current);

    byte[] headerBytes = new byte[nextHeaderSize];
    _stream.ReadExact(headerBytes, 0, headerBytes.Length);
    if (CRC.Finish(CRC.Update(CRC.kInitCRC, headerBytes, 0, headerBytes.Length)) != nextHeaderCrc)
    {
        throw new InvalidOperationException();
    }

    using (var streamSwitch = new CStreamSwitch())
    {
        streamSwitch.Set(this, headerBytes);

        BlockType? type = ReadId();
        if (type != BlockType.Header)
        {
            if (type != BlockType.EncodedHeader)
            {
                throw new InvalidOperationException();
            }

            var dataVector = ReadAndDecodePackedStreams(db.StartPositionAfterHeader, pass);

            // compressed header without content is odd but ok
            if (dataVector.Count == 0)
            {
                db.Fill();
                return db;
            }
            if (dataVector.Count != 1)
            {
                throw new InvalidOperationException();
            }

            streamSwitch.Set(this, dataVector[0]);
            if (ReadId() != BlockType.Header)
            {
                throw new InvalidOperationException();
            }
        }

        ReadHeader(db, pass);
    }

    db.Fill();
    return db;
}
/// <summary>
/// Opens a read-only view onto one file inside a shared folder decoder
/// stream, positioned past the sizes of the preceding files in that folder.
/// </summary>
public Stream OpenStream(ArchiveDatabase _db, int fileIndex, IPasswordProvider pw)
{
    int folderIndex = _db.FileIndexToFolderIndexMap[fileIndex];
    int streamsInFolder = _db.NumUnpackStreamsVector[folderIndex];
    int firstFileIndex = _db.FolderStartFileIndex[folderIndex];

    // The requested file must fall within the folder's file range.
    if (fileIndex < firstFileIndex || fileIndex - firstFileIndex >= streamsInFolder)
    {
        throw new InvalidOperationException();
    }

    // Offset of this file within the folder's concatenated unpacked data.
    long skipSize = 0;
    for (int i = firstFileIndex; i < fileIndex; i++)
    {
        skipSize += _db.Files[i].Size;
    }

    Stream decoded = GetCachedDecoderStream(_db, folderIndex, pw);
    decoded.Position = skipSize;
    return new ReadOnlySubStream(decoded, _db.Files[fileIndex].Size);
}
/// <summary>
/// Captures the database, the index of the first file in the folder, and the
/// per-file extract flags for a folder unpack pass.
/// </summary>
public FolderUnpackStream(ArchiveDatabase db, int p, int startIndex, List<bool> list)
{
    _db = db;
    _startIndex = startIndex;
    _extractStatuses = list;
    // NOTE(review): 'p' is accepted but never stored in this constructor;
    // kept for signature compatibility with existing callers.
}
/// <summary>Enumerates every file entry recorded in the archive database.</summary>
public IEnumerable<CFileItem> GetFiles(ArchiveDatabase db)
{
    return db.Files;
}
/// <summary>
/// Returns the position of <paramref name="item"/> within the database's file
/// list, or -1 when the entry is not present.
/// </summary>
public int GetFileIndex(ArchiveDatabase db, CFileItem item)
{
    return db.Files.IndexOf(item);
}
// NOTE(review): decompiler output (labels/gotos reconstructed from IL) of the
// structured ReadHeader variant found elsewhere in this source. Flow: read the
// optional ArchiveProperties / AdditionalStreamsInfo / MainStreamsInfo blocks
// (or synthesize one unpack stream per folder), then loop over FilesInfo
// property records — names, WinAttributes, EmptyStream/EmptyFile/Anti bit
// vectors, StartPos, C/A/MTime, Dummy padding — until BlockType.End, validating
// each record's byte size when the archive version is > 0.2; finally back-fill
// per-file HasStream/IsDir/IsAnti/Size/Crc from the collected vectors.
// Locals: list2 = unpack sizes, list3 = digests, vector = empty-stream bits,
// vector2 = empty-file bits, vector3 = anti bits, length = empty-stream count.
private void ReadHeader(ArchiveDatabase db, IPasswordProvider getTextPassword) { Action<int, uint?> action = null; Action<int, long?> action2 = null; Action<int, DateTime?> action3 = null; Action<int, DateTime?> action4 = null; Action<int, DateTime?> action5 = null; SharpCompress.Compressor.LZMA.Log.WriteLine("-- ReadHeader --"); SharpCompress.Compressor.LZMA.Log.PushIndent("  "); try { List<long> list2; List<uint?> list3; int num; long num6; int num7; bool flag2; BlockType? nullable = this.ReadId(); if (((BlockType) nullable) == BlockType.ArchiveProperties) { this.ReadArchiveProperties(); nullable = this.ReadId(); } List<byte[]> dataVector = null; if (((BlockType) nullable) == BlockType.AdditionalStreamsInfo) { dataVector = this.ReadAndDecodePackedStreams(db.StartPositionAfterHeader, getTextPassword); nullable = this.ReadId(); } if (((BlockType) nullable) == BlockType.MainStreamsInfo) { this.ReadStreamsInfo(dataVector, out db.DataStartPosition, out db.PackSizes, out db.PackCRCs, out db.Folders, out db.NumUnpackStreamsVector, out list2, out list3); db.DataStartPosition += db.StartPositionAfterHeader; nullable = this.ReadId(); } else { list2 = new List<long>(db.Folders.Count); list3 = new List<uint?>(db.Folders.Count); db.NumUnpackStreamsVector = new List<int>(db.Folders.Count); for (num = 0; num < db.Folders.Count; num++) { CFolder folder = db.Folders[num]; list2.Add(folder.GetUnpackSize()); list3.Add(folder.UnpackCRC); db.NumUnpackStreamsVector.Add(1); } } db.Files.Clear(); if (((BlockType) nullable) == BlockType.End) { return; } if (((BlockType) nullable) != BlockType.FilesInfo) { throw new InvalidOperationException(); } int capacity = this.ReadNum(); SharpCompress.Compressor.LZMA.Log.WriteLine("NumFiles: " + capacity); db.Files = new List<CFileItem>(capacity); num = 0; while (num < capacity) { db.Files.Add(new CFileItem()); num++; } BitVector vector = new BitVector(capacity); BitVector vector2 = null; BitVector vector3 = null; int length = 0; goto Label_06FC; 
// Label_02DA: top of the per-record loop — read the record id, stop at End,
// read the record size, then dispatch to the matching Label_ handler below.
Label_02DA: nullable = this.ReadId(); if (((BlockType) nullable) == BlockType.End) { goto Label_0704; } long size = (long) this.ReadNumber(); int offset = this._currentReader.Offset; BlockType valueOrDefault = nullable.GetValueOrDefault(); if (nullable.HasValue) { switch (valueOrDefault) { case BlockType.EmptyStream: vector = this.ReadBitVector(capacity); SharpCompress.Compressor.LZMA.Log.Write("EmptyStream: "); num = 0; goto Label_04AD; case BlockType.EmptyFile: vector2 = this.ReadBitVector(length); SharpCompress.Compressor.LZMA.Log.Write("EmptyFile: "); num = 0; goto Label_0519; case BlockType.Anti: vector3 = this.ReadBitVector(length); SharpCompress.Compressor.LZMA.Log.Write("Anti: "); num = 0; goto Label_056E; case BlockType.Name: goto Label_0370; case BlockType.CTime: goto Label_05BC; case BlockType.ATime: goto Label_05F3; case BlockType.MTime: goto Label_062A; case BlockType.WinAttributes: goto Label_0420; case BlockType.StartPos: goto Label_0585; case BlockType.Dummy: SharpCompress.Compressor.LZMA.Log.Write("Dummy: " + size); num6 = 0L; goto Label_0697; } } goto Label_06A5; Label_0370: using (CStreamSwitch switch2 = new CStreamSwitch()) { switch2.Set(this, dataVector); SharpCompress.Compressor.LZMA.Log.Write("FileNames:"); num = 0; while (num < db.Files.Count) { db.Files[num].Name = this._currentReader.ReadString(); SharpCompress.Compressor.LZMA.Log.Write(" " + db.Files[num].Name); num++; } SharpCompress.Compressor.LZMA.Log.WriteLine(); } goto Label_06B0; Label_0420: SharpCompress.Compressor.LZMA.Log.Write("WinAttributes:"); if (action == null) { action = delegate (int i, uint? attr) { db.Files[i].Attrib = attr; SharpCompress.Compressor.LZMA.Log.Write(" " + (attr.HasValue ? 
// (continuation of the WinAttributes logging delegate, then the bit-vector
// counting loops, StartPos/CTime handlers and the start of the ATime delegate)
attr.Value.ToString("x8") : "n/a")); }; } this.ReadAttributeVector(dataVector, capacity, action); SharpCompress.Compressor.LZMA.Log.WriteLine(); goto Label_06B0; Label_0471: if (vector[num]) { SharpCompress.Compressor.LZMA.Log.Write("x"); length++; } else { SharpCompress.Compressor.LZMA.Log.Write("."); } num++; Label_04AD: if (num < vector.Length) { goto Label_0471; } SharpCompress.Compressor.LZMA.Log.WriteLine(); vector2 = new BitVector(length); vector3 = new BitVector(length); goto Label_06B0; Label_04F5: SharpCompress.Compressor.LZMA.Log.Write(vector2[num] ? "x" : "."); num++; Label_0519: if (num < length) { goto Label_04F5; } SharpCompress.Compressor.LZMA.Log.WriteLine(); goto Label_06B0; Label_054A: SharpCompress.Compressor.LZMA.Log.Write(vector3[num] ? "x" : "."); num++; Label_056E: if (num < length) { goto Label_054A; } SharpCompress.Compressor.LZMA.Log.WriteLine(); goto Label_06B0; Label_0585: SharpCompress.Compressor.LZMA.Log.Write("StartPos:"); if (action2 == null) { action2 = delegate (int i, long? startPos) { db.Files[i].StartPos = startPos; SharpCompress.Compressor.LZMA.Log.Write(" " + (startPos.HasValue ? startPos.Value.ToString() : "n/a")); }; } this.ReadNumberVector(dataVector, capacity, action2); SharpCompress.Compressor.LZMA.Log.WriteLine(); goto Label_06B0; Label_05BC: SharpCompress.Compressor.LZMA.Log.Write("CTime:"); if (action3 == null) { action3 = delegate (int i, DateTime? time) { db.Files[i].CTime = time; SharpCompress.Compressor.LZMA.Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a")); }; } this.ReadDateTimeVector(dataVector, capacity, action3); SharpCompress.Compressor.LZMA.Log.WriteLine(); goto Label_06B0; Label_05F3: SharpCompress.Compressor.LZMA.Log.Write("ATime:"); if (action4 == null) { action4 = delegate (int i, DateTime? time) { db.Files[i].ATime = time; SharpCompress.Compressor.LZMA.Log.Write(" " + (time.HasValue ? 
// (continuation: ATime/MTime handlers, Dummy zero-byte check, the record-size
// validation at Label_06B0, and the final per-file attribute back-fill loop)
time.Value.ToString() : "n/a")); }; } this.ReadDateTimeVector(dataVector, capacity, action4); SharpCompress.Compressor.LZMA.Log.WriteLine(); goto Label_06B0; Label_062A: SharpCompress.Compressor.LZMA.Log.Write("MTime:"); if (action5 == null) { action5 = delegate (int i, DateTime? time) { db.Files[i].MTime = time; SharpCompress.Compressor.LZMA.Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a")); }; } this.ReadDateTimeVector(dataVector, capacity, action5); SharpCompress.Compressor.LZMA.Log.WriteLine(); goto Label_06B0; Label_067B: if (this.ReadByte() != 0) { throw new InvalidOperationException(); } num6 += 1L; Label_0697: if (num6 < size) { goto Label_067B; } goto Label_06B0; Label_06A5: this.SkipData(size); Label_06B0: if (((db.MajorVersion > 0) || (db.MinorVersion > 2)) && ((this._currentReader.Offset - offset) != size)) { throw new InvalidOperationException(); } Label_06FC: flag2 = true; goto Label_02DA; Label_0704: num7 = 0; int num8 = 0; for (num = 0; num < capacity; num++) { CFileItem item = db.Files[num]; item.HasStream = !vector[num]; if (item.HasStream) { item.IsDir = false; item.IsAnti = false; item.Size = list2[num8]; item.Crc = list3[num8]; num8++; } else { item.IsDir = !vector2[num7]; item.IsAnti = vector3[num7]; num7++; item.Size = 0L; item.Crc = null; } } } finally { SharpCompress.Compressor.LZMA.Log.PopIndent(); } }
// NOTE(review): decompiler output of Extract — behaviorally parallel to the
// structured Extract variant in this source: first groups the requested file
// indices by owning folder (building each folder's ExtractStatuses flags, with
// fileIndex/-1 entries for stream-less files), then for each folder builds the
// pack-size list, creates a decoder stream and pumps it into a
// FolderUnpackStream in 0x1000-byte chunks. The Label_0223/Label_0252 goto pair
// is the reconstructed inner read loop; the 'continue' after a zero-byte Read
// exits it by advancing the enclosing foreach to the next folder.
public void Extract(ArchiveDatabase _db, int[] indices, IPasswordProvider pw) { int count; bool flag = indices == null; if (flag) { count = _db.Files.Count; } else { count = indices.Length; } if (count != 0) { int folderIndex; int num5; List<CExtractFolderInfo> source = new List<CExtractFolderInfo>(); for (int i = 0; i < count; i++) { int fileIndex = flag ? i : indices[i]; folderIndex = _db.FileIndexToFolderIndexMap[fileIndex]; if (folderIndex == -1) { source.Add(new CExtractFolderInfo(fileIndex, -1)); } else { if ((source.Count == 0) || (folderIndex != Enumerable.Last<CExtractFolderInfo>(source).FolderIndex)) { source.Add(new CExtractFolderInfo(-1, folderIndex)); } CExtractFolderInfo info = Enumerable.Last<CExtractFolderInfo>(source); num5 = _db.FolderStartFileIndex[folderIndex]; for (int j = info.ExtractStatuses.Count; j <= (fileIndex - num5); j++) { info.ExtractStatuses.Add(j == (fileIndex - num5)); } } } foreach (CExtractFolderInfo info in source) { int num10; bool flag2; if (info.FileIndex != -1) { num5 = info.FileIndex; } else { num5 = _db.FolderStartFileIndex[info.FolderIndex]; } FolderUnpackStream stream = new FolderUnpackStream(_db, 0, num5, info.ExtractStatuses); if (info.FileIndex != -1) { continue; } folderIndex = info.FolderIndex; CFolder folder = _db.Folders[folderIndex]; int firstPackStreamId = _db.Folders[folderIndex].FirstPackStreamId; long folderStreamPos = _db.GetFolderStreamPos(folder, 0); List<long> list2 = new List<long>(); for (int k = 0; k < folder.PackStreams.Count; k++) { list2.Add(_db.PackSizes[firstPackStreamId + k]); } Stream stream2 = DecoderStreamHelper.CreateDecoderStream(this._stream, folderStreamPos, list2.ToArray(), folder, pw); byte[] buffer = new byte[0x1000]; goto Label_0252; Label_0223: num10 = stream2.Read(buffer, 0, buffer.Length); if (num10 == 0) { continue; } stream.Write(buffer, 0, num10); Label_0252: flag2 = true; goto Label_0223; } } }
/// <summary>
/// Returns the (lazily created, cached) decoder stream for a folder so that
/// several files in the same folder can reuse one decode pass.
/// </summary>
private Stream GetCachedDecoderStream(ArchiveDatabase _db, int folderIndex, IPasswordProvider pw)
{
    Stream stream;
    if (this._cachedStreams.TryGetValue(folderIndex, out stream))
    {
        return stream;
    }

    CFolder folder = _db.Folders[folderIndex];
    int firstPackStreamId = _db.Folders[folderIndex].FirstPackStreamId;
    long folderStreamPos = _db.GetFolderStreamPos(folder, 0);

    // Gather the sizes of every packed stream that feeds this folder.
    var sizes = new List<long>();
    for (int i = 0; i < folder.PackStreams.Count; i++)
    {
        sizes.Add(_db.PackSizes[firstPackStreamId + i]);
    }

    stream = DecoderStreamHelper.CreateDecoderStream(this._stream, folderStreamPos, sizes.ToArray(), folder, pw);
    this._cachedStreams.Add(folderIndex, stream);
    return stream;
}
// NOTE(review): decompiler output of ReadDatabase — same logic as the
// structured variant in this source: num = stored trailer CRC, num2/num3 =
// next-header offset/size, num4 = next-header CRC. Validates the trailer CRC
// over the three next-header fields (uint.MaxValue here appears to be
// CRC.kInitCRC inlined by the compiler — compare the structured variant),
// accepts an empty next header, bounds-checks offset/size, reads and
// CRC-checks the next header, transparently decodes an EncodedHeader block,
// then delegates to ReadHeader and fills the database.
public ArchiveDatabase ReadDatabase(IPasswordProvider pass) { ArchiveDatabase db = new ArchiveDatabase(); db.Clear(); db.MajorVersion = this._header[6]; db.MinorVersion = this._header[7]; if (db.MajorVersion != 0) { throw new InvalidOperationException(); } uint num = DataReader.Get32(this._header, 8); long num2 = (long) DataReader.Get64(this._header, 12); long num3 = (long) DataReader.Get64(this._header, 20); uint num4 = DataReader.Get32(this._header, 0x1c); uint maxValue = uint.MaxValue; if (CRC.Finish(CRC.Update(CRC.Update(CRC.Update(maxValue, num2), num3), num4)) != num) { throw new InvalidOperationException(); } db.StartPositionAfterHeader = this._streamOrigin + 0x20L; if (num3 == 0L) { db.Fill(); return db; } if (((num2 < 0L) || (num3 < 0L)) || (num3 > 0x7fffffffL)) { throw new InvalidOperationException(); } if (num2 > (this._streamEnding - db.StartPositionAfterHeader)) { throw new IndexOutOfRangeException(); } this._stream.Seek(num2, SeekOrigin.Current); byte[] buffer = new byte[num3]; Utils.ReadExact(this._stream, buffer, 0, buffer.Length); if (CRC.Finish(CRC.Update(uint.MaxValue, buffer, 0, buffer.Length)) != num4) { throw new InvalidOperationException(); } using (CStreamSwitch switch2 = new CStreamSwitch()) { switch2.Set(this, buffer); BlockType? nullable = this.ReadId(); if (((BlockType) nullable) != BlockType.Header) { if (((BlockType) nullable) != BlockType.EncodedHeader) { throw new InvalidOperationException(); } List<byte[]> list = this.ReadAndDecodePackedStreams(db.StartPositionAfterHeader, pass); if (list.Count == 0) { db.Fill(); return db; } if (list.Count != 1) { throw new InvalidOperationException(); } switch2.Set(this, list[0]); if (((BlockType) this.ReadId()) != BlockType.Header) { throw new InvalidOperationException(); } } this.ReadHeader(db, pass); } db.Fill(); return db; }
/// <summary>
/// Opens a read-only view onto one file within its folder's shared decoder
/// stream, positioned past the preceding files of that folder.
/// </summary>
public Stream OpenStream(ArchiveDatabase _db, int fileIndex, IPasswordProvider pw)
{
    int folderIndex = _db.FileIndexToFolderIndexMap[fileIndex];
    int streamsInFolder = _db.NumUnpackStreamsVector[folderIndex];
    int firstFileIndex = _db.FolderStartFileIndex[folderIndex];

    // The requested file must lie inside the folder's file range.
    if (fileIndex < firstFileIndex || fileIndex - firstFileIndex >= streamsInFolder)
    {
        throw new InvalidOperationException();
    }

    // Byte offset of this file within the folder's concatenated unpacked data.
    long skip = 0L;
    for (int i = firstFileIndex; i < fileIndex; i++)
    {
        skip += _db.Files[i].Size;
    }

    Stream decoder = this.GetCachedDecoderStream(_db, folderIndex, pw);
    decoder.Position = skip;
    return new ReadOnlySubStream(decoder, _db.Files[fileIndex].Size);
}