Example 1
        // returns false if it fails; will get called on every entry before closing in update mode
        // can throw InvalidDataException
        internal bool LoadLocalHeaderExtraFieldAndCompressedBytesIfNeeded()
        {
            string message;

            // we should have made this exact call in _archive.Init through ThrowIfOpenable
            Debug.Assert(IsOpenable(false, true, out message));

            // load local header's extra fields. it will be null if we couldn't read for some reason
            if (_originallyInArchive)
            {
                _archive.ArchiveStream.Seek(_offsetOfLocalHeader, SeekOrigin.Begin);

                _lhUnknownExtraFields = ZipLocalFileHeader.GetExtraFields(_archive.ArchiveReader);
            }

            if (!_everOpenedForWrite && _originallyInArchive)
            {
                // we know that it is openable at this point
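                // the allocation below creates one full-size buffer for each complete
                // MaxSingleBufferSize chunk of _compressedSize, plus one trailing buffer
                // for the remainder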

                _compressedBytes = new byte[(_compressedSize / MaxSingleBufferSize) + 1][];
                for (int i = 0; i < _compressedBytes.Length - 1; i++)
                {
                    _compressedBytes[i] = new byte[MaxSingleBufferSize];
                }
                _compressedBytes[_compressedBytes.Length - 1] = new byte[_compressedSize % MaxSingleBufferSize];

                _archive.ArchiveStream.Seek(OffsetOfCompressedData, SeekOrigin.Begin);

                for (int i = 0; i < _compressedBytes.Length - 1; i++)
                {
                    ZipHelper.ReadBytes(_archive.ArchiveStream, _compressedBytes[i], MaxSingleBufferSize);
                }
                ZipHelper.ReadBytes(_archive.ArchiveStream, _compressedBytes[_compressedBytes.Length - 1], (int)(_compressedSize % MaxSingleBufferSize));
            }

            return true;
        }
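The method above splits the compressed payload into fixed-size buffers plus one remainder buffer so that more than int.MaxValue bytes can be held in memory. A minimal standalone sketch of that chunking pattern, using hypothetical names (ChunkedReadSketch, ReadAllChunks, chunkSize) that are not part of the original type:

using System.IO;

internal static class ChunkedReadSketch
{
    // Reads totalLength bytes from source into fixed-size chunk buffers plus one
    // trailing remainder buffer, mirroring the allocation pattern used above.
    internal static byte[][] ReadAllChunks(Stream source, long totalLength, int chunkSize)
    {
        byte[][] chunks = new byte[(totalLength / chunkSize) + 1][];

        for (int i = 0; i < chunks.Length - 1; i++)
        {
            chunks[i] = new byte[chunkSize];
        }
        // the last buffer holds the remainder (zero bytes when totalLength is an
        // exact multiple of chunkSize)
        chunks[chunks.Length - 1] = new byte[totalLength % chunkSize];

        foreach (byte[] chunk in chunks)
        {
            int read = 0;
            while (read < chunk.Length)
            {
                int n = source.Read(chunk, read, chunk.Length - read);
                if (n == 0)
                    throw new EndOfStreamException();
                read += n;
            }
        }

        return chunks;
    }
}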
Example 2
        private bool IsOpenable(bool needToUncompress, bool needToLoadIntoMemory, out string message)
        {
            message = null;

            if (_originallyInArchive)
            {
                if (needToUncompress)
                {
                    if (CompressionMethod != CompressionMethodValues.Stored &&
                        CompressionMethod != CompressionMethodValues.Deflate &&
                        CompressionMethod != CompressionMethodValues.Deflate64)
                    {
                        switch (CompressionMethod)
                        {
                        case CompressionMethodValues.BZip2:
                        case CompressionMethodValues.LZMA:
                            message = $"Unsupported compression method: {CompressionMethod}";
                            break;

                        default:
                            message = "Unsupported compression method";
                            break;
                        }
                        return false;
                    }
                }
                if (_diskNumberStart != _archive.NumberOfThisDisk)
                {
                    message = "split spanned";
                    return false;
                }
                if (_offsetOfLocalHeader > _archive.ArchiveStream.Length)
                {
                    message = "local file header corrupt";
                    return false;
                }
                _archive.ArchiveStream.Seek(_offsetOfLocalHeader, SeekOrigin.Begin);
                if (!ZipLocalFileHeader.TrySkipBlock(_archive.ArchiveReader))
                {
                    message = "local file header corrupt";
                    return false;
                }
                // note: evaluating OffsetOfCompressedData here repeats some of the local header parsing done just above
                if (OffsetOfCompressedData + _compressedSize > _archive.ArchiveStream.Length)
                {
                    message = "local file header corrupt";
                    return false;
                }
                // This limitation originally existed because (a) it is unreasonable to load > 4GB into memory,
                // and (b) the stream reading functions made it hard. This has been updated to handle
                // the scenario in a 64-bit process using multiple buffers, delivered first as an OOB release
                // for compatibility.
                if (needToLoadIntoMemory)
                {
                    if (_compressedSize > int.MaxValue)
                    {
                        if (!s_allowLargeReadOnlyZipArchiveEntriesInUpdateMode)
                        {
                            message = "entry too large";
                            return false;
                        }
                    }
                }
            }

            return true;
        }
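The comment in Example 1 says this check is first made during _archive.Init through ThrowIfOpenable. A minimal sketch of such a wrapper, assuming it simply converts a failed IsOpenable check into the InvalidDataException mentioned in Example 1; the exact name and shape here are illustrative, not taken from the source:

        // sketch only: turn IsOpenable's failure message into an exception
        internal void ThrowIfNotOpenable(bool needToUncompress, bool needToLoadIntoMemory)
        {
            string message;

            if (!IsOpenable(needToUncompress, needToLoadIntoMemory, out message))
                throw new InvalidDataException(message);
        }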