internal override int CabCloseStreamEx(int streamHandle, out int err, IntPtr pv)
{
    Stream stream = DuplicateStream.OriginalStream(StreamHandles[streamHandle]);

    if (stream == DuplicateStream.OriginalStream(FileStream))
    {
        context.CloseFileReadStream(currentFileName, stream);
        FileStream = null;
        long remainder = currentFileTotalBytes - currentFileBytesProcessed;
        currentFileBytesProcessed += remainder;
        fileBytesProcessed += remainder;
        OnProgress(ArchiveProgressType.FinishFile);
        currentFileTotalBytes = 0;
        currentFileBytesProcessed = 0;
        currentFileName = null;
    }
    else if (stream == DuplicateStream.OriginalStream(CabStream))
    {
        if (stream.CanWrite)
        {
            stream.Flush();
        }

        currentArchiveBytesProcessed = currentArchiveTotalBytes;
        OnProgress(ArchiveProgressType.FinishArchive);
        currentArchiveNumber++;
        totalArchives++;
        context.CloseArchiveWriteStream(
            currentArchiveNumber, currentArchiveName, stream);
        currentArchiveName = NextCabinetName;
        currentArchiveBytesProcessed = currentArchiveTotalBytes = 0;
        totalFolderBytesProcessedInCurrentCab = 0;
        CabStream = null;
    }
    else // Must be a temp stream
    {
        stream.Close();
        tempStreams.Remove(stream);
    }

    return base.CabCloseStreamEx(streamHandle, out err, pv);
}
/// <summary>
/// Creates a zip archive or chain of zip archives.
/// </summary>
/// <param name="streamContext">A context interface to handle opening
/// and closing of archive and file streams.</param>
/// <param name="files">The paths of the files in the archive
/// (not external file paths).</param>
/// <param name="maxArchiveSize">The maximum number of bytes for one archive
/// before the contents are chained to the next archive, or zero for unlimited
/// archive size.</param>
/// <exception cref="ArchiveException">The archive could not be
/// created.</exception>
/// <remarks>
/// The stream context implementation may provide a mapping from the file
/// paths within the archive to the external file paths.
/// </remarks>
public override void Pack(
    IPackStreamContext streamContext,
    IEnumerable<string> files,
    long maxArchiveSize)
{
    if (streamContext == null)
    {
        throw new ArgumentNullException("streamContext");
    }

    if (files == null)
    {
        throw new ArgumentNullException("files");
    }

    lock (this)
    {
        Stream archiveStream = null;
        try
        {
            this.ResetProgressData();
            this.totalArchives = 1;

            object forceZip64Value = streamContext.GetOption("forceZip64", null);
            bool forceZip64 = Convert.ToBoolean(
                forceZip64Value, CultureInfo.InvariantCulture);

            // Count the total number of files and bytes to be compressed.
            foreach (string file in files)
            {
                FileAttributes attributes;
                DateTime lastWriteTime;
                Stream fileStream = streamContext.OpenFileReadStream(
                    file, out attributes, out lastWriteTime);
                if (fileStream != null)
                {
                    this.totalFileBytes += fileStream.Length;
                    this.totalFiles++;
                    streamContext.CloseFileReadStream(file, fileStream);
                }
            }

            List<ZipFileHeader> fileHeaders = new List<ZipFileHeader>();
            this.currentFileNumber = -1;

            if (this.currentArchiveName == null)
            {
                this.mainArchiveName = streamContext.GetArchiveName(0);
                this.currentArchiveName = this.mainArchiveName;

                if (string.IsNullOrWhiteSpace(this.currentArchiveName))
                {
                    throw new FileNotFoundException("No name provided for archive.");
                }
            }

            this.OnProgress(ArchiveProgressType.StartArchive);

            // Compress files one by one, saving header info for each.
            foreach (string file in files)
            {
                ZipFileHeader fileHeader = this.PackOneFile(
                    streamContext, file, maxArchiveSize, forceZip64, ref archiveStream);
                if (fileHeader != null)
                {
                    fileHeaders.Add(fileHeader);
                }

                this.currentArchiveTotalBytes =
                    (archiveStream != null ? archiveStream.Position : 0);
                this.currentArchiveBytesProcessed = this.currentArchiveTotalBytes;
            }

            bool zip64 = forceZip64 || this.totalFiles > UInt16.MaxValue;

            // Write the central directory composed of all the file headers.
            uint centralDirStartArchiveNumber = 0;
            long centralDirStartPosition = 0;
            long centralDirSize = 0;
            for (int i = 0; i < fileHeaders.Count; i++)
            {
                ZipFileHeader fileHeader = fileHeaders[i];
                int headerSize = fileHeader.GetSize(true);
                centralDirSize += headerSize;

                this.CheckArchiveWriteStream(
                    streamContext, maxArchiveSize, headerSize, ref archiveStream);

                if (i == 0)
                {
                    centralDirStartArchiveNumber = (uint) this.currentArchiveNumber;
                    centralDirStartPosition = archiveStream.Position;
                }

                fileHeader.Write(archiveStream, true);
                if (fileHeader.zip64)
                {
                    zip64 = true;
                }
            }

            this.currentArchiveTotalBytes =
                (archiveStream != null ? archiveStream.Position : 0);
            this.currentArchiveBytesProcessed = this.currentArchiveTotalBytes;

            // Write the end-of-central-directory records, switching to the
            // Zip64 format when the archive requires it.
            ZipEndOfCentralDirectory eocd = new ZipEndOfCentralDirectory();
            eocd.dirStartDiskNumber = centralDirStartArchiveNumber;
            eocd.entriesOnDisk = fileHeaders.Count;
            eocd.totalEntries = fileHeaders.Count;
            eocd.dirSize = centralDirSize;
            eocd.dirOffset = centralDirStartPosition;
            eocd.comment = this.comment;

            Zip64EndOfCentralDirectoryLocator eocdl =
                new Zip64EndOfCentralDirectoryLocator();

            int maxFooterSize = eocd.GetSize(false);
            if (archiveStream != null && (zip64 || archiveStream.Position >
                ((long) UInt32.MaxValue) - eocd.GetSize(false)))
            {
                maxFooterSize += eocd.GetSize(true) +
                    (int) Zip64EndOfCentralDirectoryLocator.EOCDL64_SIZE;
                zip64 = true;
            }

            this.CheckArchiveWriteStream(
                streamContext, maxArchiveSize, maxFooterSize, ref archiveStream);

            eocd.diskNumber = (uint) this.currentArchiveNumber;

            if (zip64)
            {
                eocd.versionMadeBy = 45;
                eocd.versionNeeded = 45;
                eocd.zip64 = true;
                eocdl.dirOffset = archiveStream.Position;
                eocdl.dirStartDiskNumber = (uint) this.currentArchiveNumber;
                eocdl.totalDisks = (uint) this.currentArchiveNumber + 1;
                eocd.Write(archiveStream);
                eocdl.Write(archiveStream);

                eocd.dirOffset = UInt32.MaxValue;
                eocd.dirStartDiskNumber = UInt16.MaxValue;
            }

            eocd.zip64 = false;
            eocd.Write(archiveStream);

            this.currentArchiveTotalBytes = archiveStream.Position;
            this.currentArchiveBytesProcessed = this.currentArchiveTotalBytes;
        }
        finally
        {
            if (archiveStream != null)
            {
                streamContext.CloseArchiveWriteStream(
                    this.currentArchiveNumber, this.mainArchiveName, archiveStream);
                this.OnProgress(ArchiveProgressType.FinishArchive);
            }
        }
    }
}
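// Illustrative usage sketch, not part of the library source: one way the
// Pack overload above might be driven. It assumes ZipEngine has a public
// parameterless constructor and is disposable, and that an
// ArchiveFileStreamContext helper (mapping archive paths to files under a
// directory) is available as the IPackStreamContext implementation; any
// other IPackStreamContext would work the same way.
private static void PackZipExample()
{
    using (ZipEngine zipEngine = new ZipEngine())
    {
        IPackStreamContext packContext = new ArchiveFileStreamContext(
            "output.zip",   // archive file to create
            @"C:\data",     // directory containing the source files
            null);          // no explicit mapping: archive paths match disk paths

        // A maxArchiveSize of zero writes a single archive of unlimited size.
        zipEngine.Pack(packContext, new string[] { "readme.txt", "logo.png" }, 0);
    }
}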
/// <summary>
/// Adds one file to a zip archive in the process of being created.
/// </summary>
private ZipFileHeader PackOneFile(
    IPackStreamContext streamContext,
    string file,
    long maxArchiveSize,
    bool forceZip64,
    ref Stream archiveStream)
{
    Stream fileStream = null;
    int headerArchiveNumber = 0;
    try
    {
        // TODO: call GetOption to get compression method for the specific file
        ZipCompressionMethod compressionMethod = ZipCompressionMethod.Deflate;
        if (this.CompressionLevel == CompressionLevel.None)
        {
            compressionMethod = ZipCompressionMethod.Store;
        }

        Converter<Stream, Stream> compressionStreamCreator;
        if (!ZipEngine.compressionStreamCreators.TryGetValue(
            compressionMethod, out compressionStreamCreator))
        {
            return null;
        }

        FileAttributes attributes;
        DateTime lastWriteTime;
        fileStream = streamContext.OpenFileReadStream(
            file, out attributes, out lastWriteTime);
        if (fileStream == null)
        {
            return null;
        }

        this.currentFileName = file;
        this.currentFileNumber++;

        this.currentFileTotalBytes = fileStream.Length;
        this.currentFileBytesProcessed = 0;
        this.OnProgress(ArchiveProgressType.StartFile);

        ZipFileInfo fileInfo = new ZipFileInfo(
            file,
            this.currentArchiveNumber,
            attributes,
            lastWriteTime,
            fileStream.Length,
            0,
            compressionMethod);

        bool zip64 = forceZip64 || fileStream.Length >= (long) UInt32.MaxValue;
        ZipFileHeader fileHeader = new ZipFileHeader(fileInfo, zip64);

        this.CheckArchiveWriteStream(
            streamContext, maxArchiveSize, fileHeader.GetSize(false), ref archiveStream);

        long headerPosition = archiveStream.Position;
        fileHeader.Write(archiveStream, false);
        headerArchiveNumber = this.currentArchiveNumber;

        uint crc;
        long bytesWritten = this.PackFileBytes(
            streamContext,
            fileStream,
            maxArchiveSize,
            compressionStreamCreator,
            ref archiveStream,
            out crc);

        fileHeader.Update(
            bytesWritten,
            fileStream.Length,
            crc,
            headerPosition,
            headerArchiveNumber);

        streamContext.CloseFileReadStream(file, fileStream);
        fileStream = null;

        // Go back and rewrite the updated file header.
        if (this.currentArchiveNumber == headerArchiveNumber)
        {
            long fileEndPosition = archiveStream.Position;
            archiveStream.Seek(headerPosition, SeekOrigin.Begin);
            fileHeader.Write(archiveStream, false);
            archiveStream.Seek(fileEndPosition, SeekOrigin.Begin);
        }
        else
        {
            // The file spanned archives, so temporarily reopen
            // the archive where it started.
            string headerArchiveName = streamContext.GetArchiveName(
                headerArchiveNumber + 1);
            Stream headerStream = null;
            try
            {
                headerStream = streamContext.OpenArchiveWriteStream(
                    headerArchiveNumber, headerArchiveName, false, this);
                headerStream.Seek(headerPosition, SeekOrigin.Begin);
                fileHeader.Write(headerStream, false);
            }
            finally
            {
                if (headerStream != null)
                {
                    streamContext.CloseArchiveWriteStream(
                        headerArchiveNumber, headerArchiveName, headerStream);
                }
            }
        }

        this.OnProgress(ArchiveProgressType.FinishFile);
        return fileHeader;
    }
    finally
    {
        if (fileStream != null)
        {
            streamContext.CloseFileReadStream(
                this.currentFileName, fileStream);
        }
    }
}
public void Pack(
    IPackStreamContext streamContext,
    IEnumerable<string> files,
    long maxArchiveSize)
{
    if (streamContext == null)
    {
        throw new ArgumentNullException("streamContext");
    }

    if (files == null)
    {
        throw new ArgumentNullException("files");
    }

    lock (this)
    {
        try
        {
            _context = streamContext;

            ResetProgressData();
            CreateFci(maxArchiveSize);

            foreach (var file in files)
            {
                FileAttributes attributes;
                DateTime lastWriteTime;
                var fileStream = _context.OpenFileReadStream(
                    file, out attributes, out lastWriteTime);
                if (fileStream != null)
                {
                    TotalFileBytes += fileStream.Length;
                    TotalFiles++;
                    _context.CloseFileReadStream(file, fileStream);
                }
            }

            long uncompressedBytesInFolder = 0;
            CurrentFileNumber = -1;

            foreach (var file in files)
            {
                FileAttributes attributes;
                DateTime lastWriteTime;
                var fileStream = _context.OpenFileReadStream(
                    file, out attributes, out lastWriteTime);
                if (fileStream == null)
                {
                    continue;
                }

                if (fileStream.Length >= NativeMethods.FCI.MAX_FOLDER)
                {
                    throw new NotSupportedException(string.Format(
                        CultureInfo.InvariantCulture,
                        "File {0} exceeds maximum file size " +
                        "for cabinet format.",
                        file));
                }

                if (uncompressedBytesInFolder > 0)
                {
                    // Automatically create a new folder if this file
                    // won't fit in the current folder.
                    var nextFolder = uncompressedBytesInFolder + fileStream.Length
                        >= NativeMethods.FCI.MAX_FOLDER;

                    // Otherwise ask the client if it wants to
                    // move to the next folder.
                    if (!nextFolder)
                    {
                        var nextFolderOption = streamContext.GetOption(
                            "nextFolder", new object[] { file, CurrentFolderNumber });
                        nextFolder = Convert.ToBoolean(
                            nextFolderOption, CultureInfo.InvariantCulture);
                    }

                    if (nextFolder)
                    {
                        FlushFolder();
                        uncompressedBytesInFolder = 0;
                    }
                }

                if (CurrentFolderTotalBytes > 0)
                {
                    CurrentFolderTotalBytes = 0;
                    CurrentFolderNumber++;
                    uncompressedBytesInFolder = 0;
                }

                CurrentFileName = file;
                CurrentFileNumber++;

                CurrentFileTotalBytes = fileStream.Length;
                CurrentFileBytesProcessed = 0;
                OnProgress(ArchiveProgressType.StartFile);

                uncompressedBytesInFolder += fileStream.Length;

                AddFile(
                    file,
                    fileStream,
                    attributes,
                    lastWriteTime,
                    false,
                    CompressionLevel);
            }

            FlushFolder();
            FlushCabinet();
        }
        finally
        {
            if (CabStream != null)
            {
                _context.CloseArchiveWriteStream(
                    CurrentArchiveNumber, CurrentArchiveName, CabStream);
                CabStream = null;
            }

            if (FileStream != null)
            {
                _context.CloseFileReadStream(CurrentFileName, FileStream);
                FileStream = null;
            }

            _context = null;

            if (_fciHandle != IntPtr.Zero)
            {
                NativeMethods.FCI.Destroy(_fciHandle);
                _fciHandle = IntPtr.Zero;
            }
        }
    }
}
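// Illustrative usage sketch, not part of the library source: drives the
// cabinet Pack method above. ArchiveFileStreamContext is again an assumed
// IPackStreamContext helper; a context whose GetOption returns null keeps
// the default folder behavior, since Convert.ToBoolean(null, ...) yields
// false for the "nextFolder" query issued above.
private void PackCabExample()
{
    IPackStreamContext cabContext = new ArchiveFileStreamContext(
        "output.cab",   // cabinet file to create
        @"C:\data",     // directory containing the source files
        null);          // archive paths match disk paths

    // Zero maxArchiveSize: never split into a chained cabinet.
    this.Pack(cabContext, new string[] { "setup.exe", "license.rtf" }, 0);
}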