Example #1
0
		/// <summary>
		/// Test an archive for integrity/validity
		/// </summary>
		/// <param name="testData">Perform low level data Crc check</param>
		/// <param name="strategy">The <see cref="TestStrategy"></see> to apply.</param>
		/// <param name="resultHandler">The <see cref="ZipTestResultHandler"></see> handler to call during testing.</param>
		/// <returns>true if all tests pass, false otherwise</returns>
		/// <exception cref="ObjectDisposedException">The object has already been closed.</exception>
		public bool TestArchive(bool testData, TestStrategy strategy, ZipTestResultHandler resultHandler)
		{
			if (isDisposed_) {
				throw new ObjectDisposedException("ZipFile");
			}
			
			var status = new TestStatus(this);

			if ( resultHandler != null ) {
				resultHandler(status, null);
			}

			var test = testData ? (HeaderTest.Header | HeaderTest.Extract) : HeaderTest.Header;

			var testing = true;

			try {
				var entryIndex = 0;

				while ( testing && (entryIndex < Count) ) {
					if ( resultHandler != null ) {
						status.SetEntry(this[entryIndex]);
						status.SetOperation(TestOperation.EntryHeader);
						resultHandler(status, null);
					}

					try	{
						TestLocalHeader(this[entryIndex], test);
					}
					catch(ZipException ex) {
						status.AddError();

						if ( resultHandler != null ) {
							resultHandler(status,
								string.Format("Exception during test - '{0}'", ex.Message));
						}

						if ( strategy == TestStrategy.FindFirstError ) {
							testing = false; 
						}
					}

					if ( testing && testData && this[entryIndex].IsFile ) {
						if ( resultHandler != null ) {
							status.SetOperation(TestOperation.EntryData);
							resultHandler(status, null);
						}

						var crc = new Crc32();

						using (var entryStream = GetInputStream(this[entryIndex])) {
							var buffer = new byte[4096];
							long totalBytes = 0;
							int bytesRead;
							while ((bytesRead = entryStream.Read(buffer, 0, buffer.Length)) > 0) {
								crc.Update(buffer, 0, bytesRead);

								if (resultHandler != null) {
									totalBytes += bytesRead;
									status.SetBytesTested(totalBytes);
									resultHandler(status, null);
								}
							}
						}

						if (this[entryIndex].Crc != crc.Value) {
							status.AddError();
							
							if ( resultHandler != null ) {
								resultHandler(status, "CRC mismatch");
							}

							if ( strategy == TestStrategy.FindFirstError ) {
								testing = false;
							}
						}

						if (( this[entryIndex].Flags & (int)GeneralBitFlags.Descriptor) != 0 ) {
							var helper = new ZipHelperStream(baseStream_);
							var data = new DescriptorData();
							helper.ReadDataDescriptor(this[entryIndex].LocalHeaderRequiresZip64, data);
							if (this[entryIndex].Crc != data.Crc) {
								status.AddError();
							}

							if (this[entryIndex].CompressedSize != data.CompressedSize) {
								status.AddError();
							}

							if (this[entryIndex].Size != data.Size) {
								status.AddError();
							}
						}
					}

					if ( resultHandler != null ) {
						status.SetOperation(TestOperation.EntryComplete);
						resultHandler(status, null);
					}

					entryIndex += 1;
				}

				if ( resultHandler != null ) {
					status.SetOperation(TestOperation.MiscellaneousTests);
					resultHandler(status, null);
				}

				// TODO: the 'Corrina Johns' test where local headers are missing from
				// the central directory.  They are therefore invisible to many archivers.
			}
			catch (Exception ex) {
				status.AddError();

				if ( resultHandler != null ) {
					resultHandler(status, string.Format("Exception during test - '{0}'", ex.Message));
				}
			}

			if ( resultHandler != null ) {
				status.SetOperation(TestOperation.Complete);
				status.SetEntry(null);
				resultHandler(status, null);
			}

			return (status.ErrorCount == 0);
		}
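
		// Usage sketch (not part of the library): drives TestArchive with a
		// ZipTestResultHandler that reports any error text passed to it. The
		// "archive.zip" path is illustrative; richer progress details are carried
		// by the TestStatus argument (entry, operation, byte counts) as set above.
		static void TestArchiveExample()
		{
			using (var zipFile = new ZipFile("archive.zip")) {
				ZipTestResultHandler handler = (status, message) => {
					if (message != null) {
						Console.WriteLine("Test result: " + message);
					}
				};

				// Check headers and entry data (CRC), stopping at the first error found.
				bool ok = zipFile.TestArchive(true, TestStrategy.FindFirstError, handler);
				Console.WriteLine(ok ? "Archive OK" : "Archive has errors");
			}
		}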
Example #2
0
		void RunUpdates()
		{
			long sizeEntries = 0;
			long endOfStream = 0;
			var directUpdate = false;
			long destinationPosition = 0; // NOT SFX friendly

			ZipFile workFile;

			if ( IsNewArchive ) {
				workFile = this;
				workFile.baseStream_.Position = 0;
				directUpdate = true;
			}
			else if ( archiveStorage_.UpdateMode == FileUpdateMode.Direct ) {
				workFile = this;
				workFile.baseStream_.Position = 0;
				directUpdate = true;

				// Sort the updates by offset within copies/modifies, then adds.
				// This ensures that data required by copies will not be overwritten.
				updates_.Sort(new UpdateComparer());
			}
			else {
				workFile = Create(archiveStorage_.GetTemporaryOutput());
				workFile.UseZip64 = UseZip64;
				
				if (key != null) {
					workFile.key = (byte[])key.Clone();
				}
			}

			try {
				foreach ( ZipUpdate update in updates_ ) {
					if (update != null) {
						switch (update.Command) {
							case UpdateCommand.Copy:
								if (directUpdate) {
									CopyEntryDirect(workFile, update, ref destinationPosition);
								}
								else {
									CopyEntry(workFile, update);
								}
								break;

							case UpdateCommand.Modify:
								// TODO: Direct modifying of an entry will take some legwork.
								ModifyEntry(workFile, update);
								break;

							case UpdateCommand.Add:
								if (!IsNewArchive && directUpdate) {
									workFile.baseStream_.Position = destinationPosition;
								}

								AddEntry(workFile, update);

								if (directUpdate) {
									destinationPosition = workFile.baseStream_.Position;
								}
								break;
						}
					}
				}

				if ( !IsNewArchive && directUpdate ) {
					workFile.baseStream_.Position = destinationPosition;
				}

				var centralDirOffset = workFile.baseStream_.Position;

				foreach ( ZipUpdate update in updates_ ) {
					if (update != null) {
						sizeEntries += workFile.WriteCentralDirectoryHeader(update.OutEntry);
					}
				}

				var theComment = (newComment_ != null) ? newComment_.RawComment : ZipConstants.ConvertToArray(comment_);
				using ( var zhs = new ZipHelperStream(workFile.baseStream_) ) {
					zhs.WriteEndOfCentralDirectory(updateCount_, sizeEntries, centralDirOffset, theComment);
				}

				endOfStream = workFile.baseStream_.Position;

				// And now patch entries...
				foreach ( ZipUpdate update in updates_ ) {
					if (update != null)
					{
						// If the size of the entry is zero, leave the CRC as 0 as well;
						// the calculated CRC would be all bits on...
						if ((update.CrcPatchOffset > 0) && (update.OutEntry.CompressedSize > 0)) {
							workFile.baseStream_.Position = update.CrcPatchOffset;
							workFile.WriteLEInt((int)update.OutEntry.Crc);
						}

						if (update.SizePatchOffset > 0) {
							workFile.baseStream_.Position = update.SizePatchOffset;
							if (update.OutEntry.LocalHeaderRequiresZip64) {
								workFile.WriteLeLong(update.OutEntry.Size);
								workFile.WriteLeLong(update.OutEntry.CompressedSize);
							}
							else {
								workFile.WriteLEInt((int)update.OutEntry.CompressedSize);
								workFile.WriteLEInt((int)update.OutEntry.Size);
							}
						}
					}
				}
			}
			catch {
				workFile.Close();
				if (!directUpdate && (workFile.Name != null)) {
					File.Delete(workFile.Name);
				}
				throw;
			}

			if (directUpdate) {
				workFile.baseStream_.SetLength(endOfStream);
				workFile.baseStream_.Flush();
				isNewArchive_ = false;
				ReadEntries();
			}
			else {
				baseStream_.Close();
				Reopen(archiveStorage_.ConvertTemporaryToFinal());
			}
		}
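
		// Caller-side sketch (not part of the library): RunUpdates is reached through
		// the public update API once contents have been edited. The file names below
		// are illustrative; Add, Delete and CommitUpdate are assumed to be the public
		// entry points that populate updates_ and then commit them.
		static void RunUpdatesExample()
		{
			using (var zipFile = new ZipFile("existing.zip")) {
				zipFile.BeginUpdate();          // start collecting updates
				zipFile.Add("newfile.txt");     // queued as UpdateCommand.Add
				zipFile.Delete("oldfile.txt");  // removes the entry from the queued set
				zipFile.CommitUpdate();         // runs RunUpdates to copy/add/patch entries
			}
		}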
Example #3
0
		// NOTE this returns the offset of the first byte after the signature.
		long LocateBlockWithSignature(int signature, long endLocation, int minimumBlockSize, int maximumVariableData)
		{
			using ( var les = new ZipHelperStream(baseStream_) ) {
				return les.LocateBlockWithSignature(signature, endLocation, minimumBlockSize, maximumVariableData);
			}
		}
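
		// Sketch (assumption, not library code): locating the end of central directory
		// record with ZipHelperStream, mirroring the wrapper above. Per the note, the
		// returned offset points at the first byte *after* the 4 byte signature.
		static long LocateEndOfCentralDirectory(Stream baseStream)
		{
			using (var helper = new ZipHelperStream(baseStream)) {
				return helper.LocateBlockWithSignature(
					ZipConstants.EndOfCentralDirectorySignature,
					baseStream.Length,                       // search backwards from the end
					ZipConstants.EndOfCentralRecordBaseSize, // minimum record size
					0xffff);                                 // maximum trailing comment length
			}
		}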
Example #4
0
		void AddEntry(ZipFile workFile, ZipUpdate update)
		{
			Stream source = null;

			if ( update.Entry.IsFile ) {
				source = update.GetSource();
				
				if ( source == null ) {
					source = updateDataSource_.GetSource(update.Entry, update.Filename);
				}
			}

			if ( source != null ) {
				using ( source ) {
					var sourceStreamLength = source.Length;
					if ( update.OutEntry.Size < 0 ) {
						update.OutEntry.Size = sourceStreamLength;
					}
					else {
						// Check for errant entries.
						if ( update.OutEntry.Size != sourceStreamLength ) {
							throw new ZipException("Entry size/stream size mismatch");
						}
					}

					workFile.WriteLocalEntryHeader(update);

					var dataStart = workFile.baseStream_.Position;

					using ( var output = workFile.GetOutputStream(update.OutEntry) ) {
						CopyBytes(update, output, source, sourceStreamLength, true);
					}

					var dataEnd = workFile.baseStream_.Position;
					update.OutEntry.CompressedSize = dataEnd - dataStart;

					if ((update.OutEntry.Flags & (int)GeneralBitFlags.Descriptor) == (int)GeneralBitFlags.Descriptor)
					{
						var helper = new ZipHelperStream(workFile.baseStream_);
						helper.WriteDataDescriptor(update.OutEntry);
					}
				}
			}
			else {
				workFile.WriteLocalEntryHeader(update);
				update.OutEntry.CompressedSize = 0;
			}

		}
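		// Caller-side sketch (not part of the library): AddEntry pulls entry data from
		// an update's data source. A minimal in-memory source, assuming the public
		// Add(IStaticDataSource, string) overload is available.
		class StringDataSource : IStaticDataSource
		{
			readonly byte[] data_;

			public StringDataSource(string text)
			{
				data_ = System.Text.Encoding.UTF8.GetBytes(text);
			}

			// Called by the library when the entry's data is written.
			public Stream GetSource()
			{
				return new MemoryStream(data_);
			}
		}

		static void AddFromMemoryExample(ZipFile zipFile)
		{
			zipFile.BeginUpdate();
			zipFile.Add(new StringDataSource("hello"), "hello.txt");
			zipFile.CommitUpdate();
		}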
Example #5
0
		void UpdateCommentOnly()
		{
			var baseLength = baseStream_.Length;

			ZipHelperStream updateFile = null;

			if ( archiveStorage_.UpdateMode == FileUpdateMode.Safe ) {
				var copyStream = archiveStorage_.MakeTemporaryCopy(baseStream_);
				updateFile = new ZipHelperStream(copyStream);
				updateFile.IsStreamOwner = true;

				baseStream_.Close();
				baseStream_ = null;
			}
			else {
				if (archiveStorage_.UpdateMode == FileUpdateMode.Direct) {
					// TODO: archiveStorage wasn't originally intended for this use.
					// Need to revisit this to tidy up handling, as archive storage currently doesn't
					// handle the original stream well.
					// The problem arises when using an existing zip archive with in-memory archive storage:
					// the open stream won't support writing, but the memory storage should open the same file, not an in-memory one.

					// Basically the archive storage interface and contract need tidying up.
					baseStream_ = archiveStorage_.OpenForDirectUpdate(baseStream_);
					updateFile = new ZipHelperStream(baseStream_);
				}
				else {
					baseStream_.Close();
					baseStream_ = null;
					updateFile = new ZipHelperStream(Name);
				}
			}

			using ( updateFile ) {
				var locatedCentralDirOffset = 
					updateFile.LocateBlockWithSignature(ZipConstants.EndOfCentralDirectorySignature, 
														baseLength, ZipConstants.EndOfCentralRecordBaseSize, 0xffff);
				if ( locatedCentralDirOffset < 0 ) {
					throw new ZipException("Cannot find central directory");
				}

				const int CentralHeaderCommentSizeOffset = 16;
				updateFile.Position += CentralHeaderCommentSizeOffset;

				var rawComment = newComment_.RawComment;

				updateFile.WriteLEShort(rawComment.Length);
				updateFile.Write(rawComment, 0, rawComment.Length);
				updateFile.SetLength(updateFile.Position);
			}

			if ( archiveStorage_.UpdateMode == FileUpdateMode.Safe ) {
				Reopen(archiveStorage_.ConvertTemporaryToFinal());
			}
			else {
				ReadEntries();
			}
		}
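
		// Caller-side sketch (not part of the library): editing only the archive comment
		// takes the UpdateCommentOnly path inside CommitUpdate. The file name is illustrative.
		static void UpdateCommentExample()
		{
			using (var zipFile = new ZipFile("existing.zip")) {
				zipFile.BeginUpdate();
				zipFile.SetComment("Rebuilt comment");  // comment edited, contents untouched
				zipFile.CommitUpdate();                 // rewrites just the end of central directory record
			}
		}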
Example #6
0
		/// <summary>
		/// Get the binary data representing this instance.
		/// </summary>
		/// <returns>The raw binary data representing this instance.</returns>
		public byte[] GetData()
		{
			using (var ms = new MemoryStream())
			using (var helperStream = new ZipHelperStream(ms))
			{
				helperStream.IsStreamOwner = false;
				helperStream.WriteLEInt(0);       // Reserved
				helperStream.WriteLEShort(1);     // Tag
				helperStream.WriteLEShort(24);    // Length = 3 x 8.
				helperStream.WriteLELong(_lastModificationTime.ToFileTime());
				helperStream.WriteLELong(_lastAccessTime.ToFileTime());
				helperStream.WriteLELong(_createTime.ToFileTime());
				return ms.ToArray();
			}
		}
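
		// Layout sketch (assumption, not library code): GetData above emits 4 reserved
		// bytes followed by one attribute: tag 1, length 24, and three 8 byte FILETIME
		// values, 32 bytes in total. BitConverter assumes a little-endian host, which
		// matches the WriteLE* helpers used above.
		static void ShowNtfsExtraLayout(byte[] data)
		{
			// data is expected to come from GetData() above.
			var reserved = BitConverter.ToInt32(data, 0);   // always 0
			var tag      = BitConverter.ToInt16(data, 4);   // 1 = file times attribute
			var length   = BitConverter.ToInt16(data, 6);   // 24 = 3 x 8 bytes
			var modified = DateTime.FromFileTime(BitConverter.ToInt64(data, 8));
			var accessed = DateTime.FromFileTime(BitConverter.ToInt64(data, 16));
			var created  = DateTime.FromFileTime(BitConverter.ToInt64(data, 24));
			Console.WriteLine("{0} {1} {2} {3} {4} {5}", reserved, tag, length, modified, accessed, created);
		}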
Example #7
0
		/// <summary>
		/// Commit current updates, updating this archive.
		/// </summary>
		/// <seealso cref="BeginUpdate()"></seealso>
		/// <seealso cref="AbortUpdate"></seealso>
		/// <exception cref="ObjectDisposedException">ZipFile has been closed.</exception>
		public void CommitUpdate()
		{
			if ( isDisposed_ ) {
				throw new ObjectDisposedException("ZipFile");
			}
			
			CheckUpdating();

			try {
				updateIndex_.Clear();
				updateIndex_ = null;

				if ( contentsEdited_ ) {
					RunUpdates();
				}
				else if ( commentEdited_ ) {
					UpdateCommentOnly();
				}
				else {
					// Create an empty archive if none existed originally.
					if ( entries_.Length == 0 ) {
						var theComment = (newComment_ != null) ? newComment_.RawComment : ZipConstants.ConvertToArray(comment_);
						using ( var zhs = new ZipHelperStream(baseStream_) ) {
							zhs.WriteEndOfCentralDirectory(0, 0, 0, theComment);
						}
					}
				}

			}
			finally {
				PostUpdateCleanup();
			}
		}
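
		// Usage sketch (not part of the library): creating a new archive and committing
		// it. ZipFile.Create and AbortUpdate are the public counterparts referenced in
		// the documentation above; the file names are illustrative.
		static void CommitUpdateExample()
		{
			using (var zipFile = ZipFile.Create("new.zip")) {
				zipFile.BeginUpdate();
				try {
					zipFile.Add("report.txt");
					zipFile.CommitUpdate();   // writes entries and the central directory
				}
				catch {
					zipFile.AbortUpdate();    // discard the queued updates on failure
					throw;
				}
			}
		}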
Example #8
0
		/// <summary>
		/// Set the data from the raw values provided.
		/// </summary>
		/// <param name="data">The raw data to extract values from.</param>
		/// <param name="index">The index to start extracting values from.</param>
		/// <param name="count">The number of bytes available.</param>
		public void SetData(byte[] data, int index, int count)
		{
			using (var ms = new MemoryStream(data, index, count, false)) 
			using (var helperStream = new ZipHelperStream(ms))
			{
				helperStream.ReadLEInt(); // Reserved
				while (helperStream.Position < helperStream.Length)
				{
					var ntfsTag = helperStream.ReadLEShort();
					var ntfsLength = helperStream.ReadLEShort();
					if (ntfsTag == 1)
					{
						if (ntfsLength >= 24)
						{
							var lastModificationTicks = helperStream.ReadLELong();
							_lastModificationTime = DateTime.FromFileTime(lastModificationTicks);

							var lastAccessTicks = helperStream.ReadLELong();
							_lastAccessTime = DateTime.FromFileTime(lastAccessTicks);

							var createTimeTicks = helperStream.ReadLELong();
							_createTime = DateTime.FromFileTime(createTimeTicks);
						}
						break;
					}
					else
					{
						// An unknown NTFS tag so simply skip it.
						helperStream.Seek(ntfsLength, SeekOrigin.Current);
					}
				}
			}
		}
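
		// Round-trip sketch (assumption, not library code): build a buffer in the exact
		// shape SetData above expects (reserved int, tag 1, length 24, three FILETIMEs)
		// and feed it back in. NTTaggedData is the assumed host class of these methods,
		// and its parameterless constructor is an assumption as well.
		static void SetDataRoundTrip()
		{
			var when = DateTime.Now;

			byte[] raw;
			using (var ms = new MemoryStream())
			using (var writer = new BinaryWriter(ms)) {
				writer.Write(0);                    // reserved
				writer.Write((short)1);             // tag: file times
				writer.Write((short)24);            // length: 3 x 8 bytes
				writer.Write(when.ToFileTime());    // last modification
				writer.Write(when.ToFileTime());    // last access
				writer.Write(when.ToFileTime());    // creation
				writer.Flush();
				raw = ms.ToArray();
			}

			var ntData = new NTTaggedData();
			ntData.SetData(raw, 0, raw.Length);     // parses the attribute written above
		}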
Example #9
0
	    /// <summary>
	    /// Get the binary data representing this instance.
	    /// </summary>
	    /// <returns>The raw binary data representing this instance.</returns>
	    public byte[] GetData()
	    {
	        using (var ms = new MemoryStream())
	        using (var helperStream = new ZipHelperStream(ms))
	        {
	            helperStream.IsStreamOwner = false;
	            helperStream.WriteByte((byte)_flags);     // Flags
	            if ( (_flags & Flags.ModificationTime) != 0) {
	                var span = _modificationTime.ToUniversalTime() - new DateTime(1970, 1, 1, 0, 0, 0).ToUniversalTime();
	                var seconds = (int)span.TotalSeconds;
	                helperStream.WriteLEInt(seconds);
	            }
	            if ( (_flags & Flags.AccessTime) != 0) {
	                var span = _lastAccessTime.ToUniversalTime() - new DateTime(1970, 1, 1, 0, 0, 0).ToUniversalTime();
	                var seconds = (int)span.TotalSeconds;
	                helperStream.WriteLEInt(seconds);
	            }
	            if ( (_flags & Flags.CreateTime) != 0) {
	                var span = _createTime.ToUniversalTime() - new DateTime(1970, 1, 1, 0, 0, 0).ToUniversalTime();
	                var seconds = (int)span.TotalSeconds;
	                helperStream.WriteLEInt(seconds);
	            }
	            return ms.ToArray();
	        }
	    }
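
		// Layout sketch (assumption, not library code): GetData above emits one flags
		// byte followed by a 4 byte Unix time (seconds since 1970-01-01 UTC) for each
		// flag that is set. BitConverter assumes a little-endian host, matching WriteLEInt.
		static void ShowUnixExtraLayout(byte[] data)
		{
			var flags = data[0];   // bit 0 = modification, bit 1 = access, bit 2 = creation
			var offset = 1;
			if ((flags & 0x01) != 0) {
				var seconds = BitConverter.ToInt32(data, offset);
				offset += 4;
				Console.WriteLine("Modified: " + new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc).AddSeconds(seconds));
			}
			// Access and creation times, when present, follow in the same 4 byte format.
		}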
Example #10
0
	    /// <summary>
	    /// Set the data from the raw values provided.
	    /// </summary>
	    /// <param name="data">The raw data to extract values from.</param>
	    /// <param name="index">The index to start extracting values from.</param>
	    /// <param name="count">The number of bytes available.</param>
	    public void SetData(byte[] data, int index, int count)
	    {
	        using (var ms = new MemoryStream(data, index, count, false))
	        using (var helperStream = new ZipHelperStream(ms))
	        {
	            // bit 0           if set, modification time is present
	            // bit 1           if set, access time is present
	            // bit 2           if set, creation time is present
				
	            _flags = (Flags)helperStream.ReadByte();
	            if (((_flags & Flags.ModificationTime) != 0) && (count >= 5))
	            {
	                var iTime = helperStream.ReadLEInt();

	                _modificationTime = (new DateTime(1970, 1, 1, 0, 0, 0).ToUniversalTime() +
	                                     new TimeSpan(0, 0, 0, iTime, 0)).ToLocalTime();
	            }

	            if ((_flags & Flags.AccessTime) != 0)
	            {
	                var iTime = helperStream.ReadLEInt();

	                _lastAccessTime = (new DateTime(1970, 1, 1, 0, 0, 0).ToUniversalTime() +
	                                   new TimeSpan(0, 0, 0, iTime, 0)).ToLocalTime();
	            }
				
	            if ((_flags & Flags.CreateTime) != 0)
	            {
	                var iTime = helperStream.ReadLEInt();

	                _createTime = (new DateTime(1970, 1, 1, 0, 0, 0).ToUniversalTime() +
	                               new TimeSpan(0, 0, 0, iTime, 0)).ToLocalTime();
	            }
	        }
	    }
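
		// Round-trip sketch (assumption, not library code): build the one byte flags +
		// 4 byte seconds buffer that SetData above parses, marking only the modification
		// time as present. ExtendedUnixData is the assumed host class of this block, and
		// its parameterless constructor is an assumption as well.
		static void SetUnixDataExample()
		{
			var epoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
			var seconds = (int)(DateTime.UtcNow - epoch).TotalSeconds;

			byte[] raw;
			using (var ms = new MemoryStream())
			using (var writer = new BinaryWriter(ms)) {
				writer.Write((byte)0x01);   // bit 0: modification time present
				writer.Write(seconds);      // little-endian, as ReadLEInt expects
				writer.Flush();
				raw = ms.ToArray();
			}

			var unixData = new ExtendedUnixData();
			unixData.SetData(raw, 0, raw.Length);
		}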
Example #11
0
        /// <summary>
        /// Finishes the stream.  This will write the central directory at the
        /// end of the zip file and flush the stream.
        /// </summary>
        /// <remarks>
        /// This is automatically called when the stream is closed.
        /// </remarks>
        /// <exception cref="System.IO.IOException">
        /// An I/O error occurs.
        /// </exception>
        /// <exception cref="ZipException">
        /// Comment exceeds the maximum length<br/>
        /// Entry name exceeds the maximum length
        /// </exception>
        public override void Finish()
        {
            if (entries == null)
            {
                return;
            }

            if (curEntry != null)
            {
                CloseEntry();
            }

            long numEntries  = entries.Count;
            long sizeEntries = 0;

            foreach (ZipEntry entry in entries)
            {
                WriteLeInt(ZipConstants.CentralHeaderSignature);
                WriteLeShort(ZipConstants.VersionMadeBy);
                WriteLeShort(entry.Version);
                WriteLeShort(entry.Flags);
                WriteLeShort((short)entry.CompressionMethodForHeader);
                WriteLeInt((int)entry.DosTime);
                WriteLeInt((int)entry.Crc);

                if (entry.IsZip64Forced() ||
                    (entry.CompressedSize >= uint.MaxValue))
                {
                    WriteLeInt(-1);
                }
                else
                {
                    WriteLeInt((int)entry.CompressedSize);
                }

                if (entry.IsZip64Forced() ||
                    (entry.Size >= uint.MaxValue))
                {
                    WriteLeInt(-1);
                }
                else
                {
                    WriteLeInt((int)entry.Size);
                }

                var name = ZipConstants.ConvertToArray(entry.Flags, entry.Name);

                if (name.Length > 0xffff)
                {
                    throw new ZipException("Name too long.");
                }

                var ed = new ZipExtraData(entry.ExtraData);

                if (entry.CentralHeaderRequiresZip64)
                {
                    ed.StartNewEntry();
                    if (entry.IsZip64Forced() ||
                        (entry.Size >= 0xffffffff))
                    {
                        ed.AddLeLong(entry.Size);
                    }

                    if (entry.IsZip64Forced() ||
                        (entry.CompressedSize >= 0xffffffff))
                    {
                        ed.AddLeLong(entry.CompressedSize);
                    }

                    if (entry.Offset >= 0xffffffff)
                    {
                        ed.AddLeLong(entry.Offset);
                    }

                    ed.AddNewEntry(1);
                }
                else
                {
                    ed.Delete(1);
                }

#if !NET_1_1 && !NETCF_2_0
                if (entry.AESKeySize > 0)
                {
                    AddExtraDataAES(entry, ed);
                }
#endif
                var extra = ed.GetEntryData();

                var entryComment =
                    (entry.Comment != null) ?
                    ZipConstants.ConvertToArray(entry.Flags, entry.Comment) :
                    new byte[0];

                if (entryComment.Length > 0xffff)
                {
                    throw new ZipException("Comment too long.");
                }

                WriteLeShort(name.Length);
                WriteLeShort(extra.Length);
                WriteLeShort(entryComment.Length);
                WriteLeShort(0);                        // disk number
                WriteLeShort(0);                        // internal file attributes
                // external file attributes

                if (entry.ExternalFileAttributes != -1)
                {
                    WriteLeInt(entry.ExternalFileAttributes);
                }
                else
                {
                    if (entry.IsDirectory)                                               // mark entry as directory (from nikolam.AT.perfectinfo.com)
                    {
                        WriteLeInt(16);
                    }
                    else
                    {
                        WriteLeInt(0);
                    }
                }

                if (entry.Offset >= uint.MaxValue)
                {
                    WriteLeInt(-1);
                }
                else
                {
                    WriteLeInt((int)entry.Offset);
                }

                if (name.Length > 0)
                {
                    baseOutputStream_.Write(name, 0, name.Length);
                }

                if (extra.Length > 0)
                {
                    baseOutputStream_.Write(extra, 0, extra.Length);
                }

                if (entryComment.Length > 0)
                {
                    baseOutputStream_.Write(entryComment, 0, entryComment.Length);
                }

                sizeEntries += ZipConstants.CentralHeaderBaseSize + name.Length + extra.Length + entryComment.Length;
            }

            using (var zhs = new ZipHelperStream(baseOutputStream_)) {
                zhs.WriteEndOfCentralDirectory(numEntries, sizeEntries, offset, zipComment);
            }

            entries = null;
        }
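
        // Usage sketch (not part of the library): Finish is normally reached via Close
        // (or the end of a using block) after all entries have been written; calling it
        // explicitly first is harmless because a second call returns early once entries
        // is null. The file name and content are illustrative.
        static void WriteArchiveExample()
        {
            using (var zipStream = new ZipOutputStream(File.Create("out.zip"))) {
                zipStream.SetLevel(6);                 // deflate compression level

                var entry = new ZipEntry("readme.txt");
                zipStream.PutNextEntry(entry);

                var content = System.Text.Encoding.UTF8.GetBytes("hello");
                zipStream.Write(content, 0, content.Length);
                zipStream.CloseEntry();

                zipStream.Finish();                    // writes the central directory shown above
            }                                          // Close/Dispose would call Finish as well
        }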