/// <summary>
/// Creates a manager for the archive at <paramref name="path"/>, optionally probing
/// the file's trailing bytes to classify the archive version.
/// </summary>
/// <param name="path">Filesystem path of the archive.</param>
/// <param name="key">Key used to access the archive.</param>
/// <param name="detect_version">When true, the archive tail is read to detect the version.</param>
public ArchiveManager(string path, ArchiveKey key, bool detect_version = true)
{
    Path = path;
    Key = key;
    Stream = new ArchiveStream(path);

    if (!detect_version)
    {
        return;
    }

    // The version marker sits 4 bytes before the end of the file;
    // only the first 2 bytes of that tail are consumed here.
    Stream.Reopen(true);
    Stream.Seek(-4, SeekOrigin.End);
    short version = Stream.ReadInt16();

    if (version == 2)
    {
        Version = ArchiveVersion.V2;
    }
    else if (version == 3)
    {
        Version = ArchiveVersion.V3;
    }
    else
    {
        // Unknown marker: warn the user; Version keeps its default value.
        Utils.ShowMessage("Unknown archive type");
    }

    Stream.Close();
}
/// <summary>
/// Signals that an archive carries a signature/version combination this code does not handle.
/// </summary>
/// <param name="signature">The archive's signature.</param>
/// <param name="version">The archive's version.</param>
public UnsupportedArchiveException(ArchiveSignature signature, ArchiveVersion version)
    : base($"Unsupported archive with signature: {Enum.GetName(typeof(ArchiveSignature), signature)} ({Enum.GetName(typeof(ArchiveVersion), version)})")
{
}
/// <summary>
/// Queues an archive task for each pruned item version. Refuses to run on
/// Sitecore versions older than 6.6, where this was a removed BETA feature.
/// </summary>
/// <param name="versions">The item versions to be written to the archive.</param>
/// <exception cref="Exception">Thrown when the Sitecore version predates 6.6.</exception>
public void ArchiveItemVersions(Item[] versions)
{
    if (versions.Length == 0)
    {
        return;
    }

    var major = SitecoreVersion[0];
    var minor = SitecoreVersion[1];

    // BUGFIX: the original gate (major <= 6 && minor < 6) missed pre-6.x
    // releases whose minor is 6 or higher (e.g. 5.9). "Pre-6.6" means
    // major < 6, or major == 6 with minor < 6.
    if (major < 6 || (major == 6 && minor < 6))
    {
        throw new Exception("Saving pruned item versions to the archive was a BETA feature (for Sitecore pre-6.6) that has been removed. If you need it back, use 'Version Pruner v1.2'");
    }

    foreach (var v in versions)
    {
        // DateTime.Now retained deliberately: the archive timestamp has
        // always been recorded in local time here.
        var task = new ArchiveVersion(DateTime.Now)
        {
            ItemID = v.ID,
            DatabaseName = v.Database.Name,
            Language = v.Language.Name,
            Version = v.Version.Number,
            ArchiveName = "archive"
        };
        task.Execute();
    }
}
/// <summary>
/// Runs <c>VerifyChecksums</c> on a background thread so callers are not blocked
/// by the CPU-bound verification work.
/// </summary>
/// <param name="av">The archive version to be checked.</param>
/// <param name="files">The file paths whose checksums should be verified.</param>
/// <returns>A dictionary of (key)filepaths and (value)verification result.</returns>
public async Task<Dictionary<string, bool>> VerifyChecksumsAsync(ArchiveVersion av, List<string> files)
{
    var verification = Task.Run(() => VerifyChecksums(av, files));
    return await verification;
}
/// <summary>
/// Captures the metadata describing a single archive record.
/// </summary>
/// <param name="terminalIdentification">Identifier of the originating terminal.</param>
/// <param name="previousRecordSignature">Optional signature of the preceding record.</param>
/// <param name="created">Creation timestamp of the record.</param>
/// <param name="version">Archive format version.</param>
/// <param name="archiveType">Kind of archive this metadata belongs to.</param>
private ArchiveMetadata(string terminalIdentification, IOption<Signature> previousRecordSignature, DateTime created, ArchiveVersion version, ArchiveType archiveType)
{
    // Plain property capture; the assignments are independent of one another.
    ArchiveType = archiveType;
    Version = version;
    Created = created;
    PreviousRecordSignature = previousRecordSignature;
    TerminalIdentification = terminalIdentification;
}
/// <summary>
/// Returns all archive versions found within the given directory, including the directory itself
/// </summary>
/// <param name="directory">Directory path to check</param>
/// <returns>A list of archive version information objects</returns>
public List<ArchiveVersion> GetArchiveVersions(string directory)
{
    // Probe every subdirectory first, then the directory itself, so an
    // archive version rooted directly at 'directory' is also discovered.
    var dirs = new List<IDirectoryInfo>();
    dirs.AddRange(_fileSystem.DirectoryInfo.FromDirectoryName(directory).EnumerateDirectories());
    dirs.Add(_fileSystem.DirectoryInfo.FromDirectoryName(directory));

    var avList = new List<ArchiveVersion>();
    foreach (var curDir in dirs)
    {
        if (TryGetAvFolder(out ArchiveVersionInfo avFolder, curDir.ToString()))
        {
            avList.Add(ArchiveVersion.Create(avFolder, _fileSystem));
        }
    }

    // Present results in a stable, id-ordered sequence.
    avList.Sort((x, y) => string.Compare(x.Info.Id, y.Info.Id));
    return avList;
}
/// <summary>
/// Wires the archive up around the supplied stream and builds the
/// filename-to-entry lookup by reading the archive's entry table.
/// </summary>
/// <param name="stream">Source stream positioned at the start of the archive data.</param>
/// <param name="ownsStream">Whether this instance is responsible for disposing the stream.</param>
/// <param name="version">Requested archive version, or Autodetect.</param>
private void Initialize(Stream stream, bool ownsStream, ArchiveVersion version)
{
    StreamStartPosition = stream.Position;
    Stream = stream;
    OwnsStream = ownsStream;
    EntryMap = new Dictionary<string, ArchiveEntry>();

    // Resolve Autodetect up front so the reader always gets a concrete version.
    Version = version == ArchiveVersion.Autodetect ? DetectVersion(Stream) : version;

    var reader = new ArchiveReader(stream, Version, true);
    foreach (var entry in reader.ReadEntries(true))
    {
        EntryMap[entry.FileName] = entry;
    }
}
/// <summary>
/// Verifies the calculated checksums of archive version files against the expected values found in its index
/// </summary>
/// <param name="av">The archive version to be checked</param>
/// <param name="files">The file paths whose checksums should be verified</param>
/// <returns>A dictionary of (key)filepaths and (value)verification result</returns>
public Dictionary <string, bool> VerifyChecksums(ArchiveVersion av, List <string> files)
{
    // long so it can be updated with Interlocked from the parallel loop below.
    long checkedFiles = 0;
    int failedChecks = 0;
    int notifyFrequency = (int)Math.Ceiling((decimal)files.Count() / 100); //We want to notify at least for each 1% of files processed
    var resultDict = new ConcurrentDictionary <string, bool>();
    var expectedChecksums = av.GetChecksumDict();

    // Sort files in descending order. This will ensure that table files are validated first, giving a shorter total runtime
    files.Sort((x, y) => string.Compare(y, x));

    // NumberOfThreads == 0 means "auto": one worker per logical processor.
    int threads = 0;
    if (NumberOfThreads == 0) { threads = Environment.ProcessorCount; } else { threads = NumberOfThreads; }

    Parallel.ForEach(files, new ParallelOptions { MaxDegreeOfParallelism = threads }, file =>
    {
        int currentChecked = (Int32)Interlocked.Increment(ref checkedFiles);
        bool result = false;
        try
        {
            string relativeFilePath = av.GetRelativeFilePath(file);
            byte[] expectedCheckSum = expectedChecksums[relativeFilePath];

            // Transient IO errors (e.g. a briefly locked file) are retried up to 3 times.
            // NOTE(review): if all 3 attempts throw IOException, 'result' silently stays
            // false and the file is reported as a failed check — confirm this is intended.
            int retries = 0;
            while (retries < 3)
            {
                try
                {
                    result = CalculateChecksum(file).SequenceEqual(expectedCheckSum);
                    break;
                }
                catch (IOException)
                {
                    retries++;
                }
            }
        }
        catch (InvalidOperationException e)
        {
            // Presumably raised when the file has no entry in expectedChecksums —
            // TODO confirm: a plain Dictionary indexer would throw KeyNotFoundException
            // (not InvalidOperationException) for a missing key.
            // Never check fileIndex.xml
            if (!file.EndsWith("fileIndex.xml"))
            {
                Console.WriteLine($"The file to check was not found in the expected checksum list: {e.Message}");
                // NOTE(review): failedChecks is incremented here AND again in the
                // 'if (!result)' block below (result is still false at this point) —
                // looks like a possible double count; verify against the event consumers.
                Interlocked.Increment(ref failedChecks);
                OnVerifyFailed(file);
            }
            else
            {
                result = true;
            }
        }
        catch (Exception e)
        {
            // Any other failure marks the file as unverified (same double-count
            // caveat as above applies).
            result = false;
            Console.WriteLine($"This file could not be checked: {file} -> {e.Message}");
            Interlocked.Increment(ref failedChecks);
            OnVerifyFailed(file);
        }

        // Record the outcome; every file path is expected to be seen exactly once.
        if (!resultDict.TryAdd(file, result))
        {
            throw new InvalidOperationException($"Cannot process duplicate filepath! {file}");
        }

        if (!result)
        {
            Interlocked.Increment(ref failedChecks);
            OnVerifyFailed(file);
        }

        // Report progress roughly once per 1% of the total workload.
        if (currentChecked % notifyFrequency == 0)
        {
            OnFilesVerified(new FilesVerifiedEventArgs { ProcessedFiles = currentChecked, ErrorsCount = failedChecks });
        }
    });

    // Final notification with the grand totals after the parallel loop completes.
    OnFilesVerified(new FilesVerifiedEventArgs { ProcessedFiles = (Int32)Interlocked.Read(ref checkedFiles), ErrorsCount = failedChecks });
    return(resultDict.ToDictionary(x => x.Key, x => x.Value));
}
/// <summary>
/// Creates a reader over the given stream for the specified archive version.
/// </summary>
/// <param name="stream">Stream containing the archive data.</param>
/// <param name="version">Archive format version to read.</param>
/// <param name="leaveOpen">Whether the stream should remain open after the reader is disposed.</param>
public ArchiveReader(Stream stream, ArchiveVersion version, bool leaveOpen = false)
{
    Version = version;
    StringBuilder = new StringBuilder();
    // Archive payloads are read little-endian with ASCII-encoded text.
    Reader = new EndianBinaryReader(stream, Encoding.ASCII, leaveOpen, Endianness.LittleEndian);
}
/// <summary>
/// Opens an archive from an in-memory byte buffer; the backing stream is
/// created and owned by this instance.
/// </summary>
/// <param name="data">Raw archive bytes.</param>
/// <param name="version">Archive version, or Autodetect to probe the data.</param>
public Archive(byte[] data, ArchiveVersion version = ArchiveVersion.Autodetect)
{
    var buffer = new MemoryStream(data);
    Initialize(buffer, true, version);
}
/// <summary>
/// Opens an archive from an existing stream; pass <paramref name="ownsStream"/>
/// as false to keep the stream alive after this instance is disposed.
/// </summary>
/// <param name="stream">Stream positioned at the archive data.</param>
/// <param name="version">Archive version, or Autodetect to probe the data.</param>
/// <param name="ownsStream">Whether this instance takes ownership of the stream.</param>
public Archive(Stream stream, ArchiveVersion version = ArchiveVersion.Autodetect, bool ownsStream = true)
{
    Initialize(stream, ownsStream, version);
}
//
// Ctors
//

/// <summary>
/// Opens an archive from a file on disk; the file stream is created and
/// owned by this instance.
/// </summary>
/// <param name="filepath">Path of the archive file to open.</param>
/// <param name="version">Archive version, or Autodetect to probe the file.</param>
public Archive(string filepath, ArchiveVersion version = ArchiveVersion.Autodetect)
{
    var source = File.OpenRead(filepath);
    Initialize(source, true, version);
}
/// <summary>
/// Builds a <c>ReportedValue</c> from the archive DTO, dispatching on the
/// archive schema version.
/// </summary>
/// <param name="archive">Parsed archive document.</param>
/// <param name="version">Schema version of the archive.</param>
/// <returns>The reported value, or the collected extraction errors.</returns>
public static ITry<ReportedValue, IEnumerable<string>> Create(Dto.Archive archive, ArchiveVersion version)
{
    // Each supported schema version has its own extraction routine.
    return version
        .Match(
            ArchiveVersion.v100, _ => GetReportedValueV1(archive),
            ArchiveVersion.v400, _ => GetReportedValueV4(archive))
        .Map(value => new ReportedValue(value));
}
/// <summary>
/// Read the local file header from the input stream, assuming correctness
/// </summary>
/// <returns>Status of the underlying stream</returns>
public ZipReturn ReadHeaderQuick()
{
    try
    {
        // We assume that the file is torrentzip until proven otherwise
        _torrentZip = true;

        // Open the stream for reading
        BinaryReader br = new BinaryReader(_zipstream);

        // Set the position of the writer based on the entry information
        br.BaseStream.Seek((long)_relativeOffset, SeekOrigin.Begin);

        // If the first bytes aren't a local file header, log and return
        if (br.ReadUInt32() != Constants.LocalFileHeaderSignature)
        {
            return(ZipReturn.ZipLocalFileHeaderError);
        }

        // Now read in available information, ignoring unneeded
        _versionNeeded = (ArchiveVersion)br.ReadUInt16();
        _generalPurposeBitFlag = (GeneralPurposeBitFlag)br.ReadUInt16();

        // If the flag says there's no hash data, then we can't use quick mode
        if ((_generalPurposeBitFlag & GeneralPurposeBitFlag.ZeroedCRCAndSize) == GeneralPurposeBitFlag.ZeroedCRCAndSize)
        {
            return(ZipReturn.ZipCannotFastOpen);
        }

        _compressionMethod = (CompressionMethod)br.ReadUInt16();
        _lastMod = br.ReadUInt32();
        _crc = br.ReadUInt32();
        _compressedSize = br.ReadUInt32();
        _uncompressedSize = br.ReadUInt32();

        ushort fileNameLength = br.ReadUInt16();
        ushort extraFieldLength = br.ReadUInt16();

        byte[] fileNameBytes = br.ReadBytes(fileNameLength);
        // Code page 858 unless the language-encoding flag marks the name as UTF-8.
        _fileName = ((_generalPurposeBitFlag & GeneralPurposeBitFlag.LanguageEncodingFlag) == 0
            ? Encoding.GetEncoding(858).GetString(fileNameBytes)
            : Encoding.UTF8.GetString(fileNameBytes, 0, fileNameLength));

        byte[] extraField = br.ReadBytes(extraFieldLength);

        /*
         * Full disclosure: this next section is in GordonJ's work but I honestly
         * have no idea everything that it does. It seems to do something to figure
         * out if it's Zip64, or possibly check for random things but it uses the
         * extra field for this, which I do not fully understand. It's copied in
         * its entirety below in the hope that it makes things better...
         */

        // Walk the extra-field blocks: each is a 2-byte type id, a 2-byte payload
        // length, then the payload itself.
        _zip64 = false;
        int pos = 0;
        while (extraFieldLength > pos)
        {
            ushort type = BitConverter.ToUInt16(extraField, pos);
            pos += 2;
            ushort blockLength = BitConverter.ToUInt16(extraField, pos);
            pos += 2;

            switch (type)
            {
                // 0x0001: Zip64 extended information — 64-bit sizes replace the
                // 0xffffffff placeholders read from the fixed header.
                case 0x0001:
                    Zip64 = true;
                    // NOTE(review): the field _zip64 is reset to false above while the
                    // property Zip64 is set here — confirm both refer to the same state.
                    if (_uncompressedSize == 0xffffffff)
                    {
                        _uncompressedSize = BitConverter.ToUInt64(extraField, pos);
                        pos += 8;
                    }
                    if (_compressedSize == 0xffffffff)
                    {
                        _compressedSize = BitConverter.ToUInt64(extraField, pos);
                        pos += 8;
                    }
                    break;

                // 0x7075: Unicode Path block — a UTF-8 filename guarded by a CRC32
                // computed over the original header filename bytes.
                case 0x7075:
                    pos += 1; // skip the 1-byte version field
                    uint nameCRC32 = BitConverter.ToUInt32(extraField, pos);
                    pos += 4;

                    CRC32 crcTest = new CRC32();
                    crcTest.SlurpBlock(fileNameBytes, 0, fileNameLength);
                    uint fCRC = (uint)crcTest.Crc32Result;

                    // Stored CRC must match the header filename; otherwise the
                    // Unicode path is stale and the header is treated as invalid.
                    if (nameCRC32 != fCRC)
                    {
                        return(ZipReturn.ZipLocalFileHeaderError);
                    }

                    int charLen = blockLength - 5; // payload minus version byte and CRC
                    FileName = Encoding.UTF8.GetString(extraField, pos, charLen);
                    pos += charLen;
                    break;

                default:
                    pos += blockLength; // unknown block type: skip its payload
                    break;
            }
        }

        // Set the position of the data
        _dataLocation = (ulong)_zipstream.Position;
    }
    catch
    {
        return(ZipReturn.ZipLocalFileHeaderError);
    }

    return(ZipReturn.ZipGood);
}
/// <summary>
/// Read the central directory entry from the input stream
/// </summary>
/// <returns>Status of the underlying stream</returns>
public ZipReturn ReadCentralDirectory()
{
    try
    {
        // Open the stream for reading
        BinaryReader br = new BinaryReader(_zipstream);

        // If the first bytes aren't a central directory header, log and return
        if (br.ReadUInt32() != Constants.CentralDirectoryHeaderSignature)
        {
            return(ZipReturn.ZipCentralDirError);
        }

        // Now read in available information, skipping the unnecessary
        _versionMadeBy = (ArchiveVersion)br.ReadUInt16();
        _versionNeeded = (ArchiveVersion)br.ReadUInt16();
        _generalPurposeBitFlag = (GeneralPurposeBitFlag)br.ReadUInt16();
        _compressionMethod = (CompressionMethod)br.ReadUInt16();

        // If we have an unsupported compression method, log and return
        if (_compressionMethod != CompressionMethod.Stored && _compressionMethod != CompressionMethod.Deflated)
        {
            return(ZipReturn.ZipCentralDirError);
        }

        // Keep reading available information, skipping the unnecessary
        _lastMod = br.ReadUInt32();
        _crc = br.ReadUInt32();
        _compressedSize = br.ReadUInt32();
        _uncompressedSize = br.ReadUInt32();

        // Now store some temp vars to find the filename, extra field, and comment
        ushort fileNameLength = br.ReadUInt16();
        ushort extraFieldLength = br.ReadUInt16();
        ushort fileCommentLength = br.ReadUInt16();

        // Even more reading available information, skipping the unnecessary
        br.ReadUInt16(); // Disk number start
        _internalFileAttributes = (InternalFileAttributes)br.ReadUInt16();
        _externalFileAttributes = br.ReadUInt32();
        _relativeOffset = br.ReadUInt32();

        byte[] fileNameBytes = br.ReadBytes(fileNameLength);
        // Code page 858 unless the language-encoding flag marks the name as UTF-8.
        _fileName = ((_generalPurposeBitFlag & GeneralPurposeBitFlag.LanguageEncodingFlag) == 0
            ? Encoding.GetEncoding(858).GetString(fileNameBytes)
            : Encoding.UTF8.GetString(fileNameBytes, 0, fileNameLength));

        _extraField = br.ReadBytes(extraFieldLength);
        _comment = br.ReadBytes(fileCommentLength);

        /*
         * Full disclosure: this next section is in GordonJ's work but I honestly
         * have no idea everything that it does. It seems to do something to figure
         * out if it's Zip64, or possibly check for random things but it uses the
         * extra field for this, which I do not fully understand. It's copied in
         * its entirety below in the hope that it makes things better...
         */

        // Walk the extra-field blocks: each is a 2-byte type id, a 2-byte payload
        // length, then the payload itself.
        int pos = 0;
        while (extraFieldLength > pos)
        {
            ushort type = BitConverter.ToUInt16(_extraField, pos);
            pos += 2;
            ushort blockLength = BitConverter.ToUInt16(_extraField, pos);
            pos += 2;

            switch (type)
            {
                // 0x0001: Zip64 extended information — 64-bit values replace any
                // 0xffffffff placeholders from the fixed-size header fields.
                case 0x0001:
                    Zip64 = true;
                    // NOTE(review): UncompressedSize (property) and _compressedSize
                    // (field) are mixed here — confirm they alias the same storage
                    // as the fields assigned from the fixed header above.
                    if (UncompressedSize == 0xffffffff)
                    {
                        UncompressedSize = BitConverter.ToUInt64(_extraField, pos);
                        pos += 8;
                    }
                    if (_compressedSize == 0xffffffff)
                    {
                        _compressedSize = BitConverter.ToUInt64(_extraField, pos);
                        pos += 8;
                    }
                    if (_relativeOffset == 0xffffffff)
                    {
                        _relativeOffset = BitConverter.ToUInt64(_extraField, pos);
                        pos += 8;
                    }
                    break;

                // 0x7075: Unicode Path block — a UTF-8 filename guarded by a CRC32
                // computed over the original header filename bytes.
                case 0x7075:
                    //byte version = extraField[pos];
                    pos += 1;
                    uint nameCRC32 = BitConverter.ToUInt32(_extraField, pos);
                    pos += 4;

                    CRC32 crcTest = new CRC32();
                    crcTest.SlurpBlock(fileNameBytes, 0, fileNameLength);
                    uint fCRC = (uint)crcTest.Crc32Result;

                    // A stale Unicode path (CRC mismatch) invalidates the entry.
                    if (nameCRC32 != fCRC)
                    {
                        return(ZipReturn.ZipCentralDirError);
                    }

                    int charLen = blockLength - 5; // payload minus version byte and CRC
                    _fileName = Encoding.UTF8.GetString(_extraField, pos, charLen);
                    pos += charLen;
                    break;

                default:
                    pos += blockLength; // unknown block type: skip its payload
                    break;
            }
        }
    }
    catch
    {
        return(ZipReturn.ZipCentralDirError);
    }

    return(ZipReturn.ZipGood);
}
/// <summary>
/// Builds a <c>TaxSummary</c> from the archive DTO, dispatching on the
/// archive schema version.
/// </summary>
/// <param name="archive">Parsed archive document.</param>
/// <param name="version">Schema version of the archive.</param>
/// <returns>The tax summary, or the collected extraction errors.</returns>
public static ITry<TaxSummary, IEnumerable<string>> Create(Dto.Archive archive, ArchiveVersion version)
{
    // v1.00 and v4.00 archives store tax data differently; pick the matching parser.
    var summary = version.Match(
        ArchiveVersion.v100, _ => GetV1TaxSummary(archive),
        ArchiveVersion.v400, _ => GetV4TaxSummary(archive));
    return summary;
}