private static void ScanForExistingTargetBlocks(LocalRestoreDatabase database, byte[] blockbuffer, System.Security.Cryptography.HashAlgorithm blockhasher, System.Security.Cryptography.HashAlgorithm filehasher, Options options, RestoreResults result)
{
    // Scan existing files for existing BLOCKS
    using (var blockmarker = database.CreateBlockMarker())
    {
        var updateCount = 0L;
        foreach (var restorelist in database.GetExistingFilesWithBlocks())
        {
            var rename = !options.Overwrite;
            var targetpath = restorelist.TargetPath;
            var targetfileid = restorelist.TargetFileID;
            var targetfilehash = restorelist.TargetHash;
            if (m_systemIO.FileExists(targetpath))
            {
                try
                {
                    if (result.TaskControlRendevouz() == TaskControlState.Stop)
                        return;

                    if (rename)
                        filehasher.Initialize();

                    using (var file = m_systemIO.FileOpenReadWrite(targetpath))
                    using (var block = new Blockprocessor(file, blockbuffer))
                        foreach (var targetblock in restorelist.Blocks)
                        {
                            var size = block.Readblock();
                            if (size <= 0)
                                break;

                            if (size == targetblock.Size)
                            {
                                var key = Convert.ToBase64String(blockhasher.ComputeHash(blockbuffer, 0, size));
                                if (key == targetblock.Hash)
                                    blockmarker.SetBlockRestored(targetfileid, targetblock.Index, key, size);
                            }

                            if (rename)
                                filehasher.TransformBlock(blockbuffer, 0, size, blockbuffer, 0);
                        }

                    if (rename)
                    {
                        filehasher.TransformFinalBlock(blockbuffer, 0, 0);
                        var filekey = Convert.ToBase64String(filehasher.Hash);
                        if (filekey == targetfilehash)
                        {
                            result.AddVerboseMessage("Target file exists and is correct version: {0}", targetpath);
                            rename = false;
                        }
                        else
                        {
                            // The new file will have none of the correct blocks,
                            // even if the scanned file had some
                            blockmarker.SetAllBlocksMissing(targetfileid);
                        }
                    }

                    if (updateCount++ % 20 == 0)
                    {
                        blockmarker.UpdateProcessed(result.OperationProgressUpdater);
                        if (result.TaskControlRendevouz() == TaskControlState.Stop)
                            return;
                    }
                }
                catch (Exception ex)
                {
                    result.AddWarning(string.Format("Failed to read target file: \"{0}\", message: {1}", targetpath, ex.Message), ex);
                    if (ex is System.Threading.ThreadAbortException)
                        throw;
                }
            }
            else
            {
                result.AddVerboseMessage("Target file does not exist: {0}", targetpath);
                rename = false;
            }

            if (rename)
            {
                // Select a new filename
                var ext = m_systemIO.PathGetExtension(targetpath) ?? "";
                if (!string.IsNullOrEmpty(ext) && !ext.StartsWith("."))
                    ext = "." + ext;

                // First we try with a simple date append, assuming that there are not many conflicts there
                var newname = m_systemIO.PathChangeExtension(targetpath, null) + "." + database.RestoreTime.ToLocalTime().ToString("yyyy-MM-dd");
                var tr = newname + ext;
                var c = 0;
                while (m_systemIO.FileExists(tr) && c < 1000)
                {
                    try
                    {
                        // If we have a file with the correct name,
                        // it is most likely the file we want
                        filehasher.Initialize();

                        string key;
                        using (var file = m_systemIO.FileOpenReadWrite(tr))
                            key = Convert.ToBase64String(filehasher.ComputeHash(file));

                        if (key == targetfilehash)
                        {
                            blockmarker.SetAllBlocksRestored(targetfileid);
                            break;
                        }
                    }
                    catch (Exception ex)
                    {
                        result.AddWarning(string.Format("Failed to read candidate restore target {0}", tr), ex);
                    }

                    tr = newname + " (" + (c++).ToString() + ")" + ext;
                }

                newname = tr;
                result.AddVerboseMessage("Target file exists and will be restored to: {0}", newname);
                database.UpdateTargetPath(targetfileid, newname);
            }
        }

        blockmarker.UpdateProcessed(result.OperationProgressUpdater);
        blockmarker.Commit(result);
    }
}
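// Illustrative sketch (not part of the original code): the scan above verifies each
// block-sized chunk of an existing target file by comparing the Base64-encoded digest
// of the chunk against the recorded block hash. A minimal stand-alone version of that
// comparison could look like the hypothetical helper below; SHA-256 is assumed as the
// block hash algorithm, while the actual code uses whatever is passed as blockhasher.
private static bool BlockMatches(byte[] buffer, int count, string expectedBase64Hash)
{
    using (var hasher = System.Security.Cryptography.SHA256.Create())
        return Convert.ToBase64String(hasher.ComputeHash(buffer, 0, count)) == expectedBase64Hash;
}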
private static void ScanForExistingSourceBlocks(LocalRestoreDatabase database, Options options, byte[] blockbuffer, System.Security.Cryptography.HashAlgorithm hasher, RestoreResults result)
{
    // Fill BLOCKS with data from known local source files
    using (var blockmarker = database.CreateBlockMarker())
    {
        var updateCount = 0L;
        foreach (var restorelist in database.GetFilesAndSourceBlocks())
        {
            var targetpath = restorelist.TargetPath;
            var targetfileid = restorelist.TargetFileID;
            var patched = false;
            try
            {
                if (result.TaskControlRendevouz() == TaskControlState.Stop)
                    return;

                var folderpath = m_systemIO.PathGetDirectoryName(targetpath);
                if (!options.Dryrun && !m_systemIO.DirectoryExists(folderpath))
                {
                    result.AddWarning(string.Format("Creating missing folder {0} for file {1}", folderpath, targetpath), null);
                    m_systemIO.DirectoryCreate(folderpath);
                }

                using (var file = options.Dryrun ? null : m_systemIO.FileOpenReadWrite(targetpath))
                using (var block = new Blockprocessor(file, blockbuffer))
                    foreach (var targetblock in restorelist.Blocks)
                    {
                        if (!options.Dryrun)
                            file.Position = targetblock.Offset;

                        foreach (var source in targetblock.Blocksources)
                        {
                            try
                            {
                                if (result.TaskControlRendevouz() == TaskControlState.Stop)
                                    return;

                                if (m_systemIO.FileExists(source.Path))
                                    using (var sourcefile = m_systemIO.FileOpenRead(source.Path))
                                    {
                                        sourcefile.Position = source.Offset;
                                        var size = sourcefile.Read(blockbuffer, 0, blockbuffer.Length);
                                        if (size == targetblock.Size)
                                        {
                                            var key = Convert.ToBase64String(hasher.ComputeHash(blockbuffer, 0, size));
                                            if (key == targetblock.Hash)
                                            {
                                                patched = true;
                                                if (!options.Dryrun)
                                                    file.Write(blockbuffer, 0, size);

                                                blockmarker.SetBlockRestored(targetfileid, targetblock.Index, key, targetblock.Size);
                                                break;
                                            }
                                        }
                                    }
                            }
                            catch (Exception ex)
                            {
                                result.AddWarning(string.Format("Failed to patch file: \"{0}\" with data from local file \"{1}\", message: {2}", targetpath, source.Path, ex.Message), ex);
                                if (ex is System.Threading.ThreadAbortException)
                                    throw;
                            }
                        }
                    }

                if (updateCount++ % 20 == 0)
                    blockmarker.UpdateProcessed(result.OperationProgressUpdater);
            }
            catch (Exception ex)
            {
                result.AddWarning(string.Format("Failed to patch file: \"{0}\" with local data, message: {1}", targetpath, ex.Message), ex);
            }

            if (patched)
                result.AddVerboseMessage("Target file is patched with some local data: {0}", targetpath);
            else
                result.AddVerboseMessage("Target file is not patched with any local data: {0}", targetpath);

            if (patched && options.Dryrun)
                result.AddDryrunMessage(string.Format("Would patch file with local data: {0}", targetpath));
        }

        blockmarker.UpdateProcessed(result.OperationProgressUpdater);
        blockmarker.Commit(result);
    }
}
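// Illustrative sketch (not part of the original code): the essence of the source-block
// patching loop above, isolated into a hypothetical helper. A candidate block is read
// from the local source file at the recorded offset, verified by hash, and only written
// to the target when the hash matches. SHA-256 is assumed here; the actual code uses
// whatever algorithm is passed in as hasher.
private static bool TryPatchBlockFromLocalSource(System.IO.Stream target, long targetOffset, string sourcePath, long sourceOffset, int blockSize, string expectedBase64Hash, byte[] buffer)
{
    using (var source = System.IO.File.OpenRead(sourcePath))
    {
        source.Position = sourceOffset;
        var size = source.Read(buffer, 0, blockSize);
        if (size != blockSize)
            return false; // short read, cannot be the block we want

        using (var hasher = System.Security.Cryptography.SHA256.Create())
            if (Convert.ToBase64String(hasher.ComputeHash(buffer, 0, size)) != expectedBase64Hash)
                return false; // content has changed since the backup was made

        target.Position = targetOffset;
        target.Write(buffer, 0, size);
        return true;
    }
}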
private bool HandleFilesystemEntry(string path, System.IO.FileAttributes attributes)
{
    // If we lost the connection, there is no point in keeping on processing
    if (m_backend.HasDied)
        throw m_backend.LastException;

    try
    {
        m_result.OperationProgressUpdater.StartFile(path, -1);

        if (m_backendLogFlushTimer < DateTime.Now)
        {
            m_backendLogFlushTimer = DateTime.Now.Add(FLUSH_TIMESPAN);
            m_backend.FlushDbMessages(m_database, null);
        }

        if ((attributes & FileAttributes.ReparsePoint) == FileAttributes.ReparsePoint)
        {
            if (m_options.SymlinkPolicy == Options.SymlinkStrategy.Ignore)
            {
                m_result.AddVerboseMessage("Ignoring symlink {0}", path);
                return false;
            }

            if (m_options.SymlinkPolicy == Options.SymlinkStrategy.Store)
            {
                Dictionary<string, string> metadata;

                if (m_options.StoreMetadata)
                {
                    metadata = m_snapshot.GetMetadata(path);
                    if (metadata == null)
                        metadata = new Dictionary<string, string>();

                    if (!metadata.ContainsKey("CoreAttributes"))
                        metadata["CoreAttributes"] = attributes.ToString();
                    if (!metadata.ContainsKey("CoreLastWritetime"))
                        metadata["CoreLastWritetime"] = m_snapshot.GetLastWriteTimeUtc(path).Ticks.ToString();
                }
                else
                {
                    metadata = new Dictionary<string, string>();
                }

                if (!metadata.ContainsKey("CoreSymlinkTarget"))
                    metadata["CoreSymlinkTarget"] = m_snapshot.GetSymlinkTarget(path);

                var metahash = Utility.WrapMetadata(metadata, m_options);
                AddSymlinkToOutput(path, DateTime.UtcNow, metahash);

                m_result.AddVerboseMessage("Stored symlink {0}", path);
                // Do not recurse symlinks
                return false;
            }
        }

        if ((attributes & FileAttributes.Directory) == FileAttributes.Directory)
        {
            IMetahash metahash;

            if (m_options.StoreMetadata)
            {
                Dictionary<string, string> metadata = m_snapshot.GetMetadata(path);
                if (metadata == null)
                    metadata = new Dictionary<string, string>();

                if (!metadata.ContainsKey("CoreAttributes"))
                    metadata["CoreAttributes"] = attributes.ToString();
                if (!metadata.ContainsKey("CoreLastWritetime"))
                    metadata["CoreLastWritetime"] = m_snapshot.GetLastWriteTimeUtc(path).Ticks.ToString();

                metahash = Utility.WrapMetadata(metadata, m_options);
            }
            else
            {
                metahash = EMPTY_METADATA;
            }

            m_result.AddVerboseMessage("Adding directory {0}", path);
            AddFolderToOutput(path, DateTime.UtcNow, metahash);
            return true;
        }

        m_result.OperationProgressUpdater.UpdatefilesProcessed(++m_result.ExaminedFiles, m_result.SizeOfExaminedFiles);

        bool changed = false;

        // The time we scan
        DateTime scantime = DateTime.UtcNow;
        // Last scan time
        DateTime oldScanned;
        // Last file modification
        DateTime lastModified = m_snapshot.GetLastWriteTimeUtc(path);
        var oldId = m_database.GetFileEntry(path, out oldScanned);

        long filestatsize = m_snapshot.GetFileSize(path);

        if ((oldId < 0 || m_options.DisableFiletimeCheck || LocalDatabase.NormalizeDateTime(lastModified) >= oldScanned) && (m_options.SkipFilesLargerThan == long.MaxValue || m_options.SkipFilesLargerThan == 0 || filestatsize < m_options.SkipFilesLargerThan))
        {
            m_result.AddVerboseMessage("Checking file for changes {0}", path);
            m_result.OpenedFiles++;

            long filesize = 0;
            IMetahash metahashandsize;

            if (m_options.StoreMetadata)
            {
                Dictionary<string, string> metadata = m_snapshot.GetMetadata(path);
                if (metadata == null)
                    metadata = new Dictionary<string, string>();

                if (!metadata.ContainsKey("CoreAttributes"))
                    metadata["CoreAttributes"] = attributes.ToString();
                if (!metadata.ContainsKey("CoreLastWritetime"))
                    metadata["CoreLastWritetime"] = lastModified.Ticks.ToString();

                metahashandsize = Utility.WrapMetadata(metadata, m_options);
            }
            else
            {
                metahashandsize = EMPTY_METADATA;
            }

            var hint = m_options.GetCompressionHintFromFilename(path);
            var oldHash = oldId < 0 ? null : m_database.GetFileHash(oldId);

            using (var blocklisthashes = new Library.Utility.FileBackedStringList())
            using (var hashcollector = new Library.Utility.FileBackedStringList())
            {
                using (var fs = new Blockprocessor(m_snapshot.OpenRead(path), m_blockbuffer))
                {
                    try
                    {
                        m_result.OperationProgressUpdater.StartFile(path, fs.Length);
                    }
                    catch (Exception ex)
                    {
                        m_result.AddWarning(string.Format("Failed to read file length for file {0}", path), ex);
                    }

                    int blocklistoffset = 0;

                    m_filehasher.Initialize();

                    var offset = 0;
                    var remaining = fs.Readblock();

                    do
                    {
                        var size = Math.Min(m_blocksize, remaining);

                        m_filehasher.TransformBlock(m_blockbuffer, offset, size, m_blockbuffer, offset);
                        var blockkey = m_blockhasher.ComputeHash(m_blockbuffer, offset, size);
                        if (m_blocklistbuffer.Length - blocklistoffset < blockkey.Length)
                        {
                            var blkey = Convert.ToBase64String(m_blockhasher.ComputeHash(m_blocklistbuffer, 0, blocklistoffset));
                            blocklisthashes.Add(blkey);
                            AddBlockToOutput(blkey, m_blocklistbuffer, 0, blocklistoffset, CompressionHint.Noncompressible, true);
                            blocklistoffset = 0;
                        }

                        Array.Copy(blockkey, 0, m_blocklistbuffer, blocklistoffset, blockkey.Length);
                        blocklistoffset += blockkey.Length;

                        var key = Convert.ToBase64String(blockkey);
                        AddBlockToOutput(key, m_blockbuffer, offset, size, hint, false);
                        hashcollector.Add(key);
                        filesize += size;

                        m_result.OperationProgressUpdater.UpdateFileProgress(filesize);
                        if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
                            return false;

                        remaining -= size;
                        offset += size;

                        if (remaining == 0)
                        {
                            offset = 0;
                            remaining = fs.Readblock();
                        }

                    } while (remaining > 0);

                    // If all fits in a single block, don't bother with blocklists
                    if (hashcollector.Count > 1)
                    {
                        var blkeyfinal = Convert.ToBase64String(m_blockhasher.ComputeHash(m_blocklistbuffer, 0, blocklistoffset));
                        blocklisthashes.Add(blkeyfinal);
                        AddBlockToOutput(blkeyfinal, m_blocklistbuffer, 0, blocklistoffset, CompressionHint.Noncompressible, true);
                    }
                }

                m_result.SizeOfOpenedFiles += filesize;
                m_filehasher.TransformFinalBlock(m_blockbuffer, 0, 0);

                var filekey = Convert.ToBase64String(m_filehasher.Hash);
                if (oldHash != filekey)
                {
                    if (oldHash == null)
                        m_result.AddVerboseMessage("New file {0}", path);
                    else
                        m_result.AddVerboseMessage("File has changed {0}", path);

                    if (oldId < 0)
                    {
                        m_result.AddedFiles++;
                        m_result.SizeOfAddedFiles += filesize;

                        if (m_options.Dryrun)
                            m_result.AddDryrunMessage(string.Format("Would add new file {0}, size {1}", path, Library.Utility.Utility.FormatSizeString(filesize)));
                    }
                    else
                    {
                        m_result.ModifiedFiles++;
                        m_result.SizeOfModifiedFiles += filesize;

                        if (m_options.Dryrun)
                            m_result.AddDryrunMessage(string.Format("Would add changed file {0}, size {1}", path, Library.Utility.Utility.FormatSizeString(filesize)));
                    }

                    AddFileToOutput(path, filesize, scantime, metahashandsize, hashcollector, filekey, blocklisthashes);
                    changed = true;
                }
                else
                {
                    // When we write the file to output, update the scan time
                    oldScanned = scantime;
                    m_result.AddVerboseMessage("File has not changed {0}", path);
                }
            }
        }
        else
        {
            if (m_options.SkipFilesLargerThan == long.MaxValue || m_options.SkipFilesLargerThan == 0 || m_snapshot.GetFileSize(path) < m_options.SkipFilesLargerThan)
                m_result.AddVerboseMessage("Skipped checking file, because timestamp was not updated {0}", path);
            else
                m_result.AddVerboseMessage("Skipped checking file, because the size exceeds limit {0}", path);
        }

        if (!changed)
            AddUnmodifiedFile(oldId, oldScanned);

        m_result.SizeOfExaminedFiles += filestatsize;
        if (filestatsize != 0)
            m_result.OperationProgressUpdater.UpdatefilesProcessed(m_result.ExaminedFiles, m_result.SizeOfExaminedFiles);
    }
    catch (Exception ex)
    {
        m_result.AddWarning(string.Format("Failed to process path: {0}", path), ex);
        m_result.FilesWithError++;
    }

    return true;
}
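// Illustrative sketch (not part of the original code): HandleFilesystemEntry uses a
// two-level hashing scheme. The raw hash of every data block is appended to a blocklist
// buffer; whenever that buffer fills up, it is itself stored as a block and its hash is
// recorded as a "blocklist hash" for the file. A simplified stand-alone version of the
// rollup, assuming SHA-256 and always emitting the final blocklist (the original skips
// blocklists entirely when the file fits in a single block):
private static System.Collections.Generic.List<string> ComputeBlocklistHashes(System.Collections.Generic.IEnumerable<byte[]> blockHashes, int blocklistBufferSize)
{
    var blocklisthashes = new System.Collections.Generic.List<string>();
    var buffer = new byte[blocklistBufferSize];
    var offset = 0;
    using (var hasher = System.Security.Cryptography.SHA256.Create())
    {
        foreach (var blockhash in blockHashes)
        {
            // Emit a blocklist block when the next hash no longer fits
            if (buffer.Length - offset < blockhash.Length)
            {
                blocklisthashes.Add(Convert.ToBase64String(hasher.ComputeHash(buffer, 0, offset)));
                offset = 0;
            }

            Array.Copy(blockhash, 0, buffer, offset, blockhash.Length);
            offset += blockhash.Length;
        }

        if (offset > 0)
            blocklisthashes.Add(Convert.ToBase64String(hasher.ComputeHash(buffer, 0, offset)));
    }
    return blocklisthashes;
}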
private static void ScanForExistingTargetBlocks(LocalRestoreDatabase database, byte[] blockbuffer, System.Security.Cryptography.HashAlgorithm blockhasher, System.Security.Cryptography.HashAlgorithm filehasher, Options options, RestoreResults result)
{
    // Scan existing files for existing BLOCKS
    using (var blockmarker = database.CreateBlockMarker())
    {
        var updateCount = 0L;
        foreach (var restorelist in database.GetExistingFilesWithBlocks())
        {
            var rename = !options.Overwrite;
            var targetpath = restorelist.TargetPath;
            var targetfileid = restorelist.TargetFileID;
            var targetfilehash = restorelist.TargetHash;
            var targetfilelength = restorelist.Length;
            if (m_systemIO.FileExists(targetpath))
            {
                try
                {
                    if (result.TaskControlRendevouz() == TaskControlState.Stop)
                        return;

                    var currentfilelength = m_systemIO.FileLength(targetpath);
                    var wasTruncated = false;

                    // Adjust file length in overwrite mode if necessary (smaller is ok, will be extended during restore).
                    // We do it before scanning for blocks. This allows full verification on files that only need to
                    // be truncated (e.g. log files that have only been appended to).
                    if (!rename && currentfilelength > targetfilelength)
                    {
                        var currentAttr = m_systemIO.GetFileAttributes(targetpath);
                        if ((currentAttr & System.IO.FileAttributes.ReadOnly) != 0) // clear readonly attribute
                        {
                            if (options.Dryrun)
                                result.AddDryrunMessage(string.Format("Would reset read-only attribute on file: {0}", targetpath));
                            else
                                m_systemIO.SetFileAttributes(targetpath, currentAttr & ~System.IO.FileAttributes.ReadOnly);
                        }

                        if (options.Dryrun)
                            result.AddDryrunMessage(string.Format("Would truncate file '{0}' to length of {1:N0} bytes", targetpath, targetfilelength));
                        else
                        {
                            using (var file = m_systemIO.FileOpenWrite(targetpath))
                                file.SetLength(targetfilelength);
                            currentfilelength = targetfilelength;
                        }
                        wasTruncated = true;
                    }

                    // If the file size does not match and we have to rename on conflict,
                    // the whole scan can be skipped here because all blocks have to be restored anyway.
                    // For the other cases, we will check block and file hashes and look for blocks
                    // to be restored and files that can already be verified.
                    if (!rename || currentfilelength == targetfilelength)
                    {
                        // A file hash for verification will only be necessary if the file has exactly
                        // the wanted size, so we have a chance to already mark the file as data-verified.
                        bool calcFileHash = (currentfilelength == targetfilelength);
                        if (calcFileHash)
                            filehasher.Initialize();

                        using (var file = m_systemIO.FileOpenRead(targetpath))
                        using (var block = new Blockprocessor(file, blockbuffer))
                            foreach (var targetblock in restorelist.Blocks)
                            {
                                var size = block.Readblock();
                                if (size <= 0)
                                    break;

                                //TODO: Handle Metadata

                                bool blockhashmatch = false;
                                if (size == targetblock.Size)
                                {
                                    // Parallelize file hash calculation on rename. Running read-only on same array should not cause conflicts or races.
                                    // Actually, in future always calculate the file hash and mark the file data as already verified.
                                    System.Threading.Tasks.Task calcFileHashTask = null;
                                    if (calcFileHash)
                                        calcFileHashTask = System.Threading.Tasks.Task.Run(
                                            () => filehasher.TransformBlock(blockbuffer, 0, size, blockbuffer, 0));

                                    var key = Convert.ToBase64String(blockhasher.ComputeHash(blockbuffer, 0, size));

                                    if (calcFileHashTask != null)
                                        calcFileHashTask.Wait(); // wait because blockbuffer will be overwritten.

                                    if (key == targetblock.Hash)
                                    {
                                        blockmarker.SetBlockRestored(targetfileid, targetblock.Index, key, size, false);
                                        blockhashmatch = true;
                                    }
                                }

                                if (calcFileHash && !blockhashmatch) // will not be necessary anymore
                                {
                                    filehasher.TransformFinalBlock(blockbuffer, 0, 0); // So a new initialize will not throw
                                    calcFileHash = false;
                                    if (rename) // file does not match. So break.
                                        break;
                                }
                            }

                        bool fullfilehashmatch = false;
                        if (calcFileHash) // now check if files are identical
                        {
                            filehasher.TransformFinalBlock(blockbuffer, 0, 0);
                            var filekey = Convert.ToBase64String(filehasher.Hash);
                            fullfilehashmatch = (filekey == targetfilehash);
                        }

                        if (!rename && !fullfilehashmatch && !wasTruncated) // Reset read-only attribute (if set) to overwrite
                        {
                            var currentAttr = m_systemIO.GetFileAttributes(targetpath);
                            if ((currentAttr & System.IO.FileAttributes.ReadOnly) != 0)
                            {
                                if (options.Dryrun)
                                    result.AddDryrunMessage(string.Format("Would reset read-only attribute on file: {0}", targetpath));
                                else
                                    m_systemIO.SetFileAttributes(targetpath, currentAttr & ~System.IO.FileAttributes.ReadOnly);
                            }
                        }

                        if (fullfilehashmatch)
                        {
                            //TODO: Check metadata to trigger rename? If metadata changed, it will still be restored for the file in-place.
                            blockmarker.SetFileDataVerified(targetfileid);
                            result.AddVerboseMessage("Target file exists{1} and is correct version: {0}", targetpath, wasTruncated ? " (but was truncated)" : "");
                            rename = false;
                        }
                        else if (rename)
                        {
                            // The new file will have none of the correct blocks,
                            // even if the scanned file had some
                            blockmarker.SetAllBlocksMissing(targetfileid);
                        }
                    }

                    if ((++updateCount) % 20 == 0)
                    {
                        blockmarker.UpdateProcessed(result.OperationProgressUpdater);
                        if (result.TaskControlRendevouz() == TaskControlState.Stop)
                            return;
                    }
                }
                catch (Exception ex)
                {
                    result.AddWarning(string.Format("Failed to read target file: \"{0}\", message: {1}", targetpath, ex.Message), ex);
                    if (ex is System.Threading.ThreadAbortException)
                        throw;
                }
            }
            else
            {
                result.AddVerboseMessage("Target file does not exist: {0}", targetpath);
                rename = false;
            }

            if (rename)
            {
                // Select a new filename
                var ext = m_systemIO.PathGetExtension(targetpath) ?? "";
                if (!string.IsNullOrEmpty(ext) && !ext.StartsWith("."))
                    ext = "." + ext;

                // First we try with a simple date append, assuming that there are not many conflicts there
                var newname = m_systemIO.PathChangeExtension(targetpath, null) + "." + database.RestoreTime.ToLocalTime().ToString("yyyy-MM-dd", System.Globalization.CultureInfo.InvariantCulture);
                var tr = newname + ext;
                var c = 0;
                while (m_systemIO.FileExists(tr) && c < 1000)
                {
                    try
                    {
                        // If we have a file with the correct name,
                        // it is most likely the file we want
                        filehasher.Initialize();

                        string key;
                        using (var file = m_systemIO.FileOpenRead(tr))
                            key = Convert.ToBase64String(filehasher.ComputeHash(file));

                        if (key == targetfilehash)
                        {
                            //TODO: Also needs metadata check to make correct decision.
                            // We stick to the policy to restore metadata in place, if data ok. So, metadata block may be restored.
                            blockmarker.SetAllBlocksRestored(targetfileid, false);
                            blockmarker.SetFileDataVerified(targetfileid);
                            break;
                        }
                    }
                    catch (Exception ex)
                    {
                        result.AddWarning(string.Format("Failed to read candidate restore target {0}", tr), ex);
                    }

                    tr = newname + " (" + (c++).ToString() + ")" + ext;
                }

                newname = tr;
                result.AddVerboseMessage("Target file exists and will be restored to: {0}", newname);
                database.UpdateTargetPath(targetfileid, newname);
            }
        }

        blockmarker.UpdateProcessed(result.OperationProgressUpdater);
        blockmarker.Commit(result);
    }
}
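// Illustrative sketch (not part of the original code): the rename policy used above,
// in isolation. The restore date is appended before the extension first; if that name
// is also taken, a " (n)" counter disambiguates, giving up after 1000 attempts. The
// hash check against existing candidate files is omitted here. GenerateRenamedPath and
// the fileExists delegate are hypothetical stand-ins for the m_systemIO calls in the
// actual code.
private static string GenerateRenamedPath(string targetpath, DateTime restoreTime, Func<string, bool> fileExists)
{
    var ext = System.IO.Path.GetExtension(targetpath) ?? "";
    if (!string.IsNullOrEmpty(ext) && !ext.StartsWith("."))
        ext = "." + ext;

    var newname = System.IO.Path.ChangeExtension(targetpath, null) + "." + restoreTime.ToLocalTime().ToString("yyyy-MM-dd", System.Globalization.CultureInfo.InvariantCulture);
    var tr = newname + ext;
    var c = 0;
    while (fileExists(tr) && c < 1000)
        tr = newname + " (" + (c++).ToString() + ")" + ext;

    return tr;
}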