/// <summary>
/// Runs the purge-files operation, removing entries matching either a filter
/// expression or an explicit list of paths from existing backup sets.
/// </summary>
/// <param name="args">Commandline arguments; args[0] is the backend URL, any following entries are paths to remove.</param>
/// <param name="options">Parsed commandline options.</param>
/// <param name="filter">Filter expression from the commandline; may be null or empty.</param>
/// <returns>0 on success, 200 on an invalid argument combination, or the wrong-argument-count code.</returns>
public static int PurgeFiles(List<string> args, Dictionary<string, string> options, Library.Utility.IFilter filter)
{
    if (args.Count < 1)
        return PrintWrongNumberOfArguments(args, 1);

    var backend = args[0];
    var paths = args.Skip(1).ToArray();

    if (paths.Length > 0)
    {
        // Paths and filters are mutually exclusive ways of selecting the files to purge
        if (filter == null || filter.Empty)
        {
            filter = new Library.Utility.FilterExpression(paths);
        }
        else
        {
            Console.WriteLine("You cannot combine filters and paths on the commandline");
            return 200;
        }
    }
    else if (filter == null || filter.Empty)
    {
        Console.WriteLine("You must provide either filename filters, or a list of paths to remove");
        return 200;
    }

    // Use the previously extracted backend url instead of re-reading args[0]
    using (var i = new Library.Main.Controller(backend, options, new ConsoleOutput(options)))
        i.PurgeFiles(filter);

    return 0;
}
/// <summary>
/// Simulates a backup by enumerating all source files and folders that pass the
/// filters, reporting what would be included or excluded without writing any data.
/// </summary>
/// <param name="sources">The source folders to enumerate.</param>
/// <param name="filter">The user-supplied filter to apply.</param>
public void Run(string[] sources, Library.Utility.IFilter filter)
{
    var storeSymlinks = m_options.SymlinkPolicy == Duplicati.Library.Main.Options.SymlinkStrategy.Store;
    var sourcefilter = new Library.Utility.FilterExpression(sources, true);

    using (var snapshot = BackupHandler.GetSnapshot(sources, m_options, m_result))
    {
        foreach (var path in snapshot.EnumerateFilesAndFolders(new BackupHandler.FilterHandler(snapshot, m_options.FileAttributeFilter, sourcefilter, filter, m_options.SymlinkPolicy, m_options.HardlinkPolicy, m_result).AttributeFilter))
        {
            var fa = FileAttributes.Normal;
            try
            {
                fa = snapshot.GetAttributes(path);
            }
            catch (Exception ex)
            {
                // Best-effort: report the failure instead of silently assuming Normal attributes
                m_result.AddVerboseMessage("Failed to read attributes from {0}: {1}", path, ex.Message);
            }

            if (storeSymlinks && ((fa & FileAttributes.ReparsePoint) == FileAttributes.ReparsePoint))
            {
                m_result.AddVerboseMessage("Including symlink: {0}", path);
            }
            else if ((fa & FileAttributes.Directory) == FileAttributes.Directory)
            {
                m_result.AddVerboseMessage("Including folder: {0}", path);
            }
            else
            {
                m_result.FileCount++;
                var size = -1L;
                try
                {
                    size = snapshot.GetFileSize(path);
                    m_result.FileSize += size;
                }
                catch (Exception ex)
                {
                    m_result.AddVerboseMessage("Failed to read length of file {0}: {1}", path, ex.Message);
                }

                // Note: "size < 0" (not "<= 0") — a zero-byte file has a known size;
                // only the -1 sentinel from a failed size read is "unknown"
                if (m_options.SkipFilesLargerThan == long.MaxValue || m_options.SkipFilesLargerThan == 0 || size < m_options.SkipFilesLargerThan)
                {
                    m_result.AddVerboseMessage("Including file: {0} ({1})", path, size < 0 ? "unknown" : Duplicati.Library.Utility.Utility.FormatSizeString(size));
                }
                else
                {
                    m_result.AddVerboseMessage("Excluding file due to size: {0} ({1})", path, size < 0 ? "unknown" : Duplicati.Library.Utility.Utility.FormatSizeString(size));
                }
            }
        }
    }
}
/// <summary>
/// Enumerates the source files and folders that pass the filters, reporting
/// each entry as included or excluded, without performing an actual backup.
/// </summary>
/// <param name="sources">The source folders to scan.</param>
/// <param name="filter">The user-supplied filter to apply.</param>
public void Run(string[] sources, Library.Utility.IFilter filter)
{
    var keepSymlinks = m_options.SymlinkPolicy == Duplicati.Library.Main.Options.SymlinkStrategy.Store;
    var sourceFilter = new Library.Utility.FilterExpression(sources, true);

    using (var snap = BackupHandler.GetSnapshot(sources, m_options, m_result))
    {
        var handler = new BackupHandler.FilterHandler(snap, m_options.FileAttributeFilter, sourceFilter, filter, m_options.SymlinkPolicy, m_options.HardlinkPolicy, m_result);
        foreach (var entry in snap.EnumerateFilesAndFolders(handler.AttributeFilter))
        {
            // Attribute reads are best-effort; fall back to Normal on failure
            var attrs = FileAttributes.Normal;
            try { attrs = snap.GetAttributes(entry); }
            catch { }

            if (keepSymlinks && (attrs & FileAttributes.ReparsePoint) == FileAttributes.ReparsePoint)
            {
                m_result.AddVerboseMessage("Including symlink: {0}", entry);
                continue;
            }

            if ((attrs & FileAttributes.Directory) == FileAttributes.Directory)
            {
                m_result.AddVerboseMessage("Including folder: {0}", entry);
                continue;
            }

            // A regular file: tally it, then decide include/exclude by size
            m_result.FileCount++;
            var length = -1L;
            try
            {
                length = snap.GetFileSize(entry);
                m_result.FileSize += length;
            }
            catch { }

            var sizeText = length <= 0 ? "unknown" : Duplicati.Library.Utility.Utility.FormatSizeString(length);
            var withinLimit = m_options.SkipFilesLargerThan == long.MaxValue || m_options.SkipFilesLargerThan == 0 || length < m_options.SkipFilesLargerThan;
            if (withinLimit)
                m_result.AddVerboseMessage("Including file: {0} ({1})", entry, sizeText);
            else
                m_result.AddVerboseMessage("Excluding file due to size: {0} ({1})", entry, sizeText);
        }
    }
}
/// <summary>
/// Performs a dry-run enumeration of the backup sources, logging for each path
/// whether it would be stored as a symlink, included as a folder or file, or
/// excluded due to its size.
/// </summary>
/// <param name="sources">The source folders to scan.</param>
/// <param name="filter">The user-supplied filter to apply.</param>
public void Run(string[] sources, Library.Utility.IFilter filter)
{
    var keepSymlinks = m_options.SymlinkPolicy == Duplicati.Library.Main.Options.SymlinkStrategy.Store;
    var sourceFilter = new Library.Utility.FilterExpression(sources, true);

    using (var snap = BackupHandler.GetSnapshot(sources, m_options))
    {
        var handler = new BackupHandler.FilterHandler(snap, m_options.FileAttributeFilter, sourceFilter, filter, m_options.SymlinkPolicy, m_options.HardlinkPolicy);
        foreach (var entry in handler.EnumerateFilesAndFolders())
        {
            // Attribute reads are best-effort; log and fall back to Normal on failure
            var attrs = FileAttributes.Normal;
            try
            {
                attrs = snap.GetAttributes(entry);
            }
            catch (Exception ex)
            {
                Logging.Log.WriteVerboseMessage(LOGTAG, "FailedAttributeRead", "Failed to read attributes from {0}: {1}", entry, ex.Message);
            }

            if (keepSymlinks && snap.IsSymlink(entry, attrs))
            {
                Logging.Log.WriteVerboseMessage(LOGTAG, "StoreSymlink", "Storing symlink: {0}", entry);
                continue;
            }

            if ((attrs & FileAttributes.Directory) == FileAttributes.Directory)
            {
                Logging.Log.WriteVerboseMessage(LOGTAG, "AddDirectory", "Including folder {0}", entry);
                continue;
            }

            // A regular file: tally it, then decide include/exclude by size
            m_result.FileCount++;
            var length = -1L;
            try
            {
                length = snap.GetFileSize(entry);
                m_result.FileSize += length;
            }
            catch (Exception ex)
            {
                Logging.Log.WriteVerboseMessage(LOGTAG, "SizeReadFailed", "Failed to read length of file {0}: {1}", entry, ex.Message);
            }

            // -1 marks a failed size read; a zero-byte file is still a known size
            var sizeText = length < 0 ? "unknown" : Duplicati.Library.Utility.Utility.FormatSizeString(length);
            if (m_options.SkipFilesLargerThan == long.MaxValue || m_options.SkipFilesLargerThan == 0 || length < m_options.SkipFilesLargerThan)
                Logging.Log.WriteVerboseMessage(LOGTAG, "IncludeFile", "Including file: {0} ({1})", entry, sizeText);
            else
                Logging.Log.WriteVerboseMessage(LOGTAG, "ExcludeLargeFile", "Excluding file due to size: {0} ({1})", entry, sizeText);
        }
    }
}
/// <summary>
/// Lists backup contents: filesets and, optionally, the files they contain.
/// Prefers a fast query against the local database; when none exists, falls
/// back to downloading and reading fileset volumes from the remote store.
/// </summary>
/// <param name="filterstrings">Commandline filter strings; may be null for an unfiltered listing.</param>
/// <param name="compositefilter">An additional filter joined with the parsed filter strings; may be null.</param>
public void Run(IEnumerable<string> filterstrings = null, Library.Utility.IFilter compositefilter = null)
{
    var parsedfilter = new Library.Utility.FilterExpression(filterstrings);
    // A non-simple filter (or --all-versions) means every matching version is reported
    var simpleList = !(parsedfilter.Type == Library.Utility.FilterType.Simple || m_options.AllVersions);
    var filter = Library.Utility.JoinedFilterExpression.Join(parsedfilter, compositefilter);

    //Use a speedy local query
    if (!m_options.NoLocalDb && System.IO.File.Exists(m_options.Dbpath))
    {
        using (var db = new Database.LocalListDatabase(m_options.Dbpath))
        {
            m_result.SetDatabase(db);
            using (var filesets = db.SelectFileSets(m_options.Time, m_options.Version))
            {
                if (parsedfilter.Type != Library.Utility.FilterType.Empty)
                {
                    // Restrict to the most recent fileset unless all versions were requested
                    if (simpleList || (m_options.ListFolderContents && !m_options.AllVersions))
                    {
                        filesets.TakeFirst();
                    }
                }

                IEnumerable<Database.LocalListDatabase.IFileversion> files;
                if (m_options.ListFolderContents)
                {
                    files = filesets.SelectFolderContents(filter);
                }
                else if (m_options.ListPrefixOnly)
                {
                    files = filesets.GetLargestPrefix(filter);
                }
                else if (parsedfilter.Type == Duplicati.Library.Utility.FilterType.Empty)
                {
                    // No filter at all: only fileset summaries are reported, no file entries
                    files = null;
                }
                else
                {
                    files = filesets.SelectFiles(filter);
                }

                if (m_options.ListSetsOnly)
                {
                    m_result.SetResult(
                        filesets.QuickSets.Select(x => new ListResultFileset(x.Version, x.Time, x.FileCount, x.FileSizes)).ToArray(),
                        null
                    );
                }
                else
                {
                    m_result.SetResult(
                        filesets.Sets.Select(x => new ListResultFileset(x.Version, x.Time, x.FileCount, x.FileSizes)).ToArray(),
                        files == null ? null :
                            (from n in files
                             select (Duplicati.Library.Interface.IListResultFile)(new ListResultFile(n.Path, n.Sizes.ToArray())))
                            .ToArray()
                    );
                }

                return;
            }
        }
    }

    m_result.AddMessage("No local database, accessing remote store");

    //TODO: Add prefix and foldercontents
    if (m_options.ListFolderContents)
    {
        throw new Exception("Listing folder contents is not supported without a local database, consider using the \"repair\" option to rebuild the database.");
    }
    else if (m_options.ListPrefixOnly)
    {
        throw new Exception("Listing prefixes is not supported without a local database, consider using the \"repair\" option to rebuild the database.");
    }

    // Otherwise, grab info from remote location
    using (var tmpdb = new Library.Utility.TempFile())
    using (var db = new Database.LocalDatabase(tmpdb, "List", true))
    using (var backend = new BackendManager(m_backendurl, m_options, m_result.BackendWriter, db))
    {
        m_result.SetDatabase(db);

        var filteredList = ParseAndFilterFilesets(backend.List(), m_options);
        if (filteredList.Count == 0)
        {
            throw new Exception("No filesets found on remote target");
        }

        var numberSeq = CreateResultSequence(filteredList);
        if (parsedfilter.Type == Library.Utility.FilterType.Empty)
        {
            // No file filter: report fileset summaries only
            m_result.SetResult(numberSeq, null);
            m_result.EncryptedFiles = filteredList.Any(x => !string.IsNullOrWhiteSpace(x.Value.EncryptionModule));
            return;
        }

        var firstEntry = filteredList[0].Value;
        filteredList.RemoveAt(0);
        Dictionary<string, List<long>> res;
        if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
        {
            return;
        }

        // Download and read the first (most recent) fileset volume
        using (var tmpfile = backend.Get(firstEntry.File.Name, firstEntry.File.Size, null))
        using (var rd = new Volumes.FilesetVolumeReader(RestoreHandler.GetCompressionModule(firstEntry.File.Name), tmpfile, m_options))
            if (simpleList)
            {
                // Simple listing: only the first fileset's matches are reported
                m_result.SetResult(
                    numberSeq.Take(1),
                    (from n in rd.Files
                     where Library.Utility.FilterExpression.Matches(filter, n.Path)
                     orderby n.Path
                     select new ListResultFile(n.Path, new long[] { n.Size }))
                    .ToArray()
                );
                return;
            }
            else
            {
                // Seed the per-path size history with the sizes from the first fileset
                res = rd.Files
                    .Where(x => Library.Utility.FilterExpression.Matches(filter, x.Path))
                    .ToDictionary(
                        x => x.Path,
                        y => { var lst = new List<long>(); lst.Add(y.Size); return (lst); },
                        Library.Utility.Utility.ClientFilenameStringComparer
                    );
            }

        // Walk the remaining filesets, appending one size entry per path per fileset,
        // using -1 as a "not present in this fileset" marker
        long flindex = 1;
        foreach (var flentry in filteredList)
        {
            using (var tmpfile = backend.Get(flentry.Value.File.Name, flentry.Value.File == null ? -1 : flentry.Value.File.Size, null))
            using (var rd = new Volumes.FilesetVolumeReader(flentry.Value.CompressionModule, tmpfile, m_options))
            {
                if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
                {
                    return;
                }

                foreach (var p in from n in rd.Files where Library.Utility.FilterExpression.Matches(filter, n.Path) select n)
                {
                    List<long> lst;
                    if (!res.TryGetValue(p.Path, out lst))
                    {
                        // Path first seen now: backfill the earlier filesets with -1 markers
                        lst = new List<long>();
                        res[p.Path] = lst;
                        for (var i = 0; i < flindex; i++)
                        {
                            lst.Add(-1);
                        }
                    }
                    lst.Add(p.Size);
                }

                // Paths absent from this fileset get a -1 marker to keep histories aligned
                foreach (var n in from i in res where i.Value.Count < flindex + 1 select i)
                {
                    n.Value.Add(-1);
                }

                flindex++;
            }
        }

        m_result.SetResult(
            numberSeq,
            from n in res orderby n.Key select (Duplicati.Library.Interface.IListResultFile)(new ListResultFile(n.Key, n.Value))
        );
    }
}
/// <summary>
/// Verifies that the --exclude-empty-folders option, the --ignore-filenames
/// marker-file option, and commandline exclude filters progressively remove
/// empty and excluded folders from successive backup versions.
/// Each backup is separated by a sleep so the versions get distinct timestamps.
/// </summary>
public void TestEmptyFolderExclude()
{
    var source = DATAFOLDER;

    // Top level folder with no contents
    Directory.CreateDirectory(Path.Combine(source, "empty-toplevel"));
    // Top level folder with contents in one leaf
    Directory.CreateDirectory(Path.Combine(source, "toplevel"));
    // Empty folder
    Directory.CreateDirectory(Path.Combine(source, "toplevel", "empty"));
    // Folder with an excluded file
    Directory.CreateDirectory(Path.Combine(source, "toplevel", "filteredempty"));
    // Folder with contents
    Directory.CreateDirectory(Path.Combine(source, "toplevel", "normal"));
    // Folder with excludefile
    Directory.CreateDirectory(Path.Combine(source, "toplevel", "excludefile"));

    // Write a file that we will use for exclude target
    File.WriteAllLines(Path.Combine(source, "toplevel", "excludefile", "exclude.me"), new string[] { });
    File.WriteAllLines(Path.Combine(source, "toplevel", "excludefile", "anyfile.txt"), new string[] { "data" });
    // Write a file that we will filter
    File.WriteAllLines(Path.Combine(source, "toplevel", "filteredempty", "myfile.txt"), new string[] { "data" });
    // Write a file that we will not filter
    File.WriteAllLines(Path.Combine(source, "toplevel", "normal", "standard.txt"), new string[] { "data" });

    // Get the default options
    var testopts = TestOptions;

    // Create a fileset with all data present
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    // Check that we have 4 files and 7 folders
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        // Folder entries are distinguished by a trailing directory separator
        var folders = r.Files.Count(x => x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        var files = r.Files.Count(x => !x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        if (folders != 7) { throw new Exception($"Initial condition not satisfied, found {folders} folders, but expected 7"); }
        if (files != 4) { throw new Exception($"Initial condition not satisfied, found {files} files, but expected 4"); }
    }

    // Toggle the exclude file, and build a new fileset
    System.Threading.Thread.Sleep(5000);
    testopts["ignore-filenames"] = "exclude.me";
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    // Check that we have 2 files and 6 folders after excluding the "excludefile" folder
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        var folders = r.Files.Count(x => x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        var files = r.Files.Count(x => !x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        if (folders != 6) { throw new Exception($"Initial condition not satisfied, found {folders} folders, but expected 6"); }
        if (files != 2) { throw new Exception($"Initial condition not satisfied, found {files} files, but expected 2"); }
    }

    // Toggle empty folder excludes, and run a new backup to remove them
    System.Threading.Thread.Sleep(5000);
    testopts["exclude-empty-folders"] = "true";
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER });
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    // Check that the two empty folders are now removed
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        var folders = r.Files.Count(x => x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        var files = r.Files.Count(x => !x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        if (folders != 4) { throw new Exception($"Empty not satisfied, found {folders} folders, but expected 4"); }
        if (files != 2) { throw new Exception($"Empty not satisfied, found {files} files, but expected 2"); }
    }

    // Filter out one file and rerun the backup to exclude the folder
    System.Threading.Thread.Sleep(5000);
    var excludefilter = new Library.Utility.FilterExpression($"*{System.IO.Path.DirectorySeparatorChar}myfile.txt", false);
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER }, excludefilter);
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    // Check that the empty folder is now removed
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        var folders = r.Files.Count(x => x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        var files = r.Files.Count(x => !x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        if (folders != 3) { throw new Exception($"Empty not satisfied, found {folders} folders, but expected 3"); }
        if (files != 1) { throw new Exception($"Empty not satisfied, found {files} files, but expected 1"); }
    }

    // Delete the one remaining file and check that we only have the top-level folder in the set
    System.Threading.Thread.Sleep(5000);
    File.Delete(Path.Combine(source, "toplevel", "normal", "standard.txt"));
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts, null))
    {
        IBackupResults backupResults = c.Backup(new string[] { DATAFOLDER }, excludefilter);
        Assert.AreEqual(0, backupResults.Errors.Count());
        Assert.AreEqual(0, backupResults.Warnings.Count());
    }

    // Check we now have only one folder and no files
    using (var c = new Library.Main.Controller("file://" + TARGETFOLDER, testopts.Expand(new { version = 0 }), null))
    {
        var r = c.List("*");
        Assert.AreEqual(0, r.Errors.Count());
        Assert.AreEqual(0, r.Warnings.Count());
        var folders = r.Files.Count(x => x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        var files = r.Files.Count(x => !x.Path.EndsWith(Util.DirectorySeparatorString, StringComparison.Ordinal));
        if (folders != 1) { throw new Exception($"Empty not satisfied, found {folders} folders, but expected 1"); }
        if (files != 0) { throw new Exception($"Empty not satisfied, found {files} files, but expected 0"); }
    }
}
/// <summary>
/// Lists backup contents: filesets and, optionally, the files they contain.
/// Prefers a fast query against the local database; when none exists, falls
/// back to downloading and reading fileset volumes from the remote store.
/// </summary>
/// <param name="filterstrings">Commandline filter strings; may be null for an unfiltered listing.</param>
/// <param name="compositefilter">An additional filter joined with the parsed filter strings; may be null.</param>
public void Run(IEnumerable<string> filterstrings = null, Library.Utility.IFilter compositefilter = null)
{
    var parsedfilter = new Library.Utility.FilterExpression(filterstrings);
    // A non-simple filter (or --all-versions) means every matching version is reported
    var simpleList = !(parsedfilter.Type == Library.Utility.FilterType.Simple || m_options.AllVersions);
    var filter = Library.Utility.JoinedFilterExpression.Join(parsedfilter, compositefilter);

    //Use a speedy local query
    if (!m_options.NoLocalDb && System.IO.File.Exists(m_options.Dbpath))
        using (var db = new Database.LocalListDatabase(m_options.Dbpath))
        {
            m_result.SetDatabase(db);
            using (var filesets = db.SelectFileSets(m_options.Time, m_options.Version))
            {
                if (parsedfilter.Type != Library.Utility.FilterType.Empty)
                {
                    // Restrict to the most recent fileset unless all versions were requested
                    if (simpleList || (m_options.ListFolderContents && !m_options.AllVersions))
                        filesets.TakeFirst();
                }

                IEnumerable<Database.LocalListDatabase.IFileversion> files;
                if (m_options.ListFolderContents)
                    files = filesets.SelectFolderContents(filter);
                else if (m_options.ListPrefixOnly)
                    files = filesets.GetLargestPrefix(filter);
                else if (parsedfilter.Type == Duplicati.Library.Utility.FilterType.Empty)
                    // No filter at all: only fileset summaries are reported, no file entries
                    files = null;
                else
                    files = filesets.SelectFiles(filter);

                if (m_options.ListSetsOnly)
                    m_result.SetResult(
                        filesets.QuickSets.Select(x => new ListResultFileset(x.Version, x.Time, x.FileCount, x.FileSizes)).ToArray(),
                        null
                    );
                else
                    m_result.SetResult(
                        filesets.Sets.Select(x => new ListResultFileset(x.Version, x.Time, x.FileCount, x.FileSizes)).ToArray(),
                        files == null ? null :
                            (from n in files
                             select (Duplicati.Library.Interface.IListResultFile)(new ListResultFile(n.Path, n.Sizes.ToArray())))
                            .ToArray()
                    );

                return;
            }
        }

    m_result.AddMessage("No local database, accessing remote store");

    //TODO: Add prefix and foldercontents

    // Otherwise, grab info from remote location
    using (var tmpdb = new Library.Utility.TempFile())
    using (var db = new Database.LocalDatabase(tmpdb, "List"))
    using (var backend = new BackendManager(m_backendurl, m_options, m_result.BackendWriter, db))
    {
        m_result.SetDatabase(db);

        var filteredList = ParseAndFilterFilesets(backend.List(), m_options);
        if (filteredList.Count == 0)
            throw new Exception("No filesets found on remote target");

        var numberSeq = CreateResultSequence(filteredList);
        if (parsedfilter.Type == Library.Utility.FilterType.Empty)
        {
            // No file filter: report fileset summaries only
            m_result.SetResult(numberSeq, null);
            m_result.EncryptedFiles = filteredList.Any(x => !string.IsNullOrWhiteSpace(x.Value.EncryptionModule));
            return;
        }

        var firstEntry = filteredList[0].Value;
        filteredList.RemoveAt(0);
        Dictionary<string, List<long>> res;
        if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
            return;

        // Download and read the first (most recent) fileset volume
        using (var tmpfile = backend.Get(firstEntry.File.Name, firstEntry.File.Size, null))
        using (var rd = new Volumes.FilesetVolumeReader(RestoreHandler.GetCompressionModule(firstEntry.File.Name), tmpfile, m_options))
            if (simpleList)
            {
                // Simple listing: only the first fileset's matches are reported
                m_result.SetResult(
                    numberSeq.Take(1),
                    (from n in rd.Files
                     where Library.Utility.FilterExpression.Matches(filter, n.Path)
                     orderby n.Path
                     select new ListResultFile(n.Path, new long[] { n.Size }))
                    .ToArray()
                );
                return;
            }
            else
            {
                // Seed the per-path size history with the sizes from the first fileset
                res = rd.Files
                    .Where(x => Library.Utility.FilterExpression.Matches(filter, x.Path))
                    .ToDictionary(
                        x => x.Path,
                        y => { var lst = new List<long>(); lst.Add(y.Size); return lst; },
                        Library.Utility.Utility.ClientFilenameStringComparer
                    );
            }

        // Walk the remaining filesets, appending one size entry per path per fileset,
        // using -1 as a "not present in this fileset" marker
        long flindex = 1;
        foreach (var flentry in filteredList)
            using (var tmpfile = backend.Get(flentry.Value.File.Name, flentry.Value.File == null ? -1 : flentry.Value.File.Size, null))
            using (var rd = new Volumes.FilesetVolumeReader(flentry.Value.CompressionModule, tmpfile, m_options))
            {
                if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
                    return;

                foreach (var p in from n in rd.Files where Library.Utility.FilterExpression.Matches(filter, n.Path) select n)
                {
                    List<long> lst;
                    if (!res.TryGetValue(p.Path, out lst))
                    {
                        // Path first seen now: backfill the earlier filesets with -1 markers
                        lst = new List<long>();
                        res[p.Path] = lst;
                        for (var i = 0; i < flindex; i++)
                            lst.Add(-1);
                    }
                    lst.Add(p.Size);
                }

                // Paths absent from this fileset get a -1 marker to keep histories aligned
                foreach (var n in from i in res where i.Value.Count < flindex + 1 select i)
                    n.Value.Add(-1);

                flindex++;
            }

        m_result.SetResult(
            numberSeq,
            from n in res orderby n.Key select (Duplicati.Library.Interface.IListResultFile)(new ListResultFile(n.Key, n.Value))
        );
    }
}
/// <summary>
/// Simulates a backup using the channel-based enumeration pipeline: a producer
/// task enumerates filtered source paths into a channel, and a sink task reads
/// them back and logs what would be included or excluded.
/// The sink's ReadAsync loop terminates when the channel is retired after
/// enumeration completes.
/// </summary>
/// <param name="sources">The source folders to enumerate.</param>
/// <param name="filter">The user-supplied filter to apply.</param>
public void Run(string[] sources, Library.Utility.IFilter filter)
{
    var storeSymlinks = m_options.SymlinkPolicy == Duplicati.Library.Main.Options.SymlinkStrategy.Store;
    var sourcefilter = new Library.Utility.FilterExpression(sources, true);

    using (var snapshot = BackupHandler.GetSnapshot(sources, m_options))
    using (new IsolatedChannelScope())
    {
        // Producer: enumerates all paths passing the filters into the SourcePaths channel
        var source = Operation.Backup.FileEnumerationProcess.Run(snapshot, m_options.FileAttributeFilter, sourcefilter, filter, m_options.SymlinkPolicy, m_options.HardlinkPolicy, m_options.ExcludeEmptyFolders, m_options.IgnoreFilenames, null, m_result.TaskReader);

        // Consumer: reads each enumerated path and records include/exclude decisions
        var sink = CoCoL.AutomationExtensions.RunTask(
            new { source = Operation.Backup.Channels.SourcePaths.ForRead },
            async self =>
            {
                while (true)
                {
                    var path = await self.source.ReadAsync();

                    // Attribute reads are best-effort; fall back to Normal on failure
                    var fa = FileAttributes.Normal;
                    try
                    {
                        fa = snapshot.GetAttributes(path);
                    }
                    catch (Exception ex)
                    {
                        Logging.Log.WriteVerboseMessage(LOGTAG, "FailedAttributeRead", "Failed to read attributes from {0}: {1}", path, ex.Message);
                    }

                    // Analyze symlinks
                    var isSymlink = snapshot.IsSymlink(path, fa);
                    string symlinkTarget = null;
                    if (isSymlink)
                    {
                        try
                        {
                            symlinkTarget = snapshot.GetSymlinkTarget(path);
                        }
                        catch (Exception ex)
                        {
                            Logging.Log.WriteExplicitMessage(LOGTAG, "SymlinkTargetReadFailure", ex, "Failed to read symlink target for path: {0}", path);
                        }
                    }

                    if (isSymlink && m_options.SymlinkPolicy == Options.SymlinkStrategy.Store && !string.IsNullOrWhiteSpace(symlinkTarget))
                    {
                        // Skip stored symlinks
                        continue;
                    }

                    // Go for the symlink target, as we know we follow symlinks
                    if (!string.IsNullOrWhiteSpace(symlinkTarget))
                    {
                        path = symlinkTarget;
                        fa = FileAttributes.Normal;
                        try
                        {
                            fa = snapshot.GetAttributes(path);
                        }
                        catch (Exception ex)
                        {
                            Logging.Log.WriteVerboseMessage(LOGTAG, "FailedAttributeRead", "Failed to read attributes from {0}: {1}", path, ex.Message);
                        }
                    }

                    // Proceed with non-folders
                    if (!((fa & FileAttributes.Directory) == FileAttributes.Directory))
                    {
                        m_result.FileCount++;
                        var size = -1L;
                        try
                        {
                            size = snapshot.GetFileSize(path);
                            m_result.FileSize += size;
                        }
                        catch (Exception ex)
                        {
                            Logging.Log.WriteVerboseMessage(LOGTAG, "SizeReadFailed", "Failed to read length of file {0}: {1}", path, ex.Message);
                        }

                        // size stays -1 only when the size read failed; report include/exclude by the size limit
                        if (m_options.SkipFilesLargerThan == long.MaxValue || m_options.SkipFilesLargerThan == 0 || size < m_options.SkipFilesLargerThan)
                        {
                            Logging.Log.WriteVerboseMessage(LOGTAG, "IncludeFile", "Including file: {0} ({1})", path, size < 0 ? "unknown" : Duplicati.Library.Utility.Utility.FormatSizeString(size));
                        }
                        else
                        {
                            Logging.Log.WriteVerboseMessage(LOGTAG, "ExcludeLargeFile", "Excluding file due to size: {0} ({1})", path, size < 0 ? "unknown" : Duplicati.Library.Utility.Utility.FormatSizeString(size));
                        }
                    }
                }
            }
        );

        // Block until both producer and consumer complete, surfacing any task exception
        System.Threading.Tasks.Task.WhenAll(source, sink).WaitForTaskOrThrow();
    }
}