public void AlphaFS_Directory_HasInheritedPermissions()
{
   Console.WriteLine("Directory.HasInheritedPermissions()\n");

   var searchPattern = Path.WildcardStarMatchAll;
   var searchOption = SearchOption.TopDirectoryOnly;

   var cnt = 0;
   UnitTestConstants.StopWatcher(true);

   foreach (var dir in Directory.EnumerateDirectories(UnitTestConstants.SysRoot, searchPattern, searchOption))
   {
      try
      {
         var hasIp = Directory.HasInheritedPermissions(dir);

         if (hasIp)
            Console.WriteLine("\t#{0:000}\t[{1}]\t\tDirectory has inherited permissions: [{2}]", ++cnt, hasIp, dir);

         Assert.AreEqual(hasIp, HasInheritedPermissions(dir));
      }
      catch (Exception ex)
      {
         Console.Write("\t#{0:000}\tCaught {1} for directory: [{2}]\t[{3}]\n", cnt, ex.GetType().FullName, dir, ex.Message.Replace(Environment.NewLine, " "));
      }
   }

   Console.Write("\n{0}", UnitTestConstants.Reporter());
}
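The assertion above compares against a local HasInheritedPermissions() helper that is not shown in this snippet. A minimal sketch of what such a helper could look like, assuming it only needs to detect whether any inherited access rules are present; the implementation is illustrative and not the original test fixture's code:

// Hypothetical helper: returns true when the directory's ACL carries at least one
// access rule inherited from a parent object.
private static bool HasInheritedPermissions(string path)
{
   var acl = System.IO.Directory.GetAccessControl(path);

   // includeExplicit: false, includeInherited: true
   return acl.GetAccessRules(false, true, typeof(System.Security.Principal.SecurityIdentifier)).Count > 0;
}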
public void Directory_EnumerateDirectories_OK()
{
   var dir = Path.GetDirectoryName(typeof(AlphaFSToys).Assembly.Location);
   Console.WriteLine($"AlphaFSToys directory name {dir} or {TestContext.CurrentContext.TestDirectory}");

   // Climb from the build output folder (e.g. bin\Debug\<tfm>) up to the project directory.
   Directory.SetCurrentDirectory($"{dir}\\..\\..\\..");

   var dirs = Directory.EnumerateDirectories(".").ToArray();
   Console.WriteLine(string.Join(",", dirs));

   Assert.That(dirs.Any(x => x.EndsWith(@"\bin")));
   Assert.That(dirs.Any(x => x.EndsWith(@"\obj")));
}
public void GetDfsClientInfo()
{
   Console.WriteLine("Network.Host.GetDfsClientInfo()");

   int cnt = 0;
   StopWatcher(true);

   try
   {
      foreach (string dfsLink in Host.EnumerateDomainDfsRoot())
      {
         try
         {
            foreach (string dir in Directory.EnumerateDirectories(dfsLink))
            {
               Console.Write("\n#{0:000}\tDFS Target Directory: [{1}]\n", ++cnt, dir);

               StopWatcher(true);
               Dump(Host.GetDfsClientInfo(dir).NumberOfStorages.First(), -10);
               Console.Write("\n\t{0}\n", Reporter(true));

               break;
            }
         }
         catch (NetworkInformationException ex)
         {
            Console.WriteLine("\n\tNetworkInformationException #1: [{0}]", ex.Message.Replace(Environment.NewLine, " "));
         }
         catch (Exception ex)
         {
            Console.WriteLine("\n\tException (1): [{0}]", ex.Message.Replace(Environment.NewLine, " "));
         }
      }
   }
   catch (NetworkInformationException ex)
   {
      Console.WriteLine("\n\tNetworkInformationException #2: [{0}]", ex.Message.Replace(Environment.NewLine, " "));
   }
   catch (Exception ex)
   {
      Console.WriteLine("\n\tException (2): [{0}]", ex.Message.Replace(Environment.NewLine, " "));
   }

   Console.WriteLine("\n\n\t{0}", Reporter(true));

   Assert.IsTrue(cnt > 0, "Nothing was enumerated.");
   Console.WriteLine();
}
/// <summary>
/// Generate a list of immediate children from the current folder
/// </summary>
/// <param name="omitFromScan">Hash representing the hashes that should be skipped</param>
/// <param name="date">True if entry dates should be included, false otherwise (default)</param>
/// <returns>List of BaseFile objects representing the found data</returns>
/// <remarks>TODO: All instances of Hash.DeepHashes should be made into 0x0 eventually</remarks>
public virtual List<BaseFile> GetChildren(Hash omitFromScan = Hash.DeepHashes, bool date = false)
{
    if (_children == null || _children.Count == 0)
    {
        _children = new List<BaseFile>();

        foreach (string file in Directory.EnumerateFiles(_filename, "*", SearchOption.TopDirectoryOnly))
        {
            BaseFile nf = Utilities.GetFileInfo(file, omitFromScan: omitFromScan, date: date);
            _children.Add(nf);
        }

        foreach (string dir in Directory.EnumerateDirectories(_filename, "*", SearchOption.TopDirectoryOnly))
        {
            Folder fl = new Folder(dir);
            _children.Add(fl);
        }
    }

    return _children;
}
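A minimal usage sketch for the method above, assuming a Folder is constructed from a directory path as in the snippet; the path is illustrative:

// Enumerate the immediate children of a folder once; later calls reuse the cached list.
var root = new Folder(@"C:\data");
foreach (BaseFile child in root.GetChildren(date: true))
{
    Console.WriteLine(child);  // relies only on the default ToString(); no BaseFile member names are assumed
}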
private static void DeleteDirectory(string path)
{
    // Clear the read-only attribute on every file before deleting it.
    Directory.EnumerateFiles(path, "*", SearchOption.AllDirectories)
        .DoProgress("Cleaning VFS Files", file =>
        {
            try
            {
                var fi = new FileInfo(file);
                fi.Attributes &= ~FileAttributes.ReadOnly;
                File.Delete(file);
            }
            catch (Exception ex)
            {
                Utils.Log(ex.ToString());
            }
        });

    // Delete the deepest folders first so parent folders are already empty when their turn comes.
    Directory.EnumerateDirectories(path, DirectoryEnumerationOptions.Recursive)
        .OrderByDescending(d => d.Length)
        .DoProgress("Cleaning VFS Folders", folder =>
        {
            try
            {
                if (!Directory.Exists(folder))
                {
                    return;
                }

                var di = new DirectoryInfo(folder);
                di.Attributes &= ~FileAttributes.ReadOnly;
                Directory.Delete(folder, true);
            }
            catch (Exception ex)
            {
                Utils.Log(ex.ToString());
            }
        });
}
protected override async Task<ExitCode> Run()
{
    var modListPath = (AbsolutePath)Modlist;
    if (modListPath.Extension != Consts.ModListExtension && modListPath.FileName != (RelativePath)"modlist.txt")
    {
        return CLIUtils.Exit($"The file {Modlist} is not a valid modlist file!", ExitCode.BadArguments);
    }

    if (Copy && Move)
    {
        return CLIUtils.Exit("You can't set both copy and move flags!", ExitCode.BadArguments);
    }

    var isModlist = modListPath.Extension == Consts.ModListExtension;
    var list = new List<TransferFile>();

    if (isModlist)
    {
        ModList modlist;
        try
        {
            modlist = AInstaller.LoadFromFile(modListPath);
        }
        catch (Exception e)
        {
            return CLIUtils.Exit($"Error while loading the Modlist!\n{e}", ExitCode.Error);
        }

        if (modlist == null)
        {
            return CLIUtils.Exit("The Modlist could not be loaded!", ExitCode.Error);
        }

        CLIUtils.Log($"Modlist contains {modlist.Archives.Count} archives.");

        modlist.Archives.Do(a =>
        {
            var inputPath = Path.Combine(Input, a.Name);
            var outputPath = Path.Combine(Output, a.Name);

            if (!File.Exists(inputPath))
            {
                CLIUtils.Log($"File {inputPath} does not exist, skipping.");
                return;
            }

            CLIUtils.Log($"Adding {inputPath} to the transfer list.");
            list.Add(new TransferFile(inputPath, outputPath));

            // The meta file is a "<archive>.meta" sidecar next to the archive itself.
            var metaInputPath = inputPath + ".meta";
            var metaOutputPath = outputPath + ".meta";

            if (File.Exists(metaInputPath))
            {
                CLIUtils.Log($"Found meta file {metaInputPath}");
                if (IncludeMeta)
                {
                    CLIUtils.Log($"Adding {metaInputPath} to the transfer list.");
                    list.Add(new TransferFile(metaInputPath, metaOutputPath));
                }
                else
                {
                    CLIUtils.Log($"Meta file {metaInputPath} will be ignored.");
                }
            }
            else
            {
                CLIUtils.Log($"Found no meta file for {inputPath}");
                if (IncludeMeta)
                {
                    if (string.IsNullOrWhiteSpace(a.Meta))
                    {
                        CLIUtils.Log($"Meta for {a.Name} is empty, this should not be possible but whatever.");
                        return;
                    }

                    CLIUtils.Log("Adding meta from archive info to the transfer list");
                    list.Add(new TransferFile(a.Meta, metaOutputPath, true));
                }
                else
                {
                    CLIUtils.Log($"Meta will be ignored for {a.Name}");
                }
            }
        });
    }
    else
    {
        if (!Directory.Exists(Mods))
        {
            return CLIUtils.Exit($"Mods directory {Mods} does not exist!", ExitCode.BadArguments);
        }

        CLIUtils.Log($"Reading modlist.txt from {Modlist}");
        string[] modlist = File.ReadAllLines(Modlist);

        if (modlist == null || modlist.Length == 0)
        {
            return CLIUtils.Exit($"Provided modlist.txt file at {Modlist} is empty or could not be read!", ExitCode.BadArguments);
        }

        // Lines starting with "+" are the enabled mods.
        var mods = modlist.Where(s => s.StartsWith("+")).Select(s => s.Substring(1)).ToHashSet();
        if (mods.Count == 0)
        {
            return CLIUtils.Exit("No enabled mods were found in modlist.txt!", ExitCode.BadArguments);
        }

        CLIUtils.Log($"Found {mods.Count} mods in modlist.txt");

        var downloads = new HashSet<string>();

        Directory.EnumerateDirectories(Mods, "*", SearchOption.TopDirectoryOnly)
            .Where(d => mods.Contains(Path.GetRelativePath(Path.GetDirectoryName(d), d)))
            .Do(d =>
            {
                var meta = Path.Combine(d, "meta.ini");
                if (!File.Exists(meta))
                {
                    CLIUtils.Log($"Mod meta file {meta} does not exist, skipping");
                    return;
                }

                string[] ini = File.ReadAllLines(meta);
                if (ini == null || ini.Length == 0)
                {
                    CLIUtils.Log($"Mod meta file {meta} could not be read or is empty!");
                    return;
                }

                ini.Where(i => !string.IsNullOrWhiteSpace(i) && i.StartsWith("installationFile="))
                    .Select(i => i.Replace("installationFile=", ""))
                    .Do(i =>
                    {
                        CLIUtils.Log($"Found installationFile {i}");
                        downloads.Add(i);
                    });
            });

        CLIUtils.Log($"Found {downloads.Count} installationFiles from mod metas.");

        Directory.EnumerateFiles(Input, "*", SearchOption.TopDirectoryOnly)
            .Where(f => downloads.Contains(Path.GetFileNameWithoutExtension(f)))
            .Do(f =>
            {
                CLIUtils.Log($"Found archive {f}");

                var outputPath = Path.Combine(Output, Path.GetFileName(f));

                CLIUtils.Log($"Adding {f} to the transfer list");
                list.Add(new TransferFile(f, outputPath));

                var metaInputPath = f + ".meta";
                if (File.Exists(metaInputPath))
                {
                    CLIUtils.Log($"Found meta file for {f} at {metaInputPath}");
                    if (IncludeMeta)
                    {
                        var metaOutputPath = outputPath + ".meta";
                        CLIUtils.Log($"Adding {metaInputPath} to the transfer list.");
                        list.Add(new TransferFile(metaInputPath, metaOutputPath));
                    }
                    else
                    {
                        CLIUtils.Log("Meta file will be ignored");
                    }
                }
                else
                {
                    CLIUtils.Log($"Found no meta file for {f}");
                }
            });
    }

    CLIUtils.Log($"Transfer list contains {list.Count} items");

    var success = 0;
    var failed = 0;
    var skipped = 0;

    list.Do(f =>
    {
        if (File.Exists(f.Output))
        {
            if (Overwrite)
            {
                CLIUtils.Log($"Output file {f.Output} already exists, it will be overwritten");
                if (f.IsMeta || Move)
                {
                    CLIUtils.Log($"Deleting file at {f.Output}");
                    try
                    {
                        File.Delete(f.Output);
                    }
                    catch (Exception e)
                    {
                        CLIUtils.Log($"Could not delete file {f.Output}!\n{e}");
                        failed++;
                    }
                }
            }
            else
            {
                CLIUtils.Log($"Output file {f.Output} already exists, skipping");
                skipped++;
                return;
            }
        }

        if (f.IsMeta)
        {
            CLIUtils.Log($"Writing meta data to {f.Output}");
            try
            {
                File.WriteAllText(f.Output, f.Input, Encoding.UTF8);
                success++;
            }
            catch (Exception e)
            {
                CLIUtils.Log($"Error while writing meta data to {f.Output}!\n{e}");
                failed++;
            }
        }
        else if (Copy)
        {
            CLIUtils.Log($"Copying file {f.Input} to {f.Output}");
            try
            {
                File.Copy(f.Input, f.Output, Overwrite ? CopyOptions.None : CopyOptions.FailIfExists, CopyMoveProgressHandler, null);
                success++;
            }
            catch (Exception e)
            {
                CLIUtils.Log($"Error while copying file {f.Input} to {f.Output}!\n{e}");
                failed++;
            }
        }
        else if (Move)
        {
            CLIUtils.Log($"Moving file {f.Input} to {f.Output}");
            try
            {
                File.Move(f.Input, f.Output, Overwrite ? MoveOptions.ReplaceExisting : MoveOptions.None, CopyMoveProgressHandler, null);
                success++;
            }
            catch (Exception e)
            {
                CLIUtils.Log($"Error while moving file {f.Input} to {f.Output}!\n{e}");
                failed++;
            }
        }
    });

    CLIUtils.Log($"Skipped transfers: {skipped}");
    CLIUtils.Log($"Failed transfers: {failed}");
    CLIUtils.Log($"Successful transfers: {success}");

    return 0;
}
private static IEnumerable<string> EnumerateDirectoriesImpl(string path, FileFilter filter, int startDepth, Gitignore gitignore)
{
    var dirOptions = baseDirOptions;
    if (filter.IncludeSubfolders)
    {
        dirOptions |= DirectoryEnumerationOptions.Recursive;
    }
    if (filter.FollowSymlinks)
    {
        dirOptions &= ~DirectoryEnumerationOptions.SkipReparsePoints;
    }

    DirectoryEnumerationFilters dirFilters = new DirectoryEnumerationFilters
    {
        ErrorFilter = (errorCode, errorMessage, pathProcessed) =>
        {
            logger.Error($"Find file error {errorCode}: {errorMessage} on {pathProcessed}");
            return true;
        },

        RecursionFilter = fsei =>
        {
            if (gitignore != null && gitignore.Directories.Contains(fsei.FullPath))
            {
                return false;
            }
            if (!filter.IncludeHidden && fsei.IsHidden)
            {
                return false;
            }
            if (filter.MaxSubfolderDepth >= 0)
            {
                int depth = GetDepth(new DirectoryInfo(fsei.FullPath));
                if (depth - startDepth > filter.MaxSubfolderDepth)
                {
                    return false;
                }
            }
            if (filter.UseGitIgnore && fsei.FileName == ".git")
            {
                return false;
            }
            return true;
        },

        InclusionFilter = fsei =>
        {
            if (gitignore != null && gitignore.Directories.Contains(fsei.FullPath))
            {
                return false;
            }
            if (!filter.IncludeHidden && fsei.IsHidden)
            {
                return false;
            }
            if (filter.MaxSubfolderDepth >= 0)
            {
                int depth = GetDepth(new DirectoryInfo(fsei.FullPath));
                if (depth - startDepth > filter.MaxSubfolderDepth)
                {
                    return false;
                }
            }
            if (filter.UseGitIgnore && fsei.FileName == ".git")
            {
                return false;
            }
            return true;
        },
    };

    return Directory.EnumerateDirectories(path, dirOptions, dirFilters, PathFormat.FullPath);
}
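The enumeration helpers in this section reference baseDirOptions and baseFileOptions fields that are defined elsewhere in their project. A plausible sketch of such fields using AlphaFS DirectoryEnumerationOptions flags; the exact combination used in the original project may differ:

// Assumed defaults: enumerate folders (or files), skip reparse points unless symlink
// following is requested, and use the faster basic-search / large-cache flags.
private static readonly DirectoryEnumerationOptions baseDirOptions =
    DirectoryEnumerationOptions.Folders |
    DirectoryEnumerationOptions.SkipReparsePoints |
    DirectoryEnumerationOptions.BasicSearch |
    DirectoryEnumerationOptions.LargeCache;

private static readonly DirectoryEnumerationOptions baseFileOptions =
    DirectoryEnumerationOptions.Files |
    DirectoryEnumerationOptions.SkipReparsePoints |
    DirectoryEnumerationOptions.BasicSearch |
    DirectoryEnumerationOptions.LargeCache;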
/// <summary>
/// The user may already have some files in the OutputFolder. If so we can go through these and
/// figure out which need to be updated, deleted, or left alone
/// </summary>
public async Task OptimizeModlist()
{
    Utils.Log("Optimizing ModList directives");

    // Clone the ModList so our changes don't modify the original data
    ModList = ModList.Clone();

    var indexed = ModList.Directives.ToDictionary(d => d.To);

    UpdateTracker.NextStep("Looking for files to delete");
    await Directory.EnumerateFiles(OutputFolder, "*", DirectoryEnumerationOptions.Recursive)
        .PMap(Queue, UpdateTracker, f =>
        {
            var relative_to = f.RelativeTo(OutputFolder);
            Utils.Status($"Checking ModList file {relative_to}");

            if (indexed.ContainsKey(relative_to) || f.IsInPath(DownloadFolder))
            {
                return;
            }

            Utils.Log($"Deleting {relative_to}, it's not part of this ModList");
            File.Delete(f);
        });

    UpdateTracker.NextStep("Looking for unmodified files");
    (await indexed.Values.PMap(Queue, UpdateTracker, d =>
    {
        // Bit backwards, but we want to return null for
        // all files we *want* installed. We return the files
        // to remove from the install list.
        Status($"Optimizing {d.To}");

        var path = Path.Combine(OutputFolder, d.To);
        if (!File.Exists(path))
        {
            return null;
        }

        var fi = new FileInfo(path);
        if (fi.Length != d.Size)
        {
            return null;
        }

        return path.FileHash() == d.Hash ? d : null;
    }))
    .Where(d => d != null)
    .Do(d => indexed.Remove(d.To));

    Utils.Log("Cleaning empty folders");
    var expectedFolders = indexed.Keys
        // We ignore the last part of the path, so we need a dummy file name
        .Append(Path.Combine(DownloadFolder, "_"))
        .SelectMany(path =>
        {
            // Get all the folders and all the folder parents
            // so for foo\bar\baz\qux.txt this emits ["foo", "foo\\bar", "foo\\bar\\baz"]
            var split = path.Split('\\');
            return Enumerable.Range(1, split.Length - 1).Select(t => string.Join("\\", split.Take(t)));
        }).Distinct()
        .Select(p => Path.Combine(OutputFolder, p))
        .ToHashSet();

    try
    {
        Directory.EnumerateDirectories(OutputFolder, DirectoryEnumerationOptions.Recursive)
            .Where(p => !expectedFolders.Contains(p))
            .OrderByDescending(p => p.Length)
            .Do(Utils.DeleteDirectory);
    }
    catch (Exception)
    {
        // ignored because it's not worth throwing a fit over
        Utils.Log("Error when trying to clean empty folders. This doesn't really matter.");
    }

    UpdateTracker.NextStep("Updating ModList");
    Utils.Log($"Optimized {ModList.Directives.Count} directives to {indexed.Count} required");

    var requiredArchives = indexed.Values.OfType<FromArchive>()
        .GroupBy(d => d.ArchiveHashPath[0])
        .Select(d => d.Key)
        .ToHashSet();

    ModList.Archives = ModList.Archives.Where(a => requiredArchives.Contains(a.Hash)).ToList();
    ModList.Directives = indexed.Values.ToList();
}
private static IEnumerable<string> EnumerateFilesExcludeHidden(string path, IList<string> patterns, bool recursive)
{
    // when checking for hidden directories, enumerate the directories separately from files to check for hidden flag on directories
    DirectoryInfo di = new DirectoryInfo(path);

    // the root of the drive has the hidden attribute set, so don't stop on this hidden directory
    if (di.Attributes.HasFlag(FileAttributes.Hidden) &&
        !string.Equals(di.FullName, di.Root.FullName, StringComparison.OrdinalIgnoreCase))
    {
        yield break;
    }

    var dirOptions = baseDirOptions;
    if (recursive)
    {
        dirOptions |= DirectoryEnumerationOptions.Recursive;
    }

    DirectoryEnumerationFilters dirFilters = new DirectoryEnumerationFilters
    {
        ErrorFilter = (errorCode, errorMessage, pathProcessed) =>
        {
            logger.Error($"Find file error {errorCode}: {errorMessage} on {pathProcessed}");
            return true;
        }
    };
    dirFilters.InclusionFilter = fsei => !fsei.IsHidden;

    DirectoryEnumerationFilters fileFilters = new DirectoryEnumerationFilters
    {
        ErrorFilter = (errorCode, errorMessage, pathProcessed) =>
        {
            logger.Error($"Find file error {errorCode}: {errorMessage} on {pathProcessed}");
            return true;
        }
    };

    bool includeAllFiles = patterns.Count == 0 ||
        (patterns.Count == 1 && (patterns[0] == "*.*" || patterns[0] == "*"));

    if (includeAllFiles)
    {
        fileFilters.InclusionFilter = fsei => !fsei.IsHidden;
    }
    else
    {
        fileFilters.InclusionFilter = fsei =>
        {
            if (fsei.IsHidden)
            {
                return false;
            }

            foreach (string pattern in patterns)
            {
                if (WildcardMatch(fsei.FileName, pattern, true))
                {
                    return true;
                }
                // *.doc and *.xls should also pick up the newer *.docx and *.xlsx extensions
                else if (pattern == "*.doc" && WildcardMatch(fsei.FileName, "*.doc*", true))
                {
                    return true;
                }
                else if (pattern == "*.xls" && WildcardMatch(fsei.FileName, "*.xls*", true))
                {
                    return true;
                }
            }
            return false;
        };
    }

    IEnumerable<string> directories = new string[] { path };
    if (recursive)
    {
        directories = directories.Concat(Directory.EnumerateDirectories(path, dirOptions, dirFilters, PathFormat.FullPath));
    }

    foreach (var directory in directories)
    {
        IEnumerable<string> matches = Directory.EnumerateFiles(directory, baseFileOptions, fileFilters, PathFormat.FullPath);
        foreach (var file in matches)
        {
            yield return file;
        }
    }
}
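A minimal calling sketch for the method above, assuming it lives in the same static helper class and that the pattern list uses the wildcard syntax matched by WildcardMatch; the path and pattern are illustrative:

// Recursively list all non-hidden *.cs files under a source tree.
var patterns = new List<string> { "*.cs" };
foreach (string file in EnumerateFilesExcludeHidden(@"C:\src\MyProject", patterns, recursive: true))
{
    Console.WriteLine(file);
}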