/// <summary>
/// Gets <see cref="FileData"/> for all the files in a directory that match a
/// specific filter.
/// </summary>
/// <param name="path">The path to search.</param>
/// <param name="searchPattern">The search string to match against files in the path.</param>
/// <param name="searchOption">Whether to search the top directory only or all subdirectories.</param>
/// <returns>An array of <see cref="FileData"/> describing the matching files in the given directory.</returns>
/// <exception cref="ArgumentNullException">
/// <paramref name="path"/> is a null reference (Nothing in VB)
/// </exception>
/// <exception cref="ArgumentNullException">
/// <paramref name="searchPattern"/> is a null reference (Nothing in VB)
/// </exception>
public static FileData[] GetFiles(string path, string searchPattern, SearchOption searchOption)
{
    IEnumerable<FileData> e = FastDirectory.EnumerateFiles(path, searchPattern, searchOption);
    List<FileData> list = new List<FileData>(e);
    FileData[] retval = new FileData[list.Count];
    list.CopyTo(retval);
    return retval;
}
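// A minimal usage sketch (not part of the library): recursively collect all *.log files
// under C:\Temp and print their sizes. Assumes the FileData members used elsewhere in
// this file (FullPath, Size); the path and pattern are examples only.
using System;
using System.IO;

class GetFilesDemo
{
    static void Main()
    {
        FileData[] logs = FastDirectory.GetFiles(@"C:\Temp", "*.log", SearchOption.AllDirectories);
        foreach (FileData f in logs)
        {
            Console.WriteLine($"{f.FullPath} ({f.Size:n0} bytes)");
        }
    }
}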
void EFUScanFolderRecursive(StreamWriter newEFU, DirEntry dir)
{
    if (Program.ShowProgress && dirnum++ % 100 == 0)
    {
        string log = $" Scanning: {dir.Path}";
        if (log.Length > Console.BufferWidth - 1)
        {
            log = log.Substring(0, Console.BufferWidth - 4) + "...";
        }
        Console.Write($"{log.PadRight(Console.BufferWidth - 1)}\r");
    }

    int depth = dir.Path.Length < 3 ? 0 : dir.Path.Length - dir.Path.Substring(2).Replace("\\", "").Length;
    if (depth > maxDepth)
    {
        maxDepth = depth;
    }

    FileData[] contents = FastDirectory.GetFiles(dir.Path, "*", SearchOption.TopDirectoryOnly);
    var subs = contents.Where(e => e.Attributes.HasFlag(FileAttributes.Directory)).ToArray();
    var files = contents.Where(e => !e.Attributes.HasFlag(FileAttributes.Directory)).ToArray();

    if (Program.depthLimit > 0 && depth >= Program.depthLimit)
    {
        subs = new FileData[] { }; // depth limit reached: do not descend further
    }

    foreach (var f in files)
    {
        if (isIncluded(f.FullPath, false))
        {
            newEFU.WriteLine($"\"{f.FullPath}\",{f.Size},{f.LastWriteTime.ToFileTime()},{f.CreationTime.ToFileTime()},{(int)f.Attributes}");
            dir.Size += f.Size;
            totalSize += f.Size;
            fileCount++;
        }
        else
        {
            exFileCount++;
            exSize += f.Size;
        }
    }

    foreach (var s in subs)
    {
        if (dirIndex.ContainsKey(s.FullPath.ToLower())) // skip folders already indexed
        {
            continue;
        }
        if (isIncluded(s.FullPath, true))
        {
            DirEntry sub = new DirEntry(dir, s) { Changed = true };
            dir.Add(sub);
            dirIndex[s.FullPath.ToLower()] = sub;
            EFUScanFolderRecursive(newEFU, sub);
            //dir.Size += sub.Size;
        }
        else
        {
            exDirCount++;
        }
    }
}

// recursively updates total folder sizes
// before calling this, each folder's size is assumed to be just the sum of the sizes of the files directly in that folder (not including subfolders)
long getSubSize(DirEntry dir)
{
    if (dir.Contents != null)
    {
        foreach (var sub in dir.Contents)
        {
            if (sub.Exists)
            {
                dir.Size += getSubSize(sub);
            }
        }
    }
    return dir.Size;
}

// writes folder info to the EFU file
void EFUFinalize(StreamWriter newEFU)
{
    foreach (var dir in dirIndex.Values)
    {
        if (dir.Exists)
        {
            dirCount++;
            int depth = dir.Path.Length < 3 ? 0 : dir.Path.Length - dir.Path.Substring(2).Replace("\\", "").Length;
            if (depth > maxDepth)
            {
                maxDepth = depth;
            }
            newEFU.WriteLine($"\"{dir.Path}\",{dir.Size},{dir.Modified.ToFileTime()},{dir.Created.ToFileTime()},{(int)dir.Attributes}");
        }
    }
}
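// Illustration only (not from the original source): builds one EFU row the same way the
// WriteLine calls above do: quoted full path, size in bytes, last-write and creation
// times as Win32 FILETIME values, and the numeric FileAttributes flags. The helper name
// FormatEfuRow and the sample path are invented for this sketch.
using System;
using System.IO;

static class EfuRowSketch
{
    static string FormatEfuRow(string fullPath, long size, DateTime modified, DateTime created, FileAttributes attributes)
    {
        return $"\"{fullPath}\",{size},{modified.ToFileTime()},{created.ToFileTime()},{(int)attributes}";
    }

    static void Main()
    {
        var info = new FileInfo(@"C:\Temp\a.txt"); // hypothetical existing file
        Console.WriteLine(FormatEfuRow(info.FullName, info.Length, info.LastWriteTime, info.CreationTime, info.Attributes));
        // e.g. "C:\Temp\a.txt",123,132514303310000000,132514303310000000,32
    }
}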
// copies file entries of unmodified folders from the existing EFU to the new EFU
void EFUReindexUnchangedEntries(StreamWriter newEFU)
{
    long lines = 0;
    int lastpc = -1;
    using (StreamReader sr = new StreamReader(EFUpath, Encoding.UTF8, false, 1 << 20))
    {
        sr.ReadLine(); // header
        while (!sr.EndOfStream)
        {
            string line = sr.ReadLine();
            ++lines;
            int pc = (int)(100 * sr.BaseStream.Position / sr.BaseStream.Length);
            if (Program.ShowProgress && (lines % 10000 == 0 || pc != lastpc))
            {
                Console.Write($" Done {pc}%\r");
            }
            lastpc = pc;

            DirEntry file = ParseEntry(line, false);
            if (file.isFolder)
            {
                continue;
            }

            if (isIncluded(file.Path, false)) // process included/excluded files
            {
                var folder = Path.GetDirectoryName(file.Path);
                if (dirIndex.TryGetValue(folder.ToLower(), out var subdir) && subdir.Verified)
                {
                    if (!subdir.Exists)
                    {
                        continue;
                    }
                    if (!subdir.Changed)
                    {
                        fileCount++;
                        totalSize += file.Size;
                        subdir.Size += file.Size;
                        newEFU.WriteLine(line);
                    }
                }
            }
            else
            {
                exFileCount++;
                exSize += file.Size;
            }
        }
    }
    if (Program.ShowProgress)
    {
        Console.Write(" \r");
    }
}

// loads an EFU file, keeping only directory info and stats (files are ignored)
void EFULoad(bool Checked = false)
{
    long lines = 0;
    int dirs = 0;
    int files = 0;
    long size = 0;
    int depth = 0;
    int lastpc = -1;
    using (StreamReader sr = new StreamReader(EFUpath, Encoding.UTF8, false, 1 << 20))
    {
        sr.ReadLine(); // header
        while (!sr.EndOfStream)
        {
            string line = sr.ReadLine();
            ++lines;
            int pc = (int)(100 * sr.BaseStream.Position / sr.BaseStream.Length);
            if (Program.ShowProgress && (lines % 10000 == 0 || pc != lastpc))
            {
                Console.Write($" Loaded {pc}%\r");
            }
            lastpc = pc;

            DirEntry dir = ParseEntry(line, true); // dirs only
            if (dir != null)
            {
                int fd = dir.Path.Length - dir.Path.Substring(1).Replace("\\", "").Length;
                if (fd > depth)
                {
                    depth = fd;
                }
                dirs++;
                if (isIncluded(dir.Path, true)) // filter
                {
                    dirIndex[dir.Path.ToLower()] = dir;
                    DirEntry parDir = null;
                    var parent = Path.GetDirectoryName(dir.Path);
                    if (parent != null && dirIndex.TryGetValue(parent.ToLower(), out parDir))
                    {
                        parDir.Add(dir);
                    }
                    if (parDir == null)
                    {
                        size += dir.Size; // root folder size
                    }
                    dir.Size = 0; // will be recalculated
                }
                else
                {
                    exDirCount++; // size is added elsewhere
                }
            }
            else
            {
                files++;
            }
        }
    }
    //if (Program.ShowProgress) Console.Write(" \r");
    Console.WriteLine($" Contents: {Util.FormatSize(size)} in {files:n0} files, {dirs:n0} folders [{depth} depth]");
}

// copies entries that pass the exclude filter from the existing EFU to the new EFU
void EFUFilter(StreamWriter newEFU)
{
    long lines = 0;
    int lastpc = -1;
    using (StreamReader sr = new StreamReader(EFUpath, Encoding.UTF8, false, 1 << 20))
    {
        sr.ReadLine(); // header
        while (!sr.EndOfStream)
        {
            string line = sr.ReadLine();
            ++lines;
            int pc = (int)(100 * sr.BaseStream.Position / sr.BaseStream.Length);
            if (Program.ShowProgress && (lines % 10000 == 0 || pc != lastpc))
            {
                Console.Write($" Done {pc}%\r");
            }
            lastpc = pc;

            DirEntry file = ParseEntry(line, false);
            if (file.isFolder)
            {
                continue;
            }

            var folder = Path.GetDirectoryName(file.Path);
            if (folder != null && dirIndex.TryGetValue(folder.ToLower(), out var subdir))
            {
                if (isIncluded(file.Path, file.isFolder))
                {
                    fileCount++;
                    totalSize += file.Size;
                    subdir.Size += file.Size;
                    newEFU.WriteLine(line);
                    continue;
                }
            }
            exFileCount++;
            exSize += file.Size;
        }
    }
    if (Program.ShowProgress)
    {
        Console.Write(" \r");
    }
}
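// Minimal sketch of the reading pattern shared by EFUReindexUnchangedEntries, EFULoad
// and EFUFilter above: stream the existing EFU with a 1 MB buffer and report a rough
// percentage from the underlying stream position. Because StreamReader buffers its
// reads, BaseStream.Position advances in buffer-sized jumps, so the percentage is only
// approximate. The file path is an example only.
using System;
using System.IO;
using System.Text;

class EfuProgressSketch
{
    static void Main()
    {
        long lines = 0;
        int lastpc = -1;
        using (var sr = new StreamReader(@"C:\Temp\index.efu", Encoding.UTF8, false, 1 << 20))
        {
            sr.ReadLine(); // skip the EFU header row
            while (!sr.EndOfStream)
            {
                sr.ReadLine();
                ++lines;
                int pc = (int)(100 * sr.BaseStream.Position / sr.BaseStream.Length);
                if (pc != lastpc)
                {
                    Console.Write($" Loaded {pc}%\r");
                }
                lastpc = pc;
            }
        }
        Console.WriteLine($"\nRead {lines:n0} data rows.");
    }
}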
// reads an EFU file and accumulates include/exclude statistics
// (file/folder counts, total size, max depth, per-extension counts and sizes)
void getStats()
{
    long lines = 0;
    int lastpc = -1;
    using (StreamReader sr = new StreamReader(EFUpath, Encoding.UTF8, false, 1 << 20))
    {
        sr.ReadLine(); // header
        while (!sr.EndOfStream)
        {
            string line = sr.ReadLine();
            ++lines;
            int pc = (int)(100 * sr.BaseStream.Position / sr.BaseStream.Length);
            if (Program.ShowProgress && (lines % 10000 == 0 || pc != lastpc))
            {
                Console.Write($" Loaded {pc}%\r");
            }
            lastpc = pc;

            DirEntry dir = ParseEntry(line, false); // parse every entry (files and folders)
            if (isIncluded(dir.Path, dir.isFolder))
            {
                if (dir.isFolder)
                {
                    dirCount++;
                    int depth = dir.Path.Length < 3 ? 0 : dir.Path.Length - dir.Path.Substring(2).Replace("\\", "").Length;
                    if (depth > maxDepth)
                    {
                        maxDepth = depth;
                    }
                }
                else
                {
                    fileCount++;
                    totalSize += dir.Size;
                    string ext = (Path.GetExtension(dir.Path) ?? "").ToLower();
                    if (extensionCount.TryGetValue(ext, out var count))
                    {
                        extensionCount[ext] = new Tuple<int, long>(count.Item1 + 1, count.Item2 + dir.Size);
                    }
                    else
                    {
                        extensionCount[ext] = new Tuple<int, long>(1, dir.Size);
                    }
                }
            }
            else
            {
                if (dir.isFolder)
                {
                    exDirCount++;
                }
                else
                {
                    exFileCount++;
                    exSize += dir.Size;
                }
            }
        }
    }
    if (Program.ShowProgress)
    {
        Console.Write(" \r");
    }
}

// parses one EFU line; uses string.Split (about 2x faster than regex for this task)
DirEntry ParseEntry(string line, bool dirsOnly = false)
{
    if (dirsOnly && (line.EndsWith(",0") || line.EndsWith(",32")))
    {
        return null; // fast path: an attributes column of 0 or 32 (Archive) cannot be a directory
    }
    var items = line.Split(',');
    if (items.Length >= 5)
    {
        FileAttributes attr = (FileAttributes)int.Parse(items[items.Length - 1]);
        if (dirsOnly && !attr.HasFlag(FileAttributes.Directory))
        {
            return null;
        }
        // paths containing commas are split across extra items; rejoin everything except the last four columns
        string path = items.Length == 5
            ? items[0].Trim('\"')
            : string.Join(",", items.Take(items.Length - 4).ToList()).Trim('\"');
        long size = long.Parse(items[items.Length - 4]);
        DateTime mdate = DateTime.FromFileTime(long.Parse(items[items.Length - 3]));
        DateTime cdate = DateTime.FromFileTime(long.Parse(items[items.Length - 2]));
        return new DirEntry(null, path, size, mdate, cdate, attr);
    }
    return null;
}

// gets an existing DirEntry for a path, or creates a new one (recursively creating parents)
DirEntry getDirEntry(string path)
{
    string key = path?.ToLower();
    if (dirIndex.TryGetValue(key, out DirEntry entry))
    {
        return entry;
    }
    string root = Path.GetPathRoot(path).TrimEnd('\\');
    string dir = Path.GetDirectoryName(path)?.TrimEnd('\\') ?? "";
    //string name = path == root ? root : Path.GetFileName(path);
    DirEntry de = new DirEntry(getDirEntry(dir), dir, 0, DateTime.MinValue, DateTime.MinValue, 0);
    dirIndex[key] = de;
    return de;
}
}
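// Standalone sketch of the column handling in ParseEntry above: the path is only wrapped
// in quotes, not escaped, so a comma inside the path produces extra Split items, and
// everything except the trailing four columns (size, modified, created, attributes) is
// rejoined. The sample line and values are invented for this illustration.
using System;
using System.IO;
using System.Linq;

class ParseEntrySketch
{
    static void Main()
    {
        string line = "\"C:\\Docs\\report, final.txt\",2048,132514303310000000,132514303310000000,32";
        string[] items = line.Split(',');

        string path = string.Join(",", items.Take(items.Length - 4)).Trim('\"');
        long size = long.Parse(items[items.Length - 4]);
        DateTime modified = DateTime.FromFileTime(long.Parse(items[items.Length - 3]));
        DateTime created = DateTime.FromFileTime(long.Parse(items[items.Length - 2]));
        FileAttributes attr = (FileAttributes)int.Parse(items[items.Length - 1]);

        // prints the rejoined path ("C:\Docs\report, final.txt") with its metadata
        Console.WriteLine($"{path} | {size:n0} bytes | modified {modified:yyyy-MM-dd} | {attr}");
    }
}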
void EFUScanFolderRecursive(StreamWriter newEFU, DirEntry dir)
{
    if (Program.ShowProgress && dirnum++ % 100 == 0)
    {
        string log = $" Scanning: {dir.Path}";
        if (log.Length > Console.BufferWidth - 1)
        {
            log = log.Substring(0, Console.BufferWidth - 4) + "...";
        }
        Console.Write($"{log.PadRight(Console.BufferWidth - 1)}\r");
    }

    int depth = dir.Path.Length < 3 ? 0 : dir.Path.Length - dir.Path.Substring(2).Replace("\\", "").Length;
    if (depth > maxDepth)
    {
        maxDepth = depth;
    }

    FileData[] contents = FastDirectory.GetFiles(dir.Path, "*.*", SearchOption.TopDirectoryOnly);
    var subs = contents.Where(e => e.Attributes.HasFlag(FileAttributes.Directory)).ToArray();
    var files = contents.Where(e => !e.Attributes.HasFlag(FileAttributes.Directory)).ToArray();

    foreach (var f in files)
    {
        if (isIncluded(f.FullPath, false))
        {
            newEFU.WriteLine($"\"{f.FullPath}\",{f.Size},{f.LastWriteTime.ToFileTime()},{f.CreationTime.ToFileTime()},{(int)f.Attributes}");
            dir.Size += f.Size;
            totalSize += f.Size;
            fileCount++;
        }
        else
        {
            exFileCount++;
            exSize += f.Size;
        }
    }

    foreach (var s in subs)
    {
        if (dirIndex.ContainsKey(s.FullPath.ToLower())) // skip folders already indexed
        {
            continue;
        }
        if (isIncluded(s.FullPath, true))
        {
            DirEntry sub = new DirEntry(dir, s) { Changed = true };
            dir.Add(sub);
            dirIndex[s.FullPath.ToLower()] = sub;
            EFUScanFolderRecursive(newEFU, sub);
            //dir.Size += sub.Size;
        }
        else
        {
            exDirCount++;
        }
    }
}
/// <summary>
/// Gets <see cref="FileData"/> for all the files in a directory that match a
/// specific filter.
/// </summary>
/// <param name="path">The path to search.</param>
/// <param name="searchPattern">The search string to match against files in the path.</param>
/// <returns>An object that implements <see cref="IEnumerable{FileData}"/> and
/// allows you to enumerate the files in the given directory.</returns>
/// <exception cref="ArgumentNullException">
/// <paramref name="path"/> is a null reference (Nothing in VB)
/// </exception>
/// <exception cref="ArgumentNullException">
/// <paramref name="searchPattern"/> is a null reference (Nothing in VB)
/// </exception>
public static IEnumerable<FileData> EnumerateFiles(string path, string searchPattern)
{
    return FastDirectory.EnumerateFiles(path, searchPattern, SearchOption.TopDirectoryOnly);
}
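// Brief usage sketch (not part of the library): unlike GetFiles, EnumerateFiles yields
// results lazily, so a large directory can be summarized without building an array first.
// The path and pattern are examples only; the directory-attribute filter mirrors the one
// used in EFUScanFolderRecursive above.
using System;
using System.IO;
using System.Linq;

class EnumerateFilesDemo
{
    static void Main()
    {
        long bytes = FastDirectory.EnumerateFiles(@"C:\Windows\System32", "*.dll")
            .Where(f => !f.Attributes.HasFlag(FileAttributes.Directory))
            .Sum(f => f.Size);
        Console.WriteLine($"Total DLL bytes: {bytes:n0}");
    }
}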
/// <summary>
/// Gets <see cref="FileData"/> for all the files in a directory.
/// </summary>
/// <param name="path">The path to search.</param>
/// <returns>An object that implements <see cref="IEnumerable{FileData}"/> and
/// allows you to enumerate the files in the given directory.</returns>
/// <exception cref="ArgumentNullException">
/// <paramref name="path"/> is a null reference (Nothing in VB)
/// </exception>
public static IEnumerable<FileData> EnumerateFiles(string path)
{
    return FastDirectory.EnumerateFiles(path, "*");
}