private static IEnumerable<string> FindLocaleFiles(string directoryPath)
{
    DirectoryEnumerationFilters filters = new DirectoryEnumerationFilters()
    {
        // Keep only .lsb / .lsj locale files.
        InclusionFilter = f =>
        {
            return f.Extension.Equals("lsb", StringComparison.OrdinalIgnoreCase)
                || f.Extension.Equals("lsj", StringComparison.OrdinalIgnoreCase);
        },
        // Log access-denied errors (code 5) and continue; any other error code throws.
        ErrorFilter = delegate(int errorCode, string errorMessage, string pathProcessed)
        {
            var gotException = errorCode == 5;
            if (gotException)
            {
                Log.Here().Error($"Error reading file at '{pathProcessed}': [{errorCode}]({errorMessage})");
            }
            return gotException;
        },
        RecursionFilter = f => true
    };

    return Directory.EnumerateFiles(directoryPath,
        DirectoryEnumerationOptions.Files | DirectoryEnumerationOptions.ContinueOnException | DirectoryEnumerationOptions.Recursive,
        filters);
}
private static IEnumerable<string> EnumerateAllFiles(string path, IList<string> patterns, bool includeArchive, bool recursive, bool followSymlinks)
{
    // without filters, just enumerate files, which is faster
    var fileOptions = baseFileOptions;
    if (recursive)
    {
        fileOptions |= DirectoryEnumerationOptions.Recursive;
    }
    if (followSymlinks)
    {
        fileOptions &= ~DirectoryEnumerationOptions.SkipReparsePoints;
    }

    DirectoryEnumerationFilters fileFilters = new DirectoryEnumerationFilters
    {
        ErrorFilter = (errorCode, errorMessage, pathProcessed) =>
        {
            logger.Error($"Find file error {errorCode}: {errorMessage} on {pathProcessed}");
            return true;
        }
    };

    bool includeAllFiles = patterns.Count == 0 ||
        (patterns.Count == 1 && (patterns[0] == "*.*" || patterns[0] == "*"));

    if (!includeAllFiles)
    {
        fileFilters.InclusionFilter = fsei =>
        {
            foreach (string pattern in patterns)
            {
                if (WildcardMatch(fsei.FileName, pattern, true))
                {
                    return true;
                }
            }

            if (includeArchive)
            {
                foreach (string pattern in ArchiveDirectory.Patterns)
                {
                    if (WildcardMatch(fsei.FileName, pattern, true))
                    {
                        return true;
                    }
                }
            }
            return false;
        };
    }

    return Directory.EnumerateFiles(path, fileOptions, fileFilters, PathFormat.FullPath);
}
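Several of these enumeration helpers (EnumerateAllFiles, EnumerateFilesIncludeHidden, EnumerateFilesExcludeHidden, GetGitignoreDirectories) reference a shared baseFileOptions field that is not part of the excerpts. A minimal sketch of what such a field could look like, using AlphaFS DirectoryEnumerationOptions flags; the exact flag combination is an assumption, not taken from the original sources:

// Assumed default options for file enumeration (not from the original source).
private static readonly DirectoryEnumerationOptions baseFileOptions =
    DirectoryEnumerationOptions.Files |                // enumerate files only
    DirectoryEnumerationOptions.SkipReparsePoints |    // skip symlinks/mount points unless re-enabled by the caller
    DirectoryEnumerationOptions.ContinueOnException |  // defer error handling to the ErrorFilter
    DirectoryEnumerationOptions.BasicSearch |          // faster queries, no short-name lookup
    DirectoryEnumerationOptions.LargeCache;            // larger directory read buffer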
private static List<string> GetRecycleBinFiles(string dir)
{
    var files = new List<string>();

    Privilege[] privs = { Privilege.EnableDelegation, Privilege.Impersonate, Privilege.Tcb };

    using (new PrivilegeEnabler(Privilege.Backup, privs))
    {
        var filters = new DirectoryEnumerationFilters
        {
            // Used to abort the enumeration.
            // CancellationToken = cancelSource.Token,

            // Filter to decide whether to recurse into subdirectories.
            RecursionFilter = entryInfo =>
            {
                if (!entryInfo.IsMountPoint && !entryInfo.IsSymbolicLink)
                {
                    return true;
                }
                return false;
            },

            // Filter to process Exception handling.
            ErrorFilter = delegate(int errorCode, string errorMessage, string pathProcessed)
            {
                _logger.Error($"Error accessing '{pathProcessed}'. Error: {errorMessage}");

                // Return true to continue, false to throw the Exception.
                return true;
            },

            // Filter to in-/exclude file system entries during the enumeration.
            InclusionFilter = entryInfo =>
            {
                if (entryInfo.FileName == "INFO2" || entryInfo.FileName.StartsWith("$I"))
                {
                    _logger.Debug($"Found match: '{entryInfo.FullPath}'");
                    return true;
                }
                return false;
            }
        };

        var dirEnumOptions = DirectoryEnumerationOptions.Files | DirectoryEnumerationOptions.Recursive |
                             DirectoryEnumerationOptions.ContinueOnException | DirectoryEnumerationOptions.SkipReparsePoints;

        files.AddRange(Directory.EnumerateFileSystemEntryInfos<string>(dir, dirEnumOptions, filters).Where(File.Exists));
    }

    return files;
}
private static IEnumerable<string> EnumerateFilesIncludeHidden(string path, IList<string> patterns, bool recursive)
{
    // when not checking for hidden directories or files, just enumerate files, which is faster
    var fileOptions = baseFileOptions;
    if (recursive)
    {
        fileOptions |= DirectoryEnumerationOptions.Recursive;
    }

    DirectoryEnumerationFilters fileFilters = new DirectoryEnumerationFilters
    {
        ErrorFilter = (errorCode, errorMessage, pathProcessed) =>
        {
            logger.Error($"Find file error {errorCode}: {errorMessage} on {pathProcessed}");
            return true;
        }
    };

    bool includeAllFiles = patterns.Count == 0 ||
        (patterns.Count == 1 && (patterns[0] == "*.*" || patterns[0] == "*"));

    if (!includeAllFiles)
    {
        fileFilters.InclusionFilter = fsei =>
        {
            foreach (string pattern in patterns)
            {
                if (WildcardMatch(fsei.FileName, pattern, true))
                {
                    return true;
                }
                else if (pattern == "*.doc" && WildcardMatch(fsei.FileName, "*.doc*", true))
                {
                    return true;
                }
                else if (pattern == "*.xls" && WildcardMatch(fsei.FileName, "*.xls*", true))
                {
                    return true;
                }
            }
            return false;
        };
    }

    return Directory.EnumerateFiles(path, fileOptions, fileFilters, PathFormat.FullPath);
}
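The pattern checks in these inclusion filters call a WildcardMatch(fileName, pattern, ignoreCase) helper that is not included in the excerpts. A minimal sketch of such a helper, assuming the usual translation of * and ? wildcards into an anchored regular expression; the body is an assumption inferred from the call sites, not the original implementation:

// Assumed wildcard matcher (not from the original source): '*' matches any run, '?' matches one character.
private static bool WildcardMatch(string fileName, string pattern, bool ignoreCase)
{
    string regexPattern = "^" + System.Text.RegularExpressions.Regex.Escape(pattern)
        .Replace(@"\*", ".*")
        .Replace(@"\?", ".") + "$";

    var options = ignoreCase
        ? System.Text.RegularExpressions.RegexOptions.IgnoreCase
        : System.Text.RegularExpressions.RegexOptions.None;

    return System.Text.RegularExpressions.Regex.IsMatch(fileName, regexPattern, options);
}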
public static IList<string> GetGitignoreDirectories(string path, bool recursive, bool followSymlinks)
{
    if (File.Exists(Path.Combine(path, ".gitignore")))
    {
        return new List<string> { path };
    }

    var fileOptions = baseFileOptions;
    if (recursive)
    {
        fileOptions |= DirectoryEnumerationOptions.Recursive;
    }
    if (followSymlinks)
    {
        fileOptions &= ~DirectoryEnumerationOptions.SkipReparsePoints;
    }

    List<string> dontRecurseBelow = new List<string> { @"C:\$Recycle.Bin" };
    foreach (var sf in new[]
    {
        Environment.SpecialFolder.Windows,
        Environment.SpecialFolder.ProgramFiles,
        Environment.SpecialFolder.ProgramFilesX86,
    })
    {
        string p = Environment.GetFolderPath(sf);
        if (!string.IsNullOrEmpty(p))
        {
            dontRecurseBelow.Add(p);
        }
    }

    DirectoryEnumerationFilters fileFilters = new DirectoryEnumerationFilters
    {
        ErrorFilter = (errorCode, errorMessage, pathProcessed) =>
        {
            logger.Error($"Find file error {errorCode}: {errorMessage} on {pathProcessed}");
            return true;
        },
        RecursionFilter = fsei =>
        {
            if (Utils.CancelSearch)
            {
                throw new OperationCanceledException();
            }
            if (fsei.IsDirectory && dontRecurseBelow.Any(p => fsei.FullPath.StartsWith(p, true, CultureInfo.CurrentCulture)))
            {
                return false;
            }
            return true;
        },
        InclusionFilter = fsei =>
        {
            if (Utils.CancelSearch)
            {
                throw new OperationCanceledException();
            }
            if (fsei.FileName == ".gitignore")
            {
                dontRecurseBelow.Add(Path.GetDirectoryName(fsei.FullPath));
                return true;
            }
            return false;
        }
    };

    try
    {
        // search down subdirectories
        var list = Directory.EnumerateFiles(path, fileOptions, fileFilters, PathFormat.FullPath)
            .Select(s => Path.GetDirectoryName(s)).ToList();

        if (list.Count == 0)
        {
            // not found, search up the tree
            DirectoryInfo di = new DirectoryInfo(path);
            while (di.Parent != null)
            {
                if (File.Exists(Path.Combine(di.Parent.FullName, ".gitignore")))
                {
                    list.Add(path);
                    break;
                }
                di = di.Parent;
            }
        }
        return list;
    }
    catch (OperationCanceledException)
    {
        return new List<string>();
    }
}
private static IEnumerable<string> EnumerateFilesExcludeHidden(string path, IList<string> patterns, bool recursive)
{
    // when checking for hidden directories, enumerate the directories separately from files to check for hidden flag on directories
    DirectoryInfo di = new DirectoryInfo(path);

    // the root of the drive has the hidden attribute set, so don't stop on this hidden directory
    if (di.Attributes.HasFlag(FileAttributes.Hidden) && (di.Root != di))
    {
        yield break;
    }

    var dirOptions = baseDirOptions;
    if (recursive)
    {
        dirOptions |= DirectoryEnumerationOptions.Recursive;
    }

    DirectoryEnumerationFilters dirFilters = new DirectoryEnumerationFilters
    {
        ErrorFilter = (errorCode, errorMessage, pathProcessed) =>
        {
            logger.Error($"Find file error {errorCode}: {errorMessage} on {pathProcessed}");
            return true;
        }
    };
    dirFilters.InclusionFilter = fsei => !fsei.IsHidden;

    DirectoryEnumerationFilters fileFilters = new DirectoryEnumerationFilters
    {
        ErrorFilter = (errorCode, errorMessage, pathProcessed) =>
        {
            logger.Error($"Find file error {errorCode}: {errorMessage} on {pathProcessed}");
            return true;
        }
    };

    bool includeAllFiles = patterns.Count == 0 ||
        (patterns.Count == 1 && (patterns[0] == "*.*" || patterns[0] == "*"));

    if (includeAllFiles)
    {
        fileFilters.InclusionFilter = fsei => !fsei.IsHidden;
    }
    else
    {
        fileFilters.InclusionFilter = fsei =>
        {
            if (fsei.IsHidden)
            {
                return false;
            }
            foreach (string pattern in patterns)
            {
                if (WildcardMatch(fsei.FileName, pattern, true))
                {
                    return true;
                }
                else if (pattern == "*.doc" && WildcardMatch(fsei.FileName, "*.doc*", true))
                {
                    return true;
                }
                else if (pattern == "*.xls" && WildcardMatch(fsei.FileName, "*.xls*", true))
                {
                    return true;
                }
            }
            return false;
        };
    }

    IEnumerable<string> directories = new string[] { path };
    if (recursive)
    {
        directories = directories.Concat(Directory.EnumerateDirectories(path, dirOptions, dirFilters, PathFormat.FullPath));
    }

    foreach (var directory in directories)
    {
        IEnumerable<string> matches = Directory.EnumerateFiles(directory, baseFileOptions, fileFilters, PathFormat.FullPath);
        foreach (var file in matches)
        {
            yield return file;
        }
    }
}
private static void DoWork(string f, string d, string csv, string csvf, string json, string jsonf, string xml, string xmlf, string dt, string inc, string exc, string sd, string ed, bool fj, int tdt, bool met, string maps, bool vss, bool dedupe, bool sync, bool debug, bool trace) { var levelSwitch = new LoggingLevelSwitch(); _activeDateTimeFormat = dt; var formatter = new DateTimeOffsetFormatter(CultureInfo.CurrentCulture); var template = "{Message:lj}{NewLine}{Exception}"; if (debug) { levelSwitch.MinimumLevel = LogEventLevel.Debug; template = "[{Timestamp:HH:mm:ss.fff} {Level:u3}] {Message:lj}{NewLine}{Exception}"; } if (trace) { levelSwitch.MinimumLevel = LogEventLevel.Verbose; template = "[{Timestamp:HH:mm:ss.fff} {Level:u3}] {Message:lj}{NewLine}{Exception}"; } var conf = new LoggerConfiguration() .WriteTo.Console(outputTemplate: template, formatProvider: formatter) .MinimumLevel.ControlledBy(levelSwitch); Log.Logger = conf.CreateLogger(); if (sync) { try { Log.Information("{Header}", Header); UpdateFromRepo(); } catch (Exception e) { Log.Error(e, "There was an error checking for updates: {Message}", e.Message); } Environment.Exit(0); } if (f.IsNullOrEmpty() && d.IsNullOrEmpty()) { var helpBld = new HelpBuilder(LocalizationResources.Instance, Console.WindowWidth); var hc = new HelpContext(helpBld, _rootCommand, Console.Out); helpBld.Write(hc); Log.Warning("-f or -d is required. Exiting"); Console.WriteLine(); return; } Log.Information("{Header}", Header); Console.WriteLine(); Log.Information("Command line: {Args}", string.Join(" ", Environment.GetCommandLineArgs().Skip(1))); Console.WriteLine(); if (IsAdministrator() == false) { Log.Warning("Warning: Administrator privileges not found!"); Console.WriteLine(); } if (vss & !RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) { vss = false; Log.Warning("{Vss} not supported on non-Windows platforms. Disabling...", "--vss"); Console.WriteLine(); } if (vss & (IsAdministrator() == false)) { Log.Error("{Vss} is present, but administrator rights not found. Exiting", "--vss"); Console.WriteLine(); return; } var sw = new Stopwatch(); sw.Start(); var ts = DateTimeOffset.UtcNow; _errorFiles = new Dictionary <string, int>(); if (json.IsNullOrEmpty() == false) { if (Directory.Exists(json) == false) { Log.Information("Path to {Json} doesn't exist. Creating...", json); try { Directory.CreateDirectory(json); } catch (Exception ex) { Log.Fatal(ex, "Unable to create directory {Json}. Does a file with the same name exist? Exiting", json); Console.WriteLine(); return; } } var outName = $"{ts:yyyyMMddHHmmss}_EvtxECmd_Output.json"; if (jsonf.IsNullOrEmpty() == false) { outName = Path.GetFileName(jsonf); } var outFile = Path.Combine(json, outName); Log.Information("json output will be saved to {OutFile}", outFile); Console.WriteLine(); try { _swJson = new StreamWriter(outFile, false, Encoding.UTF8); } catch (Exception ex) { Log.Error(ex, "Unable to open {OutFile}! Is it in use? Exiting!", outFile); Console.WriteLine(); Environment.Exit(0); } JsConfig.DateHandler = DateHandler.ISO8601; } if (xml.IsNullOrEmpty() == false) { if (Directory.Exists(xml) == false) { Log.Information("Path to {Xml} doesn't exist. Creating...", xml); try { Directory.CreateDirectory(xml); } catch (Exception ex) { Log.Fatal(ex, "Unable to create directory {Xml}. Does a file with the same name exist? 
Exiting", xml); return; } } var outName = $"{ts:yyyyMMddHHmmss}_EvtxECmd_Output.xml"; if (xmlf.IsNullOrEmpty() == false) { outName = Path.GetFileName(xmlf); } var outFile = Path.Combine(xml, outName); Log.Information("XML output will be saved to {OutFile}", outFile); Console.WriteLine(); try { _swXml = new StreamWriter(outFile, false, Encoding.UTF8); } catch (Exception ex) { Log.Error(ex, "Unable to open {OutFile}! Is it in use? Exiting!", outFile); Console.WriteLine(); Environment.Exit(0); } } if (sd.IsNullOrEmpty() == false) { if (DateTimeOffset.TryParse(sd, null, DateTimeStyles.AssumeUniversal, out var dateTimeOffset)) { _startDate = dateTimeOffset; Log.Information("Setting Start date to {StartDate}", _startDate.Value); } else { Log.Warning("Could not parse {Sd} to a valid datetime! Events will not be filtered by Start date!", sd); } } if (ed.IsNullOrEmpty() == false) { if (DateTimeOffset.TryParse(ed, null, DateTimeStyles.AssumeUniversal, out var dateTimeOffset)) { _endDate = dateTimeOffset; Log.Information("Setting End date to {EndDate}", _endDate.Value); } else { Log.Warning("Could not parse {Ed} to a valid datetime! Events will not be filtered by End date!", ed); } } if (_startDate.HasValue || _endDate.HasValue) { Console.WriteLine(); } if (csv.IsNullOrEmpty() == false) { if (Directory.Exists(csv) == false) { Log.Information( "Path to {Csv} doesn't exist. Creating...", csv); try { Directory.CreateDirectory(csv); } catch (Exception ex) { Log.Fatal(ex, "Unable to create directory {Csv}. Does a file with the same name exist? Exiting", csv); return; } } var outName = $"{ts:yyyyMMddHHmmss}_EvtxECmd_Output.csv"; if (csvf.IsNullOrEmpty() == false) { outName = Path.GetFileName(csvf); } var outFile = Path.Combine(csv, outName); Log.Information("CSV output will be saved to {OutFile}", outFile); Console.WriteLine(); try { _swCsv = new StreamWriter(outFile, false, Encoding.UTF8); var opt = new CsvConfiguration(CultureInfo.InvariantCulture) { ShouldUseConstructorParameters = _ => false }; _csvWriter = new CsvWriter(_swCsv, opt); } catch (Exception ex) { Log.Error(ex, "Unable to open {OutFile}! Is it in use? 
Exiting!", outFile); Console.WriteLine(); Environment.Exit(0); } var foo = _csvWriter.Context.AutoMap <EventRecord>(); foo.Map(t => t.RecordPosition).Ignore(); foo.Map(t => t.Size).Ignore(); foo.Map(t => t.Timestamp).Ignore(); foo.Map(t => t.RecordNumber).Index(0); foo.Map(t => t.EventRecordId).Index(1); foo.Map(t => t.TimeCreated).Index(2); foo.Map(t => t.TimeCreated).Convert(t => $"{t.Value.TimeCreated.ToString(dt)}"); foo.Map(t => t.EventId).Index(3); foo.Map(t => t.Level).Index(4); foo.Map(t => t.Provider).Index(5); foo.Map(t => t.Channel).Index(6); foo.Map(t => t.ProcessId).Index(7); foo.Map(t => t.ThreadId).Index(8); foo.Map(t => t.Computer).Index(9); foo.Map(t => t.UserId).Index(10); foo.Map(t => t.MapDescription).Index(11); foo.Map(t => t.UserName).Index(12); foo.Map(t => t.RemoteHost).Index(13); foo.Map(t => t.PayloadData1).Index(14); foo.Map(t => t.PayloadData2).Index(15); foo.Map(t => t.PayloadData3).Index(16); foo.Map(t => t.PayloadData4).Index(17); foo.Map(t => t.PayloadData5).Index(18); foo.Map(t => t.PayloadData6).Index(19); foo.Map(t => t.ExecutableInfo).Index(20); foo.Map(t => t.HiddenRecord).Index(21); foo.Map(t => t.SourceFile).Index(22); foo.Map(t => t.Keywords).Index(23); foo.Map(t => t.Payload).Index(24); _csvWriter.Context.RegisterClassMap(foo); _csvWriter.WriteHeader <EventRecord>(); _csvWriter.NextRecord(); } if (Directory.Exists(maps) == false) { Log.Warning("Maps directory {Maps} does not exist! Event ID maps will not be loaded!!", maps); } else { Log.Debug("Loading maps from {Path}", Path.GetFullPath(maps)); var errors = EventLog.LoadMaps(Path.GetFullPath(maps)); if (errors) { return; } Log.Information("Maps loaded: {Count:N0}", EventLog.EventLogMaps.Count); } _includeIds = new HashSet <int>(); _excludeIds = new HashSet <int>(); if (exc.IsNullOrEmpty() == false) { var excSegs = exc.Split(','); foreach (var incSeg in excSegs) { if (int.TryParse(incSeg, out var goodId)) { _excludeIds.Add(goodId); } } } if (inc.IsNullOrEmpty() == false) { _excludeIds.Clear(); var incSegs = inc.Split(','); foreach (var incSeg in incSegs) { if (int.TryParse(incSeg, out var goodId)) { _includeIds.Add(goodId); } } } if (vss) { string driveLetter; if (f.IsEmpty() == false) { driveLetter = Path.GetPathRoot(Path.GetFullPath(f)) .Substring(0, 1); } else { driveLetter = Path.GetPathRoot(Path.GetFullPath(d)) .Substring(0, 1); } Helper.MountVss(driveLetter, VssDir); Console.WriteLine(); } EventLog.TimeDiscrepancyThreshold = tdt; if (f.IsNullOrEmpty() == false) { if (File.Exists(f) == false) { Log.Warning("\t{F} does not exist! Exiting", f); Console.WriteLine(); return; } if (_swXml == null && _swJson == null && _swCsv == null) { //no need for maps Log.Debug("Clearing map collection since no output specified"); EventLog.EventLogMaps.Clear(); } dedupe = false; ProcessFile(Path.GetFullPath(f), dedupe, fj, met); if (vss) { var vssDirs = Directory.GetDirectories(VssDir); var root = Path.GetPathRoot(Path.GetFullPath(f)); var stem = Path.GetFullPath(f).Replace(root, ""); foreach (var vssDir in vssDirs) { var newPath = Path.Combine(vssDir, stem); if (File.Exists(newPath)) { ProcessFile(newPath, dedupe, fj, met); } } } } else { if (Directory.Exists(d) == false) { Log.Warning("\t{D} does not exist! 
Exiting", d); Console.WriteLine(); return; } Log.Information("Looking for event log files in {D}", d); Console.WriteLine(); #if !NET6_0 var directoryEnumerationFilters = new DirectoryEnumerationFilters { InclusionFilter = fsei => fsei.Extension.ToUpperInvariant() == ".EVTX", RecursionFilter = entryInfo => !entryInfo.IsMountPoint && !entryInfo.IsSymbolicLink, ErrorFilter = (errorCode, errorMessage, pathProcessed) => true }; var dirEnumOptions = DirectoryEnumerationOptions.Files | DirectoryEnumerationOptions.Recursive | DirectoryEnumerationOptions.SkipReparsePoints | DirectoryEnumerationOptions.ContinueOnException | DirectoryEnumerationOptions.BasicSearch; var files2 = Directory.EnumerateFileSystemEntries(Path.GetFullPath(d), dirEnumOptions, directoryEnumerationFilters); #else var enumerationOptions = new EnumerationOptions { IgnoreInaccessible = true, MatchCasing = MatchCasing.CaseInsensitive, RecurseSubdirectories = true, AttributesToSkip = 0 }; var files2 = Directory.EnumerateFileSystemEntries(d, "*.evtx", enumerationOptions); #endif if (_swXml == null && _swJson == null && _swCsv == null) { //no need for maps Log.Debug("Clearing map collection since no output specified"); EventLog.EventLogMaps.Clear(); } foreach (var file in files2) { ProcessFile(file, dedupe, fj, met); } if (vss) { var vssDirs = Directory.GetDirectories(VssDir); Console.WriteLine(); foreach (var vssDir in vssDirs) { var root = Path.GetPathRoot(Path.GetFullPath(d)); var stem = Path.GetFullPath(d).Replace(root, ""); var target = Path.Combine(vssDir, stem); Console.WriteLine(); Log.Information("Searching {Vss} for event logs...", $"VSS{target.Replace($"{VssDir}\\", "")}"); var vssFiles = Helper.GetFilesFromPath(target, "*.evtx", true); foreach (var file in vssFiles) { ProcessFile(file, dedupe, fj, met); } } } } try { _swCsv?.Flush(); _swCsv?.Close(); _swJson?.Flush(); _swJson?.Close(); _swXml?.Flush(); _swXml?.Close(); } catch (Exception e) { Log.Error(e, "Error when flushing output files to disk! Error message: {Message}", e.Message); } sw.Stop(); Console.WriteLine(); if (_fileCount == 1) { Log.Information("Processed {FileCount:N0} file in {TotalSeconds:N4} seconds", _fileCount, sw.Elapsed.TotalSeconds); } else { Log.Information("Processed {FileCount:N0} files in {TotalSeconds:N4} seconds", _fileCount, sw.Elapsed.TotalSeconds); } Console.WriteLine(); if (_errorFiles.Count > 0) { Console.WriteLine(); Log.Information("Files with errors"); foreach (var errorFile in _errorFiles) { Log.Information("{Key} error count: {Value:N0}", errorFile.Key, errorFile.Value); } Console.WriteLine(); } if (vss) { if (Directory.Exists(VssDir)) { foreach (var directory in Directory.GetDirectories(VssDir)) { Directory.Delete(directory); } #if !NET6_0 Directory.Delete(VssDir, true, true); #else Directory.Delete(VssDir, true); #endif } } }
public static IList<string> GetGitignoreDirectories(string path, bool recursive, bool followSymlinks)
{
    if (File.Exists(Path.Combine(path, ".gitignore")))
    {
        return new List<string> { path };
    }

    var fileOptions = baseFileOptions;
    if (recursive)
    {
        fileOptions |= DirectoryEnumerationOptions.Recursive;
    }
    if (followSymlinks)
    {
        fileOptions &= ~DirectoryEnumerationOptions.SkipReparsePoints;
    }

    List<string> dontRecurseBelow = new List<string>();

    DirectoryEnumerationFilters fileFilters = new DirectoryEnumerationFilters
    {
        ErrorFilter = (errorCode, errorMessage, pathProcessed) =>
        {
            logger.Error($"Find file error {errorCode}: {errorMessage} on {pathProcessed}");
            return true;
        },
        RecursionFilter = fsei =>
        {
            if (fsei.IsDirectory && dontRecurseBelow.Any(p => fsei.FullPath.StartsWith(p, StringComparison.CurrentCulture)))
            {
                return false;
            }
            return true;
        },
        InclusionFilter = fsei =>
        {
            if (fsei.FileName == ".gitignore")
            {
                dontRecurseBelow.Add(Path.GetDirectoryName(fsei.FullPath));
                return true;
            }
            return false;
        }
    };

    var list = Directory.EnumerateFiles(path, fileOptions, fileFilters, PathFormat.FullPath)
        .Select(s => Path.GetDirectoryName(s)).ToList();

    if (list.Count == 0)
    {
        DirectoryInfo di = new DirectoryInfo(path);
        while (di.Parent != null)
        {
            if (File.Exists(Path.Combine(di.Parent.FullName, ".gitignore")))
            {
                list.Add(path);
                break;
            }
            di = di.Parent;
        }
    }

    return list;
}
private static IEnumerable<string> EnumerateDirectoriesImpl(string path, FileFilter filter, int startDepth, Gitignore gitignore)
{
    var dirOptions = baseDirOptions;
    if (filter.IncludeSubfolders)
    {
        dirOptions |= DirectoryEnumerationOptions.Recursive;
    }
    if (filter.FollowSymlinks)
    {
        dirOptions &= ~DirectoryEnumerationOptions.SkipReparsePoints;
    }

    DirectoryEnumerationFilters dirFilters = new DirectoryEnumerationFilters
    {
        ErrorFilter = (errorCode, errorMessage, pathProcessed) =>
        {
            logger.Error($"Find file error {errorCode}: {errorMessage} on {pathProcessed}");
            return true;
        },
        RecursionFilter = fsei =>
        {
            if (gitignore != null && gitignore.Directories.Contains(fsei.FullPath))
            {
                return false;
            }
            if (!filter.IncludeHidden && fsei.IsHidden)
            {
                return false;
            }
            if (filter.MaxSubfolderDepth >= 0)
            {
                int depth = GetDepth(new DirectoryInfo(fsei.FullPath));
                if (depth - startDepth > filter.MaxSubfolderDepth)
                {
                    return false;
                }
            }
            if (filter.UseGitIgnore && fsei.FileName == ".git")
            {
                return false;
            }
            return true;
        },
        InclusionFilter = fsei =>
        {
            if (gitignore != null && gitignore.Directories.Contains(fsei.FullPath))
            {
                return false;
            }
            if (!filter.IncludeHidden && fsei.IsHidden)
            {
                return false;
            }
            if (filter.MaxSubfolderDepth >= 0)
            {
                int depth = GetDepth(new DirectoryInfo(fsei.FullPath));
                if (depth - startDepth > filter.MaxSubfolderDepth)
                {
                    return false;
                }
            }
            if (filter.UseGitIgnore && fsei.FileName == ".git")
            {
                return false;
            }
            return true;
        },
    };

    return Directory.EnumerateDirectories(path, dirOptions, dirFilters, PathFormat.FullPath);
}
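EnumerateFilesExcludeHidden and EnumerateDirectoriesImpl also rely on a baseDirOptions field and a GetDepth helper that are not shown in these excerpts. A minimal sketch of both, assuming GetDepth simply counts directory levels below the drive root; the names come from the call sites, but the bodies and flag choices are assumptions:

// Assumed folder-enumeration counterpart to baseFileOptions (flag choice is a guess, not the original).
private static readonly DirectoryEnumerationOptions baseDirOptions =
    DirectoryEnumerationOptions.Folders |
    DirectoryEnumerationOptions.SkipReparsePoints |
    DirectoryEnumerationOptions.ContinueOnException |
    DirectoryEnumerationOptions.BasicSearch;

// Assumed depth helper: "C:\" is depth 0, "C:\a\b" is depth 2.
private static int GetDepth(DirectoryInfo di)
{
    int depth = 0;
    while (di.Parent != null)
    {
        depth++;
        di = di.Parent;
    }
    return depth;
}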
public static List<ModProjectData> LoadModProjects(DOS2DEModuleData Data, bool clearExisting = false)
{
    if (clearExisting)
    {
        Data.ModProjects.Clear();
        Log.Here().Important("Cleared mod project data.");
    }

    List<ModProjectData> newItems = new List<ModProjectData>();

    if (Data.Settings != null && !String.IsNullOrEmpty(Data.Settings.DOS2DEDataDirectory))
    {
        if (Directory.Exists(Data.Settings.DOS2DEDataDirectory))
        {
            string projectsPath = Path.Combine(Data.Settings.DOS2DEDataDirectory, "Projects");
            string modsPath = Path.Combine(Data.Settings.DOS2DEDataDirectory, "Mods");

            if (Directory.Exists(modsPath))
            {
                Log.Here().Activity("Loading DOS2 projects from mods directory at: {0}", modsPath);

                DirectoryEnumerationFilters filters = new DirectoryEnumerationFilters()
                {
                    InclusionFilter = (f) => !IgnoredFolders.Contains(f.FileName),
                };

                var modFolders = Directory.EnumerateDirectories(modsPath, DirectoryEnumerationOptions.Folders, filters, PathFormat.LongFullPath);

                if (modFolders != null)
                {
                    foreach (string modFolder in modFolders)
                    {
                        var modFolderName = Path.GetFileName(modFolder);
                        //Log.Here().Activity("Checking project mod folder: {0}", modFolderName);

                        var metaFilePath = Path.Combine(modFolder, "meta.lsx");
                        if (File.Exists(metaFilePath))
                        {
                            //Log.Here().Activity("Meta file found for project {0}. Reading file.", modFolderName);
                            ModProjectData modProjectData = new ModProjectData();
                            modProjectData.LoadAllData(metaFilePath, projectsPath);
                            //Log.Here().Activity("Finished reading meta files for mod: {0}", modProjectData.ModuleInfo.Name);

                            if (!clearExisting)
                            {
                                var previous = Data.ModProjects.Items.FirstOrDefault(p => p.FolderName == modProjectData.FolderName);
                                if (previous != null)
                                {
                                    if (previous.DataIsNewer(modProjectData))
                                    {
                                        previous.UpdateData(modProjectData);
                                    }
                                }
                                else
                                {
                                    newItems.Add(modProjectData);
                                }
                            }
                            else
                            {
                                newItems.Add(modProjectData);
                            }
                        }
                    }
                }
            }
        }
        else
        {
            Log.Here().Error("Loading available projects failed. DOS2 data directory not found at {0}", Data.Settings.DOS2DEDataDirectory);
        }
    }

    return newItems;
}
public static async Task <List <ModProjectData> > LoadModProjectsAsync(DOS2DEModuleData Data, bool clearExisting = false) { if (clearExisting) { Log.Here().Important("Clearing mod projects"); if (Thread.CurrentThread.IsBackground) { await Observable.Start(() => { Data.ModProjects.Clear(); return(Unit.Default); }, RxApp.MainThreadScheduler); } else { Data.ModProjects.Clear(); } } List <ModProjectData> newItems = new List <ModProjectData>(); if (Data.Settings != null && !String.IsNullOrEmpty(Data.Settings.DOS2DEDataDirectory)) { if (Directory.Exists(Data.Settings.DOS2DEDataDirectory)) { string projectsPath = Path.Combine(Data.Settings.DOS2DEDataDirectory, "Projects"); string modsPath = Path.Combine(Data.Settings.DOS2DEDataDirectory, "Mods"); if (Directory.Exists(modsPath)) { Log.Here().Activity($"Loading DOS2 projects from mods directory at: {modsPath}"); //DirectoryInfo modsRoot = new DirectoryInfo(modsPath); //var modFolders = modsRoot.GetDirectories().Where(s => !IgnoredFolders.Contains(s.Name)); DirectoryEnumerationFilters filters = new DirectoryEnumerationFilters() { InclusionFilter = (f) => { return(!IgnoredFolders.Contains(f.FileName)); }, }; //var modFolders = await Observable.Start(() => //{ // return Directory.EnumerateDirectories(modsPath, DirectoryEnumerationOptions.Folders, filters, PathFormat.LongFullPath); //}, RxApp.TaskpoolScheduler); var modFolders = Directory.EnumerateDirectories(modsPath, DirectoryEnumerationOptions.Folders, filters, PathFormat.LongFullPath); if (modFolders != null) { foreach (string modFolder in modFolders) { try { var modFolderName = Path.GetFileName(modFolder); Log.Here().Activity($"Checking project mod folder: {modFolderName}"); var metaFilePath = Path.Combine(modFolder, "meta.lsx"); if (File.Exists(metaFilePath)) { Log.Here().Activity($"Meta file found for project {modFolderName}. Reading file."); ModProjectData modProjectData = new ModProjectData(); await modProjectData.LoadAllDataAsync(metaFilePath, projectsPath); Log.Here().Activity($"Finished reading meta files for mod: {modProjectData.ModuleInfo.Name}"); if (!clearExisting) { var previous = Data.ModProjects.Items.FirstOrDefault(p => p.FolderName == modProjectData.FolderName); if (previous != null) { if (previous.DataIsNewer(modProjectData)) { if (Thread.CurrentThread.IsBackground) { await Observable.Start(() => { previous.UpdateData(modProjectData); return(Unit.Default); }, RxApp.MainThreadScheduler); } else { previous.UpdateData(modProjectData); } } } else { newItems.Add(modProjectData); } } else { newItems.Add(modProjectData); } } } catch (Exception ex) { Log.Here().Error($"Error parsing mod folder ${modFolder}:"); Log.Here().Error(ex.ToString()); } } } } } else { Log.Here().Error("Loading available projects failed. DOS2 data directory not found at {0}", Data.Settings.DOS2DEDataDirectory); } } return(newItems); }
private static List<string> GetRecycleBinFiles(string dir)
{
    var files = new List<string>();

#if NET6_0
    var enumerationOptions = new EnumerationOptions
    {
        IgnoreInaccessible = true,
        MatchCasing = MatchCasing.CaseSensitive,
        RecurseSubdirectories = true,
        AttributesToSkip = 0
    };

    var files2 = Directory.EnumerateFileSystemEntries(dir, "$I*", enumerationOptions);
    files.AddRange(files2);

    files2 = Directory.EnumerateFileSystemEntries(dir, "INFO2", enumerationOptions);
    files.AddRange(files2);
#elif NET462
    Privilege[] privs = { Privilege.EnableDelegation, Privilege.Impersonate, Privilege.Tcb };
    using var enabler = new PrivilegeEnabler(Privilege.Backup, privs);

    var filters = new DirectoryEnumerationFilters
    {
        // Used to abort the enumeration.
        // CancellationToken = cancelSource.Token,

        // Filter to decide whether to recurse into subdirectories.
        RecursionFilter = entryInfo =>
        {
            if (!entryInfo.IsMountPoint && !entryInfo.IsSymbolicLink)
            {
                return true;
            }
            return false;
        },

        // Filter to process Exception handling.
        ErrorFilter = delegate(int errorCode, string errorMessage, string pathProcessed)
        {
            Log.Error("Error accessing {PathProcessed}. Error: {ErrorMessage}", pathProcessed, errorMessage);

            // Return true to continue, false to throw the Exception.
            return true;
        },

        // Filter to in-/exclude file system entries during the enumeration.
        InclusionFilter = entryInfo =>
        {
            if (entryInfo.FileName == "INFO2" || entryInfo.FileName.StartsWith("$I"))
            {
                Log.Debug("Found match: {FullPath}", entryInfo.FullPath);
                return true;
            }
            return false;
        }
    };

    var dirEnumOptions = DirectoryEnumerationOptions.Files | DirectoryEnumerationOptions.Recursive |
                         DirectoryEnumerationOptions.ContinueOnException | DirectoryEnumerationOptions.SkipReparsePoints;

    files.AddRange(Directory.EnumerateFileSystemEntryInfos<string>(dir, dirEnumOptions, filters).Where(File.Exists));
#endif

    return files;
}
private static void Main(string[] args) { ExceptionlessClient.Default.Startup("x3MPpeQSBUUsXl3DjekRQ9kYjyN3cr5JuwdoOBpZ"); SetupNLog(); _keywords = new HashSet <string> { "temp", "tmp" }; _logger = LogManager.GetCurrentClassLogger(); _fluentCommandLineParser = new FluentCommandLineParser <ApplicationArguments> { IsCaseSensitive = false }; _fluentCommandLineParser.Setup(arg => arg.File) .As('f') .WithDescription("File to process. Either this or -d is required"); _fluentCommandLineParser.Setup(arg => arg.Directory) .As('d') .WithDescription("Directory to recursively process. Either this or -f is required"); _fluentCommandLineParser.Setup(arg => arg.Keywords) .As('k') .WithDescription( "Comma separated list of keywords to highlight in output. By default, 'temp' and 'tmp' are highlighted. Any additional keywords will be added to these."); _fluentCommandLineParser.Setup(arg => arg.OutFile) .As('o') .WithDescription( "When specified, save prefetch file bytes to the given path. Useful to look at decompressed Win10 files"); _fluentCommandLineParser.Setup(arg => arg.Quiet) .As('q') .WithDescription( "Do not dump full details about each file processed. Speeds up processing when using --json or --csv. Default is FALSE\r\n") .SetDefault(false); _fluentCommandLineParser.Setup(arg => arg.JsonDirectory) .As("json") .WithDescription( "Directory to save json representation to. Use --pretty for a more human readable layout"); _fluentCommandLineParser.Setup(arg => arg.CsvDirectory) .As("csv") .WithDescription( "Directory to save CSV results to. Be sure to include the full path in double quotes"); _fluentCommandLineParser.Setup(arg => arg.CsvName) .As("csvf") .WithDescription("File name to save CSV formatted results to. When present, overrides default name"); _fluentCommandLineParser.Setup(arg => arg.xHtmlDirectory) .As("html") .WithDescription( "Directory to save xhtml formatted results to. Be sure to include the full path in double quotes"); _fluentCommandLineParser.Setup(arg => arg.JsonPretty) .As("pretty") .WithDescription( "When exporting to json, use a more human readable layout. Default is FALSE\r\n").SetDefault(false); _fluentCommandLineParser.Setup(arg => arg.DateTimeFormat) .As("dt") .WithDescription( "The custom date/time format to use when displaying timestamps. See https://goo.gl/CNVq0k for options. Default is: yyyy-MM-dd HH:mm:ss") .SetDefault("yyyy-MM-dd HH:mm:ss"); _fluentCommandLineParser.Setup(arg => arg.PreciseTimestamps) .As("mp") .WithDescription( "When true, display higher precision for timestamps. Default is FALSE").SetDefault(false); var header = $"PECmd version {Assembly.GetExecutingAssembly().GetName().Version}" + "\r\n\r\nAuthor: Eric Zimmerman ([email protected])" + "\r\nhttps://github.com/EricZimmerman/PECmd"; var footer = @"Examples: PECmd.exe -f ""C:\Temp\CALC.EXE-3FBEF7FD.pf""" + "\r\n\t " + @" PECmd.exe -f ""C:\Temp\CALC.EXE-3FBEF7FD.pf"" --json ""D:\jsonOutput"" --jsonpretty" + "\r\n\t " + @" PECmd.exe -d ""C:\Temp"" -k ""system32, fonts""" + "\r\n\t " + @" PECmd.exe -d ""C:\Temp"" --csv ""c:\temp"" --csvf foo.csv --json c:\temp\json" + "\r\n\t " + @" PECmd.exe -d ""C:\Windows\Prefetch""" + "\r\n\t " + "\r\n\t" + " Short options (single letter) are prefixed with a single dash. 
Long commands are prefixed with two dashes\r\n"; _fluentCommandLineParser.SetupHelp("?", "help") .WithHeader(header) .Callback(text => _logger.Info(text + "\r\n" + footer)); var result = _fluentCommandLineParser.Parse(args); if (result.HelpCalled) { return; } if (result.HasErrors) { _logger.Error(""); _logger.Error(result.ErrorText); _fluentCommandLineParser.HelpOption.ShowHelp(_fluentCommandLineParser.Options); return; } if (UsefulExtension.IsNullOrEmpty(_fluentCommandLineParser.Object.File) && UsefulExtension.IsNullOrEmpty(_fluentCommandLineParser.Object.Directory)) { _fluentCommandLineParser.HelpOption.ShowHelp(_fluentCommandLineParser.Options); _logger.Warn("Either -f or -d is required. Exiting"); return; } if (UsefulExtension.IsNullOrEmpty(_fluentCommandLineParser.Object.File) == false && !File.Exists(_fluentCommandLineParser.Object.File)) { _logger.Warn($"File '{_fluentCommandLineParser.Object.File}' not found. Exiting"); return; } if (UsefulExtension.IsNullOrEmpty(_fluentCommandLineParser.Object.Directory) == false && !Directory.Exists(_fluentCommandLineParser.Object.Directory)) { _logger.Warn($"Directory '{_fluentCommandLineParser.Object.Directory}' not found. Exiting"); return; } if (_fluentCommandLineParser.Object.Keywords?.Length > 0) { var kws = _fluentCommandLineParser.Object.Keywords.ToLowerInvariant().Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries); foreach (var kw in kws) { _keywords.Add(kw.Trim()); } } _logger.Info(header); _logger.Info(""); _logger.Info($"Command line: {string.Join(" ", Environment.GetCommandLineArgs().Skip(1))}"); if (IsAdministrator() == false) { _logger.Fatal("\r\nWarning: Administrator privileges not found!"); } _logger.Info(""); _logger.Info($"Keywords: {string.Join(", ", _keywords)}"); _logger.Info(""); if (_fluentCommandLineParser.Object.PreciseTimestamps) { _fluentCommandLineParser.Object.DateTimeFormat = _preciseTimeFormat; } _processedFiles = new List <IPrefetch>(); _failedFiles = new List <string>(); if (_fluentCommandLineParser.Object.File?.Length > 0) { IPrefetch pf = null; try { pf = LoadFile(_fluentCommandLineParser.Object.File); if (pf != null) { if (_fluentCommandLineParser.Object.OutFile.IsNullOrEmpty() == false) { try { if (Directory.Exists(Path.GetDirectoryName(_fluentCommandLineParser.Object.OutFile)) == false) { Directory.CreateDirectory( Path.GetDirectoryName(_fluentCommandLineParser.Object.OutFile)); } PrefetchFile.SavePrefetch(_fluentCommandLineParser.Object.OutFile, pf); _logger.Info($"Saved prefetch bytes to '{_fluentCommandLineParser.Object.OutFile}'"); } catch (Exception e) { _logger.Error($"Unable to save prefetch file. Error: {e.Message}"); } } _processedFiles.Add(pf); } } catch (UnauthorizedAccessException ex) { _logger.Error( $"Unable to access '{_fluentCommandLineParser.Object.File}'. Are you running as an administrator? Error: {ex.Message}"); } catch (Exception ex) { _logger.Error( $"Error getting prefetch files in '{_fluentCommandLineParser.Object.Directory}'. 
Error: {ex.Message}"); } } else { _logger.Info($"Looking for prefetch files in '{_fluentCommandLineParser.Object.Directory}'"); _logger.Info(""); string[] pfFiles = null; var f = new DirectoryEnumerationFilters(); f.InclusionFilter = fsei => { if (fsei.Extension.ToUpperInvariant() == ".PF") { return(true); } return(false); }; f.RecursionFilter = entryInfo => !entryInfo.IsMountPoint && !entryInfo.IsSymbolicLink; f.ErrorFilter = (errorCode, errorMessage, pathProcessed) => true; var dirEnumOptions = DirectoryEnumerationOptions.Files | DirectoryEnumerationOptions.Recursive | DirectoryEnumerationOptions.SkipReparsePoints | DirectoryEnumerationOptions.ContinueOnException | DirectoryEnumerationOptions.BasicSearch; var files2 = Alphaleonis.Win32.Filesystem.Directory.EnumerateFileSystemEntries(_fluentCommandLineParser.Object.Directory, dirEnumOptions, f); try { pfFiles = files2.ToArray(); //Directory.GetFiles(_fluentCommandLineParser.Object.Directory, "*.pf", SearchOption.AllDirectories); } catch (UnauthorizedAccessException ua) { _logger.Error( $"Unable to access '{_fluentCommandLineParser.Object.Directory}'. Are you running as an administrator? Error: {ua.Message}"); return; } catch (Exception ex) { _logger.Error( $"Error getting prefetch files in '{_fluentCommandLineParser.Object.Directory}'. Error: {ex.Message}"); return; } _logger.Info($"Found {pfFiles.Length:N0} Prefetch files"); _logger.Info(""); var sw = new Stopwatch(); sw.Start(); foreach (var file in pfFiles) { var pf = LoadFile(file); if (pf != null) { _processedFiles.Add(pf); } } sw.Stop(); if (_fluentCommandLineParser.Object.Quiet) { _logger.Info(""); } _logger.Info( $"Processed {pfFiles.Length - _failedFiles.Count:N0} out of {pfFiles.Length:N0} files in {sw.Elapsed.TotalSeconds:N4} seconds"); if (_failedFiles.Count > 0) { _logger.Info(""); _logger.Warn("Failed files"); foreach (var failedFile in _failedFiles) { _logger.Info($" {failedFile}"); } } } if (_processedFiles.Count > 0) { _logger.Info(""); try { CsvWriter csv = null; StreamWriter streamWriter = null; CsvWriter csvTl = null; StreamWriter streamWriterTl = null; if (_fluentCommandLineParser.Object.CsvDirectory?.Length > 0) { var outName = $"{DateTimeOffset.Now:yyyyMMddHHmmss}_PECmd_Output.csv"; if (_fluentCommandLineParser.Object.CsvName.IsNullOrEmpty() == false) { outName = Path.GetFileName(_fluentCommandLineParser.Object.CsvName); } var outNameTl = $"{DateTimeOffset.Now:yyyyMMddHHmmss}_PECmd_Output_Timeline.csv"; if (_fluentCommandLineParser.Object.CsvName.IsNullOrEmpty() == false) { outNameTl = $"{Path.GetFileNameWithoutExtension(_fluentCommandLineParser.Object.CsvName)}_Timeline{Path.GetExtension(_fluentCommandLineParser.Object.CsvName)}"; } var outFile = Path.Combine(_fluentCommandLineParser.Object.CsvDirectory, outName); var outFileTl = Path.Combine(_fluentCommandLineParser.Object.CsvDirectory, outNameTl); if (Directory.Exists(_fluentCommandLineParser.Object.CsvDirectory) == false) { _logger.Warn( $"Path to '{_fluentCommandLineParser.Object.CsvDirectory}' does not exist. 
Creating..."); Directory.CreateDirectory(_fluentCommandLineParser.Object.CsvDirectory); } _logger.Warn($"CSV output will be saved to '{outFile}'"); _logger.Warn($"CSV time line output will be saved to '{outFileTl}'"); try { streamWriter = new StreamWriter(outFile); csv = new CsvWriter(streamWriter); csv.WriteHeader(typeof(CsvOut)); csv.NextRecord(); streamWriterTl = new StreamWriter(outFileTl); csvTl = new CsvWriter(streamWriterTl); csvTl.WriteHeader(typeof(CsvOutTl)); csvTl.NextRecord(); } catch (Exception ex) { _logger.Error( $"Unable to open '{outFile}' for writing. CSV export canceled. Error: {ex.Message}"); } } if (_fluentCommandLineParser.Object.JsonDirectory?.Length > 0) { if (Directory.Exists(_fluentCommandLineParser.Object.JsonDirectory) == false) { _logger.Warn( $"'{_fluentCommandLineParser.Object.JsonDirectory} does not exist. Creating...'"); Directory.CreateDirectory(_fluentCommandLineParser.Object.JsonDirectory); } _logger.Warn($"Saving json output to '{_fluentCommandLineParser.Object.JsonDirectory}'"); } XmlTextWriter xml = null; if (_fluentCommandLineParser.Object.xHtmlDirectory?.Length > 0) { if (Directory.Exists(_fluentCommandLineParser.Object.xHtmlDirectory) == false) { _logger.Warn( $"'{_fluentCommandLineParser.Object.xHtmlDirectory} does not exist. Creating...'"); Directory.CreateDirectory(_fluentCommandLineParser.Object.xHtmlDirectory); } var outDir = Path.Combine(_fluentCommandLineParser.Object.xHtmlDirectory, $"{DateTimeOffset.UtcNow:yyyyMMddHHmmss}_PECmd_Output_for_{_fluentCommandLineParser.Object.xHtmlDirectory.Replace(@":\", "_").Replace(@"\", "_")}"); if (Directory.Exists(outDir) == false) { Directory.CreateDirectory(outDir); } var styleDir = Path.Combine(outDir, "styles"); if (Directory.Exists(styleDir) == false) { Directory.CreateDirectory(styleDir); } File.WriteAllText(Path.Combine(styleDir, "normalize.css"), Resources.normalize); File.WriteAllText(Path.Combine(styleDir, "style.css"), Resources.style); Resources.directories.Save(Path.Combine(styleDir, "directories.png")); Resources.filesloaded.Save(Path.Combine(styleDir, "filesloaded.png")); var outFile = Path.Combine(_fluentCommandLineParser.Object.xHtmlDirectory, outDir, "index.xhtml"); _logger.Warn($"Saving HTML output to '{outFile}'"); xml = new XmlTextWriter(outFile, Encoding.UTF8) { Formatting = Formatting.Indented, Indentation = 4 }; xml.WriteStartDocument(); xml.WriteProcessingInstruction("xml-stylesheet", "href=\"styles/normalize.css\""); xml.WriteProcessingInstruction("xml-stylesheet", "href=\"styles/style.css\""); xml.WriteStartElement("document"); } if (_fluentCommandLineParser.Object.CsvDirectory.IsNullOrEmpty() == false || _fluentCommandLineParser.Object.JsonDirectory.IsNullOrEmpty() == false || _fluentCommandLineParser.Object.xHtmlDirectory.IsNullOrEmpty() == false) { foreach (var processedFile in _processedFiles) { var o = GetCsvFormat(processedFile); try { foreach (var dateTimeOffset in processedFile.LastRunTimes) { var t = new CsvOutTl(); var exePath = processedFile.Filenames.FirstOrDefault( y => y.EndsWith(processedFile.Header.ExecutableFilename)); if (exePath == null) { exePath = processedFile.Header.ExecutableFilename; } t.ExecutableName = exePath; t.RunTime = dateTimeOffset.ToString(_fluentCommandLineParser.Object.DateTimeFormat); csvTl?.WriteRecord(t); csvTl?.NextRecord(); } } catch (Exception ex) { _logger.Error( $"Error getting time line record for '{processedFile.SourceFilename}' to '{_fluentCommandLineParser.Object.CsvDirectory}'. 
Error: {ex.Message}"); } try { csv?.WriteRecord(o); csv?.NextRecord(); } catch (Exception ex) { _logger.Error( $"Error writing CSV record for '{processedFile.SourceFilename}' to '{_fluentCommandLineParser.Object.CsvDirectory}'. Error: {ex.Message}"); } if (_fluentCommandLineParser.Object.JsonDirectory?.Length > 0) { SaveJson(processedFile, _fluentCommandLineParser.Object.JsonPretty, _fluentCommandLineParser.Object.JsonDirectory); } //XHTML xml?.WriteStartElement("Container"); xml?.WriteElementString("SourceFile", o.SourceFilename); xml?.WriteElementString("SourceCreated", o.SourceCreated); xml?.WriteElementString("SourceModified", o.SourceModified); xml?.WriteElementString("SourceAccessed", o.SourceAccessed); xml?.WriteElementString("LastRun", o.LastRun); xml?.WriteElementString("PreviousRun0", $"{o.PreviousRun0}"); xml?.WriteElementString("PreviousRun1", $"{o.PreviousRun1}"); xml?.WriteElementString("PreviousRun2", $"{o.PreviousRun2}"); xml?.WriteElementString("PreviousRun3", $"{o.PreviousRun3}"); xml?.WriteElementString("PreviousRun4", $"{o.PreviousRun4}"); xml?.WriteElementString("PreviousRun5", $"{o.PreviousRun5}"); xml?.WriteElementString("PreviousRun6", $"{o.PreviousRun6}"); xml?.WriteStartElement("ExecutableName"); xml?.WriteAttributeString("title", "Note: The name of the executable tracked by the pf file"); xml?.WriteString(o.ExecutableName); xml?.WriteEndElement(); xml?.WriteElementString("RunCount", $"{o.RunCount}"); xml?.WriteStartElement("Size"); xml?.WriteAttributeString("title", "Note: The size of the executable in bytes"); xml?.WriteString(o.Size); xml?.WriteEndElement(); xml?.WriteStartElement("Hash"); xml?.WriteAttributeString("title", "Note: The calculated hash for the pf file that should match the hash in the source file name"); xml?.WriteString(o.Hash); xml?.WriteEndElement(); xml?.WriteStartElement("Version"); xml?.WriteAttributeString("title", "Note: The operating system that generated the prefetch file"); xml?.WriteString(o.Version); xml?.WriteEndElement(); xml?.WriteElementString("Note", o.Note); xml?.WriteElementString("Volume0Name", o.Volume0Name); xml?.WriteElementString("Volume0Serial", o.Volume0Serial); xml?.WriteElementString("Volume0Created", o.Volume0Created); xml?.WriteElementString("Volume1Name", o.Volume1Name); xml?.WriteElementString("Volume1Serial", o.Volume1Serial); xml?.WriteElementString("Volume1Created", o.Volume1Created); xml?.WriteStartElement("Directories"); xml?.WriteAttributeString("title", "A comma separated list of all directories accessed by the executable"); xml?.WriteString(o.Directories); xml?.WriteEndElement(); xml?.WriteStartElement("FilesLoaded"); xml?.WriteAttributeString("title", "A comma separated list of all files that were loaded by the executable"); xml?.WriteString(o.FilesLoaded); xml?.WriteEndElement(); xml?.WriteEndElement(); } //Close CSV stuff streamWriter?.Flush(); streamWriter?.Close(); streamWriterTl?.Flush(); streamWriterTl?.Close(); //Close XML xml?.WriteEndElement(); xml?.WriteEndDocument(); xml?.Flush(); } } catch (Exception ex) { _logger.Error($"Error exporting data! Error: {ex.Message}"); } } }
static void Main(string[] args) { ExceptionlessClient.Default.Startup("DyZCm8aZbNXf2iZ6BV00wY2UoR3U2tymh3cftNZs"); SetupNLog(); _logger = LogManager.GetLogger("EvtxECmd"); _fluentCommandLineParser = new FluentCommandLineParser <ApplicationArguments> { IsCaseSensitive = false }; _fluentCommandLineParser.Setup(arg => arg.File) .As('f') .WithDescription("File to process. This or -d is required\r\n"); _fluentCommandLineParser.Setup(arg => arg.Directory) .As('d') .WithDescription("Directory to process that contains SQLite files. This or -f is required"); _fluentCommandLineParser.Setup(arg => arg.CsvDirectory) .As("csv") .WithDescription( "Directory to save CSV formatted results to."); // This, --json, or --xml required _fluentCommandLineParser.Setup(arg => arg.JsonDirectory) .As("json") .WithDescription( "Directory to save JSON formatted results to.\r\n"); // This, --csv, or --xml required _fluentCommandLineParser.Setup(arg => arg.Dedupe) .As("dedupe") .WithDescription( "Deduplicate -f or -d files based on SHA-1. First file found wins. Default is TRUE") .SetDefault(true); _fluentCommandLineParser.Setup(arg => arg.Hunt) .As("hunt") .WithDescription( "When true, all files are looked at regardless of name and file header is used to identify SQLite files, else filename in map is used to find databases. Default is FALSE\r\n ") .SetDefault(false); _fluentCommandLineParser.Setup(arg => arg.MapsDirectory) .As("maps") .WithDescription( "The path where event maps are located. Defaults to 'Maps' folder where program was executed\r\n ") .SetDefault(Path.Combine(BaseDirectory, "Maps")); _fluentCommandLineParser.Setup(arg => arg.Sync) .As("sync") .WithDescription( "If true, the latest maps from https://github.com/EricZimmerman/SQLECmd/tree/master/SQLMap/Maps are downloaded and local maps updated. Default is FALSE\r\n") .SetDefault(false); _fluentCommandLineParser.Setup(arg => arg.Debug) .As("debug") .WithDescription("Show debug information during processing").SetDefault(false); _fluentCommandLineParser.Setup(arg => arg.Trace) .As("trace") .WithDescription("Show trace information during processing\r\n").SetDefault(false); var header = $"SQLECmd version {Assembly.GetExecutingAssembly().GetName().Version}" + "\r\n\r\nAuthor: Eric Zimmerman ([email protected])" + "\r\nhttps://github.com/EricZimmerman/SQLECmd"; var footer = @"Examples: SQLECmd.exe -f ""C:\Temp\someFile.db"" --csv ""c:\temp\out"" " + "\r\n\t " + @" SQLECmd.exe -d ""C:\Temp\"" --csv ""c:\temp\out""" + "\r\n\t " + @" SQLECmd.exe -d ""C:\Temp\"" --hunt --csv ""c:\temp\out""" + "\r\n\t " + "\r\n\t" + " Short options (single letter) are prefixed with a single dash. Long commands are prefixed with two dashes\r\n"; _fluentCommandLineParser.SetupHelp("?", "help") .WithHeader(header) .Callback(text => _logger.Info(text + "\r\n" + footer)); var result = _fluentCommandLineParser.Parse(args); if (result.HelpCalled) { return; } if (result.HasErrors) { _logger.Error(""); _logger.Error(result.ErrorText); _fluentCommandLineParser.HelpOption.ShowHelp(_fluentCommandLineParser.Options); return; } if (_fluentCommandLineParser.Object.Sync) { try { _logger.Info(header); UpdateFromRepo(); } catch (Exception e) { _logger.Error(e, $"There was an error checking for updates: {e.Message}"); } Environment.Exit(0); } if (_fluentCommandLineParser.Object.File.IsNullOrEmpty() && _fluentCommandLineParser.Object.Directory.IsNullOrEmpty()) { _fluentCommandLineParser.HelpOption.ShowHelp(_fluentCommandLineParser.Options); _logger.Warn("-f or -d is required. 
Exiting"); return; } if (_fluentCommandLineParser.Object.CsvDirectory.IsNullOrEmpty()) { _fluentCommandLineParser.HelpOption.ShowHelp(_fluentCommandLineParser.Options); _logger.Warn("--csv is required. Exiting"); return; } _logger.Info(header); _logger.Info(""); _logger.Info($"Command line: {string.Join(" ", Environment.GetCommandLineArgs().Skip(1))}\r\n"); if (_fluentCommandLineParser.Object.Debug) { LogManager.Configuration.LoggingRules.First().EnableLoggingForLevel(LogLevel.Debug); } if (_fluentCommandLineParser.Object.Trace) { LogManager.Configuration.LoggingRules.First().EnableLoggingForLevel(LogLevel.Trace); } LogManager.ReconfigExistingLoggers(); DumpSqliteDll(); var sw = new Stopwatch(); sw.Start(); var ts = DateTimeOffset.UtcNow; if (Directory.Exists(_fluentCommandLineParser.Object.MapsDirectory) == false) { _logger.Warn( $"Maps directory '{_fluentCommandLineParser.Object.MapsDirectory}' does not exist! Database maps will not be loaded!!"); } else { _logger.Debug($"Loading maps from '{Path.GetFullPath(_fluentCommandLineParser.Object.MapsDirectory)}'"); var errors = SQLMap.LoadMaps(Path.GetFullPath(_fluentCommandLineParser.Object.MapsDirectory)); if (errors) { return; } _logger.Info($"Maps loaded: {SQLMap.MapFiles.Count:N0}"); } if (Directory.Exists(_fluentCommandLineParser.Object.CsvDirectory) == false) { _logger.Warn( $"Path to '{_fluentCommandLineParser.Object.CsvDirectory}' doesn't exist. Creating..."); try { Directory.CreateDirectory(_fluentCommandLineParser.Object.CsvDirectory); } catch (Exception) { _logger.Fatal( $"Unable to create directory '{_fluentCommandLineParser.Object.CsvDirectory}'. Does a file with the same name exist? Exiting"); return; } } if (_fluentCommandLineParser.Object.File.IsNullOrEmpty() == false) { if (File.Exists(_fluentCommandLineParser.Object.File) == false) { _logger.Warn($"'{_fluentCommandLineParser.Object.File}' does not exist! 
Exiting"); return; } ProcessFile(Path.GetFullPath(_fluentCommandLineParser.Object.File)); } else { //Directories _logger.Info($"Looking for files in '{_fluentCommandLineParser.Object.Directory}'"); _logger.Info(""); Privilege[] privs = { Privilege.EnableDelegation, Privilege.Impersonate, Privilege.Tcb }; using (new PrivilegeEnabler(Privilege.Backup, privs)) { var dirEnumOptions = DirectoryEnumerationOptions.Files | DirectoryEnumerationOptions.Recursive | DirectoryEnumerationOptions.SkipReparsePoints | DirectoryEnumerationOptions.ContinueOnException | DirectoryEnumerationOptions.BasicSearch; var f = new DirectoryEnumerationFilters { RecursionFilter = entryInfo => !entryInfo.IsMountPoint && !entryInfo.IsSymbolicLink, ErrorFilter = (errorCode, errorMessage, pathProcessed) => true }; var dbNames = new HashSet <string>(StringComparer.InvariantCultureIgnoreCase); if (_fluentCommandLineParser.Object.Hunt) { f.InclusionFilter = fsei => true; } else { foreach (var mapFile in SQLMap.MapFiles) { dbNames.Add(mapFile.Value.FileName); } f.InclusionFilter = fsei => dbNames.Contains(fsei.FileName); } var files2 = Directory.EnumerateFileSystemEntries(Path.GetFullPath(_fluentCommandLineParser.Object.Directory), dirEnumOptions, f); foreach (var file in files2) { try { ProcessFile(file); } catch (Exception e) { _logger.Error($"Error processing '{file}': {e.Message}"); } } } } sw.Stop(); if (UnmatchedDbs.Any()) { Console.WriteLine(); _logger.Fatal($"At least one database was found with no corresponding map (Use --debug for more details about discovery process)"); foreach (var unmatchedDb in UnmatchedDbs) { DumpUnmatched(unmatchedDb); } } var extra = string.Empty; if (ProcessedFiles.Count > 1) { extra = "s"; } _logger.Info($"\r\nProcessed {ProcessedFiles.Count:N0} file{extra} in {sw.Elapsed.TotalSeconds:N4} seconds\r\n"); if (!File.Exists("SQLite.Interop.dll")) { return; } try { File.Delete("SQLite.Interop.dll"); } catch (Exception) { _logger.Warn("Unable to delete 'SQLite.Interop.dll'. Delete manually if needed.\r\n"); } }
private static void Main(string[] args) { ExceptionlessClient.Default.Startup("wPXTiiouhEbK0s19lCgjiDThpfrW0ODU8RskdPEk"); SetupNLog(); _logger = LogManager.GetCurrentClassLogger(); _fluentCommandLineParser = new FluentCommandLineParser <ApplicationArguments> { IsCaseSensitive = false }; _fluentCommandLineParser.Setup(arg => arg.FileDb) .As('f') .WithDescription("SRUDB.dat file to process. Either this or -d is required"); _fluentCommandLineParser.Setup(arg => arg.FileReg) .As('r') .WithDescription("SOFTWARE hive to process. This is optional, but recommended\r\n"); _fluentCommandLineParser.Setup(arg => arg.Directory) .As('d') .WithDescription("Directory to recursively process, looking for SRUDB.dat and SOFTWARE hive. This mode is primarily used with KAPE so both SRUDB.dat and SOFTWARE hive can be located"); _fluentCommandLineParser.Setup(arg => arg.CsvDirectory) .As("csv") .WithDescription( "Directory to save CSV formatted results to. Be sure to include the full path in double quotes\r\n"); _fluentCommandLineParser.Setup(arg => arg.DateTimeFormat) .As("dt") .WithDescription( "The custom date/time format to use when displaying time stamps. Default is: yyyy-MM-dd HH:mm:ss.fffffff\r\n") .SetDefault("yyyy-MM-dd HH:mm:ss.fffffff"); _fluentCommandLineParser.Setup(arg => arg.Debug) .As("debug") .WithDescription("Show debug information during processing").SetDefault(false); _fluentCommandLineParser.Setup(arg => arg.Trace) .As("trace") .WithDescription("Show trace information during processing\r\n").SetDefault(false); var header = $"SrumECmd version {Assembly.GetExecutingAssembly().GetName().Version}" + "\r\n\r\nAuthor: Eric Zimmerman ([email protected])" + "\r\nhttps://github.com/EricZimmerman/Srum"; var footer = @"Examples: SrumECmd.exe -f ""C:\Temp\SRUDB.dat"" -r ""C:\Temp\SOFTWARE"" --csv ""C:\Temp\"" " + "\r\n\t " + @" SrumECmd.exe -f ""C:\Temp\SRUDB.dat"" --csv ""c:\temp""" + "\r\n\t " + @" SrumECmd.exe -d ""C:\Temp"" --csv ""c:\temp""" + "\r\n\t " + "\r\n\t" + " Short options (single letter) are prefixed with a single dash. Long commands are prefixed with two dashes\r\n"; _fluentCommandLineParser.SetupHelp("?", "help") .WithHeader(header) .Callback(text => _logger.Info(text + "\r\n" + footer)); var result = _fluentCommandLineParser.Parse(args); if (result.HelpCalled) { return; } if (result.HasErrors) { _logger.Error(""); _logger.Error(result.ErrorText); _fluentCommandLineParser.HelpOption.ShowHelp(_fluentCommandLineParser.Options); return; } if (_fluentCommandLineParser.Object.FileDb.IsNullOrEmpty() && _fluentCommandLineParser.Object.Directory.IsNullOrEmpty()) { _fluentCommandLineParser.HelpOption.ShowHelp(_fluentCommandLineParser.Options); _logger.Warn("Either -f or -d is required. Exiting\r\n"); return; } if (_fluentCommandLineParser.Object.FileDb.IsNullOrEmpty() == false && !File.Exists(_fluentCommandLineParser.Object.FileDb)) { _logger.Warn($"File '{_fluentCommandLineParser.Object.FileDb}' not found. Exiting"); return; } if (_fluentCommandLineParser.Object.Directory.IsNullOrEmpty() == false && !Directory.Exists(_fluentCommandLineParser.Object.Directory)) { _logger.Warn($"Directory '{_fluentCommandLineParser.Object.Directory}' not found. Exiting"); return; } if (_fluentCommandLineParser.Object.CsvDirectory.IsNullOrEmpty()) { _fluentCommandLineParser.HelpOption.ShowHelp(_fluentCommandLineParser.Options); _logger.Warn("--csv is required. 
Exiting\r\n"); return; } _logger.Info(header); _logger.Info(""); _logger.Info($"Command line: {string.Join(" ", Environment.GetCommandLineArgs().Skip(1))}\r\n"); if (IsAdministrator() == false) { _logger.Fatal("Warning: Administrator privileges not found!\r\n"); } if (_fluentCommandLineParser.Object.Debug) { LogManager.Configuration.LoggingRules.First().EnableLoggingForLevel(LogLevel.Debug); } if (_fluentCommandLineParser.Object.Trace) { LogManager.Configuration.LoggingRules.First().EnableLoggingForLevel(LogLevel.Trace); } LogManager.ReconfigExistingLoggers(); var sw = new Stopwatch(); sw.Start(); var ts = DateTimeOffset.UtcNow; CsvWriter _csvWriter = null; StreamWriter _swCsv = null; Srum sr = null; if (_fluentCommandLineParser.Object.Directory.IsNullOrEmpty() == false) { //kape mode, so find the files var f = new DirectoryEnumerationFilters(); f.InclusionFilter = fsei => { if (fsei.FileSize == 0) { return(false); } if (fsei.FileName.ToUpperInvariant() == "SRUDB.DAT") { return(true); } return(false); }; f.RecursionFilter = entryInfo => !entryInfo.IsMountPoint && !entryInfo.IsSymbolicLink; f.ErrorFilter = (errorCode, errorMessage, pathProcessed) => true; var dirEnumOptions = DirectoryEnumerationOptions.Files | DirectoryEnumerationOptions.Recursive | DirectoryEnumerationOptions.SkipReparsePoints | DirectoryEnumerationOptions.ContinueOnException | DirectoryEnumerationOptions.BasicSearch; var files2 = Directory.EnumerateFileSystemEntries(_fluentCommandLineParser.Object.Directory, dirEnumOptions, f); _fluentCommandLineParser.Object.FileDb = files2.FirstOrDefault(); if (_fluentCommandLineParser.Object.FileDb.IsNullOrEmpty()) { _logger.Warn("Did not locate any files named 'SRUDB.dat'! Exiting"); return; } _logger.Info($"Found SRUM database file '{_fluentCommandLineParser.Object.FileDb}'!"); f = new DirectoryEnumerationFilters(); f.InclusionFilter = fsei => { if (fsei.FileSize == 0) { return(false); } if (fsei.FileName.ToUpperInvariant() == "SOFTWARE") { return(true); } return(false); }; f.RecursionFilter = entryInfo => !entryInfo.IsMountPoint && !entryInfo.IsSymbolicLink; f.ErrorFilter = (errorCode, errorMessage, pathProcessed) => true; files2 = Directory.EnumerateFileSystemEntries(_fluentCommandLineParser.Object.Directory, dirEnumOptions, f); _fluentCommandLineParser.Object.FileReg = files2.FirstOrDefault(); if (_fluentCommandLineParser.Object.FileReg.IsNullOrEmpty()) { _logger.Warn("Did not locate any files named 'SOFTWARE'! Registry data will not be extracted"); } else { _logger.Info($"Found SOFTWARE hive '{_fluentCommandLineParser.Object.FileReg}'!"); } Console.WriteLine(); } try { _logger.Info($"Processing '{_fluentCommandLineParser.Object.FileDb}'..."); sr = new Srum(_fluentCommandLineParser.Object.FileDb, _fluentCommandLineParser.Object.FileReg); _logger.Warn("\r\nProcessing complete!\r\n"); _logger.Info($"{"Energy Usage count:".PadRight(30)} {sr.EnergyUsages.Count:N0}"); _logger.Info($"{"Unknown 312 count:".PadRight(30)} {sr.Unknown312s.Count:N0}"); _logger.Info($"{"Unknown D8F count:".PadRight(30)} {sr.UnknownD8Fs.Count:N0}"); _logger.Info($"{"App Resource Usage count:".PadRight(30)} {sr.AppResourceUseInfos.Count:N0}"); _logger.Info($"{"Network Connection count:".PadRight(30)} {sr.NetworkConnections.Count:N0}"); _logger.Info($"{"Network Usage count:".PadRight(30)} {sr.NetworkUsages.Count:N0}"); _logger.Info($"{"Push Notification count:".PadRight(30)} {sr.PushNotifications.Count:N0}"); Console.WriteLine(); } catch (Exception e) { _logger.Error($"Error processing file! 
Message: {e.Message}.\r\n\r\nThis almost always means the database is dirty and must be repaired. This can be verified by running 'esentutl.exe /mh SRUDB.dat' and examining the 'State' property"); Console.WriteLine(); _logger.Info("If the database is dirty, **make a copy of your files**, ensure all files in the directory are not Read-only, open a PowerShell session as an admin, and repair by using the following commands (change directories to the location of SRUDB.dat first):\r\n\r\n'esentutl.exe /r sru /i'\r\n'esentutl.exe /p SRUDB.dat'\r\n\r\n"); Environment.Exit(0); } if (_fluentCommandLineParser.Object.CsvDirectory.IsNullOrEmpty() == false) { if (Directory.Exists(_fluentCommandLineParser.Object.CsvDirectory) == false) { _logger.Warn( $"Path to '{_fluentCommandLineParser.Object.CsvDirectory}' doesn't exist. Creating..."); try { Directory.CreateDirectory(_fluentCommandLineParser.Object.CsvDirectory); } catch (Exception) { _logger.Fatal( $"Unable to create directory '{_fluentCommandLineParser.Object.CsvDirectory}'. Does a file with the same name exist? Exiting"); return; } } var outName = string.Empty; var outFile = string.Empty; _logger.Warn($"CSV output will be saved to '{_fluentCommandLineParser.Object.CsvDirectory}'\r\n"); try { _logger.Debug($"Dumping Energy Usage tables '{EnergyUsage.TableName}'"); outName = $"{ts:yyyyMMddHHmmss}_SrumECmd_EnergyUsage_Output.csv"; outFile = Path.Combine(_fluentCommandLineParser.Object.CsvDirectory, outName); _swCsv = new StreamWriter(outFile, false, Encoding.UTF8); _csvWriter = new CsvWriter(_swCsv, CultureInfo.InvariantCulture); var foo = _csvWriter.Configuration.AutoMap <EnergyUsage>(); foo.Map(t => t.Timestamp).ConvertUsing(t => $"{t.Timestamp:yyyy-MM-dd HH:mm:ss}"); foo.Map(t => t.EventTimestamp).ConvertUsing(t => $"{t.EventTimestamp?.ToString(_fluentCommandLineParser.Object.DateTimeFormat)}"); _csvWriter.Configuration.RegisterClassMap(foo); _csvWriter.WriteHeader <EnergyUsage>(); _csvWriter.NextRecord(); _csvWriter.WriteRecords(sr.EnergyUsages.Values); _csvWriter.Flush(); _swCsv.Flush(); } catch (Exception e) { _logger.Error($"Error exporting 'EnergyUsage' data! Error: {e.Message}"); } try { _logger.Debug($"Dumping Unknown 312 table '{Unknown312.TableName}'"); outName = $"{ts:yyyyMMddHHmmss}_SrumECmd_Unknown312_Output.csv"; outFile = Path.Combine(_fluentCommandLineParser.Object.CsvDirectory, outName); _swCsv = new StreamWriter(outFile, false, Encoding.UTF8); _csvWriter = new CsvWriter(_swCsv, CultureInfo.InvariantCulture); var foo = _csvWriter.Configuration.AutoMap <Unknown312>(); foo.Map(t => t.Timestamp).ConvertUsing(t => $"{t.Timestamp:yyyy-MM-dd HH:mm:ss}"); foo.Map(t => t.EndTime).ConvertUsing(t => $"{t.EndTime.ToString(_fluentCommandLineParser.Object.DateTimeFormat)}"); _csvWriter.Configuration.RegisterClassMap(foo); _csvWriter.WriteHeader <Unknown312>(); _csvWriter.NextRecord(); _csvWriter.WriteRecords(sr.Unknown312s.Values); _csvWriter.Flush(); _swCsv.Flush(); } catch (Exception e) { _logger.Error($"Error exporting 'Unknown312' data! 
Error: {e.Message}"); } try { _logger.Debug($"Dumping Unknown D8F table '{UnknownD8F.TableName}'"); outName = $"{ts:yyyyMMddHHmmss}_SrumECmd_UnknownD8F_Output.csv"; outFile = Path.Combine(_fluentCommandLineParser.Object.CsvDirectory, outName); _swCsv = new StreamWriter(outFile, false, Encoding.UTF8); _csvWriter = new CsvWriter(_swCsv, CultureInfo.InvariantCulture); var foo = _csvWriter.Configuration.AutoMap <UnknownD8F>(); foo.Map(t => t.Timestamp).ConvertUsing(t => $"{t.Timestamp:yyyy-MM-dd HH:mm:ss}"); foo.Map(t => t.EndTime).ConvertUsing(t => $"{t.EndTime.ToString(_fluentCommandLineParser.Object.DateTimeFormat)}"); foo.Map(t => t.StartTime).ConvertUsing(t => $"{t.StartTime.ToString(_fluentCommandLineParser.Object.DateTimeFormat)}"); _csvWriter.Configuration.RegisterClassMap(foo); _csvWriter.WriteHeader <UnknownD8F>(); _csvWriter.NextRecord(); _csvWriter.WriteRecords(sr.UnknownD8Fs.Values); _csvWriter.Flush(); _swCsv.Flush(); } catch (Exception e) { _logger.Error($"Error exporting 'UnknownD8F' data! Error: {e.Message}"); } try { _logger.Debug($"Dumping AppResourceUseInfo table '{AppResourceUseInfo.TableName}'"); outName = $"{ts:yyyyMMddHHmmss}_SrumECmd_AppResourceUseInfo_Output.csv"; outFile = Path.Combine(_fluentCommandLineParser.Object.CsvDirectory, outName); _swCsv = new StreamWriter(outFile, false, Encoding.UTF8); _csvWriter = new CsvWriter(_swCsv, CultureInfo.InvariantCulture); var foo = _csvWriter.Configuration.AutoMap <AppResourceUseInfo>(); foo.Map(t => t.Timestamp).ConvertUsing(t => $"{t.Timestamp:yyyy-MM-dd HH:mm:ss}"); _csvWriter.Configuration.RegisterClassMap(foo); _csvWriter.WriteHeader <AppResourceUseInfo>(); _csvWriter.NextRecord(); _csvWriter.WriteRecords(sr.AppResourceUseInfos.Values); _csvWriter.Flush(); _swCsv.Flush(); } catch (Exception e) { _logger.Error($"Error exporting 'AppResourceUseInfo' data! Error: {e.Message}"); } try { _logger.Debug($"Dumping NetworkConnection table '{NetworkConnection.TableName}'"); outName = $"{ts:yyyyMMddHHmmss}_SrumECmd_NetworkConnections_Output.csv"; outFile = Path.Combine(_fluentCommandLineParser.Object.CsvDirectory, outName); _swCsv = new StreamWriter(outFile, false, Encoding.UTF8); _csvWriter = new CsvWriter(_swCsv, CultureInfo.InvariantCulture); var foo = _csvWriter.Configuration.AutoMap <NetworkConnection>(); foo.Map(t => t.Timestamp).ConvertUsing(t => $"{t.Timestamp:yyyy-MM-dd HH:mm:ss}"); foo.Map(t => t.ConnectStartTime).ConvertUsing(t => $"{t.ConnectStartTime.ToString(_fluentCommandLineParser.Object.DateTimeFormat)}"); _csvWriter.Configuration.RegisterClassMap(foo); _csvWriter.WriteHeader <NetworkConnection>(); _csvWriter.NextRecord(); _csvWriter.WriteRecords(sr.NetworkConnections.Values); _csvWriter.Flush(); _swCsv.Flush(); } catch (Exception e) { _logger.Error($"Error exporting 'NetworkConnection' data! 
Error: {e.Message}"); } try { _logger.Debug($"Dumping NetworkUsage table '{NetworkUsage.TableName}'"); outName = $"{ts:yyyyMMddHHmmss}_SrumECmd_NetworkUsages_Output.csv"; outFile = Path.Combine(_fluentCommandLineParser.Object.CsvDirectory, outName); _swCsv = new StreamWriter(outFile, false, Encoding.UTF8); _csvWriter = new CsvWriter(_swCsv, CultureInfo.InvariantCulture); var foo = _csvWriter.Configuration.AutoMap <NetworkUsage>(); foo.Map(t => t.Timestamp).ConvertUsing(t => $"{t.Timestamp:yyyy-MM-dd HH:mm:ss}"); _csvWriter.Configuration.RegisterClassMap(foo); _csvWriter.WriteHeader <NetworkUsage>(); _csvWriter.NextRecord(); _csvWriter.WriteRecords(sr.NetworkUsages.Values); _csvWriter.Flush(); _swCsv.Flush(); } catch (Exception e) { _logger.Error($"Error exporting 'NetworkUsage' data! Error: {e.Message}"); } try { _logger.Debug($"Dumping PushNotification table '{PushNotification.TableName}'"); outName = $"{ts:yyyyMMddHHmmss}_SrumECmd_PushNotifications_Output.csv"; outFile = Path.Combine(_fluentCommandLineParser.Object.CsvDirectory, outName); _swCsv = new StreamWriter(outFile, false, Encoding.UTF8); _csvWriter = new CsvWriter(_swCsv, CultureInfo.InvariantCulture); var foo = _csvWriter.Configuration.AutoMap <PushNotification>(); foo.Map(t => t.Timestamp).ConvertUsing(t => $"{t.Timestamp:yyyy-MM-dd HH:mm:ss}"); _csvWriter.Configuration.RegisterClassMap(foo); _csvWriter.WriteHeader <PushNotification>(); _csvWriter.NextRecord(); _csvWriter.WriteRecords(sr.PushNotifications.Values); _csvWriter.Flush(); _swCsv.Flush(); } catch (Exception e) { _logger.Error($"Error exporting 'PushNotification' data! Error: {e.Message}"); } sw.Stop(); _logger.Debug(""); _logger.Error( $"Processing completed in {sw.Elapsed.TotalSeconds:N4} seconds\r\n"); } }
private static void DoWork(string f, string r, string d, string csv, string dt, bool debug, bool trace) { var levelSwitch = new LoggingLevelSwitch(); var template = "{Message:lj}{NewLine}{Exception}"; if (debug) { levelSwitch.MinimumLevel = LogEventLevel.Debug; template = "[{Timestamp:HH:mm:ss.fff} {Level:u3}] {Message:lj}{NewLine}{Exception}"; } if (trace) { levelSwitch.MinimumLevel = LogEventLevel.Verbose; template = "[{Timestamp:HH:mm:ss.fff} {Level:u3}] {Message:lj}{NewLine}{Exception}"; } var conf = new LoggerConfiguration() .WriteTo.Console(outputTemplate: template) .MinimumLevel.ControlledBy(levelSwitch); Log.Logger = conf.CreateLogger(); if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) { Console.WriteLine(); Log.Fatal("Non-Windows platforms not supported due to the need to load ESI specific Windows libraries! Exiting..."); Console.WriteLine(); Environment.Exit(0); return; } if (f.IsNullOrEmpty() && d.IsNullOrEmpty()) { var helpBld = new HelpBuilder(LocalizationResources.Instance, Console.WindowWidth); var hc = new HelpContext(helpBld, _rootCommand, Console.Out); helpBld.Write(hc); Log.Warning("Either -f or -d is required. Exiting\r\n"); return; } if (f.IsNullOrEmpty() == false && !File.Exists(f)) { Log.Warning("File '{File}' not found. Exiting", f); return; } if (d.IsNullOrEmpty() == false && !Directory.Exists(d)) { Log.Warning("Directory '{D}' not found. Exiting", d); return; } if (csv.IsNullOrEmpty()) { var helpBld = new HelpBuilder(LocalizationResources.Instance, Console.WindowWidth); var hc = new HelpContext(helpBld, _rootCommand, Console.Out); helpBld.Write(hc); Log.Warning("--csv is required. Exiting\r\n"); return; } Log.Information("{Header}", Header); Console.WriteLine(); Log.Information("Command line: {Args}\r\n", string.Join(" ", _args)); if (IsAdministrator() == false) { Log.Warning("Warning: Administrator privileges not found!\r\n"); } var sw = new Stopwatch(); sw.Start(); var ts = DateTimeOffset.UtcNow; Srum sr = null; if (d.IsNullOrEmpty() == false) { IEnumerable <string> files2; #if NET6_0 var enumerationOptions = new EnumerationOptions { IgnoreInaccessible = true, MatchCasing = MatchCasing.CaseInsensitive, RecurseSubdirectories = true, AttributesToSkip = 0 }; files2 = Directory.EnumerateFileSystemEntries(d, "SRUDB.DAT", enumerationOptions); f = files2.FirstOrDefault(); if (f.IsNullOrEmpty()) { Log.Warning("Did not locate any files named 'SRUDB.dat'! Exiting"); return; } Log.Information("Found SRUM database file '{F}'!", f); files2 = Directory.EnumerateFileSystemEntries(d, "SOFTWARE", enumerationOptions); r = files2.FirstOrDefault(); if (r.IsNullOrEmpty()) { Log.Warning("Did not locate any files named 'SOFTWARE'! 
Registry data will not be extracted"); } else { Log.Information("Found SOFTWARE hive '{R}'!", r); } #elif NET462 //kape mode, so find the files var ilter = new DirectoryEnumerationFilters(); ilter.InclusionFilter = fsei => { if (fsei.FileSize == 0) { return(false); } if (fsei.FileName.ToUpperInvariant() == "SRUDB.DAT") { return(true); } return(false); }; ilter.RecursionFilter = entryInfo => !entryInfo.IsMountPoint && !entryInfo.IsSymbolicLink; ilter.ErrorFilter = (errorCode, errorMessage, pathProcessed) => true; const DirectoryEnumerationOptions dirEnumOptions = DirectoryEnumerationOptions.Files | DirectoryEnumerationOptions.Recursive | DirectoryEnumerationOptions.SkipReparsePoints | DirectoryEnumerationOptions.ContinueOnException | DirectoryEnumerationOptions.BasicSearch; files2 = Directory.EnumerateFileSystemEntries(d, dirEnumOptions, ilter); f = files2.FirstOrDefault(); if (f.IsNullOrEmpty()) { Log.Warning("Did not locate any files named 'SRUDB.dat'! Exiting"); return; } Log.Information("Found SRUM database file '{F}'!", f); ilter = new DirectoryEnumerationFilters(); ilter.InclusionFilter = fsei => { if (fsei.FileSize == 0) { return(false); } if (fsei.FileName.ToUpperInvariant() == "SOFTWARE") { return(true); } return(false); }; ilter.RecursionFilter = entryInfo => !entryInfo.IsMountPoint && !entryInfo.IsSymbolicLink; ilter.ErrorFilter = (errorCode, errorMessage, pathProcessed) => true; files2 = Directory.EnumerateFileSystemEntries(d, dirEnumOptions, ilter); r = files2.FirstOrDefault(); if (r.IsNullOrEmpty()) { Log.Warning("Did not locate any files named 'SOFTWARE'! Registry data will not be extracted"); } else { Log.Information("Found SOFTWARE hive '{R}'!", r); } #endif Console.WriteLine(); } try { Log.Information("Processing '{F}'...", f); sr = new Srum(f, r); Console.WriteLine(); Log.Information("Processing complete!"); Console.WriteLine(); Log.Information("{EnergyUse} {EnergyUsagesCount:N0}", "Energy Usage count:".PadRight(30), sr.EnergyUsages.Count); Log.Information("{Unknown312s} {Unknown312sCount:N0}", "Unknown 312 count:".PadRight(30), sr.TimelineProviders.Count); Log.Information("{UnknownD8Fs} {UnknownD8FsCount:N0}", "Unknown D8F count:".PadRight(30), sr.Vfuprovs.Count); Log.Information("{AppResourceUseInfos} {AppResourceUseInfosCount:N0}", "App Resource Usage count:".PadRight(30), sr.AppResourceUseInfos.Count); Log.Information("{NetworkConnections} {NetworkConnectionsCount:N0}", "Network Connection count:".PadRight(30), sr.NetworkConnections.Count); Log.Information("{NetworkUsages} {NetworkUsagesCount}", "Network Usage count:".PadRight(30), sr.NetworkUsages.Count); Log.Information("{PushNotifications} {PushNotificationsCount:N0}", "Push Notification count:".PadRight(30), sr.PushNotifications.Count); Console.WriteLine(); } catch (Exception e) { Log.Error(e, "Error processing file! Message: {Message}.\r\n\r\nThis almost always means the database is dirty and must be repaired. 
This can be verified by running 'esentutl.exe /mh SRUDB.dat' and examining the 'State' property", e.Message); Console.WriteLine(); Log.Information( "If the database is dirty, **make a copy of your files**, ensure all files in the directory are not Read-only, open a PowerShell session as an admin, and repair by using the following commands (change directories to the location of SRUDB.dat first):\r\n\r\n'esentutl.exe /r sru /i'\r\n'esentutl.exe /p SRUDB.dat'\r\n\r\n"); Environment.Exit(0); } if (csv.IsNullOrEmpty() == false) { if (Directory.Exists(csv) == false) { Log.Information( "Path to '{Csv}' doesn't exist. Creating...", csv); try { Directory.CreateDirectory(csv); } catch (Exception) { Log.Fatal( "Unable to create directory '{Csv}'. Does a file with the same name exist? Exiting", csv); return; } } string outName; string outFile; Log.Information("CSV output will be saved to '{Csv}'\r\n", csv); StreamWriter swCsv; CsvWriter csvWriter; try { Log.Debug("Dumping Energy Usage tables '{TableName}'", EnergyUsage.TableName); outName = $"{ts:yyyyMMddHHmmss}_SrumECmd_EnergyUsage_Output.csv"; outFile = Path.Combine(csv, outName); swCsv = new StreamWriter(outFile, false, Encoding.UTF8); csvWriter = new CsvWriter(swCsv, CultureInfo.InvariantCulture); var foo = csvWriter.Context.AutoMap <EnergyUsage>(); foo.Map(t => t.Timestamp).Convert(t => $"{t.Value.Timestamp:yyyy-MM-dd HH:mm:ss}"); foo.Map(t => t.EventTimestamp).Convert(t => $"{t.Value.EventTimestamp?.ToString(dt)}"); csvWriter.Context.RegisterClassMap(foo); csvWriter.WriteHeader <EnergyUsage>(); csvWriter.NextRecord(); csvWriter.WriteRecords(sr.EnergyUsages.Values); csvWriter.Flush(); swCsv.Flush(); } catch (Exception e) { Log.Error(e, "Error exporting 'EnergyUsage' data! Error: {Message}", e.Message); } try { Log.Debug("Dumping Unknown 312 table '{TableName}'", TimelineProvider.TableName); outName = $"{ts:yyyyMMddHHmmss}_SrumECmd_Unknown312_Output.csv"; outFile = Path.Combine(csv, outName); swCsv = new StreamWriter(outFile, false, Encoding.UTF8); csvWriter = new CsvWriter(swCsv, CultureInfo.InvariantCulture); var foo = csvWriter.Context.AutoMap <TimelineProvider>(); foo.Map(t => t.Timestamp).Convert(t => $"{t.Value.Timestamp:yyyy-MM-dd HH:mm:ss}"); foo.Map(t => t.EndTime).Convert(t => $"{t.Value.EndTime.ToString(dt)}"); csvWriter.Context.RegisterClassMap(foo); csvWriter.WriteHeader <TimelineProvider>(); csvWriter.NextRecord(); csvWriter.WriteRecords(sr.TimelineProviders.Values); csvWriter.Flush(); swCsv.Flush(); } catch (Exception e) { Log.Error(e, "Error exporting 'Unknown312' data! Error: {Message}", e.Message); } try { Log.Debug("Dumping Unknown D8F table '{TableName}'", Vfuprov.TableName); outName = $"{ts:yyyyMMddHHmmss}_SrumECmd_UnknownD8F_Output.csv"; outFile = Path.Combine(csv, outName); swCsv = new StreamWriter(outFile, false, Encoding.UTF8); csvWriter = new CsvWriter(swCsv, CultureInfo.InvariantCulture); var foo = csvWriter.Context.AutoMap <Vfuprov>(); foo.Map(t => t.Timestamp).Convert(t => $"{t.Value.Timestamp:yyyy-MM-dd HH:mm:ss}"); foo.Map(t => t.EndTime).Convert(t => $"{t.Value.EndTime.ToString(dt)}"); foo.Map(t => t.StartTime).Convert(t => $"{t.Value.StartTime.ToString(dt)}"); csvWriter.Context.RegisterClassMap(foo); csvWriter.WriteHeader <Vfuprov>(); csvWriter.NextRecord(); csvWriter.WriteRecords(sr.Vfuprovs.Values); csvWriter.Flush(); swCsv.Flush(); } catch (Exception e) { Log.Error(e, "Error exporting 'UnknownD8F' data! 
Error: {Message}", e.Message); } try { Log.Debug("Dumping App Resource Use Info table '{TableName}'", AppResourceUseInfo.TableName); outName = $"{ts:yyyyMMddHHmmss}_SrumECmd_AppResourceUseInfo_Output.csv"; outFile = Path.Combine(csv, outName); swCsv = new StreamWriter(outFile, false, Encoding.UTF8); csvWriter = new CsvWriter(swCsv, CultureInfo.InvariantCulture); var foo = csvWriter.Context.AutoMap <AppResourceUseInfo>(); foo.Map(t => t.Timestamp).Convert(t => $"{t.Value.Timestamp:yyyy-MM-dd HH:mm:ss}"); csvWriter.Context.RegisterClassMap(foo); csvWriter.WriteHeader <AppResourceUseInfo>(); csvWriter.NextRecord(); csvWriter.WriteRecords(sr.AppResourceUseInfos.Values); csvWriter.Flush(); swCsv.Flush(); } catch (Exception e) { Log.Error(e, "Error exporting 'AppResourceUseInfo' data! Error: {Message}", e.Message); } try { Log.Debug("Dumping Network Connection table '{TableName}'", NetworkConnection.TableName); outName = $"{ts:yyyyMMddHHmmss}_SrumECmd_NetworkConnections_Output.csv"; outFile = Path.Combine(csv, outName); swCsv = new StreamWriter(outFile, false, Encoding.UTF8); csvWriter = new CsvWriter(swCsv, CultureInfo.InvariantCulture); var foo = csvWriter.Context.AutoMap <NetworkConnection>(); foo.Map(t => t.Timestamp).Convert(t => $"{t.Value.Timestamp:yyyy-MM-dd HH:mm:ss}"); foo.Map(t => t.ConnectStartTime).Convert(t => $"{t.Value.ConnectStartTime.ToString(dt)}"); csvWriter.Context.RegisterClassMap(foo); csvWriter.WriteHeader <NetworkConnection>(); csvWriter.NextRecord(); csvWriter.WriteRecords(sr.NetworkConnections.Values); csvWriter.Flush(); swCsv.Flush(); } catch (Exception e) { Log.Error(e, "Error exporting 'NetworkConnection' data! Error: {Message}", e.Message); } try { Log.Debug("Dumping Network Usage table '{TableName}'", NetworkUsage.TableName); outName = $"{ts:yyyyMMddHHmmss}_SrumECmd_NetworkUsages_Output.csv"; outFile = Path.Combine(csv, outName); swCsv = new StreamWriter(outFile, false, Encoding.UTF8); csvWriter = new CsvWriter(swCsv, CultureInfo.InvariantCulture); var foo = csvWriter.Context.AutoMap <NetworkUsage>(); foo.Map(t => t.Timestamp).Convert(t => $"{t.Value.Timestamp:yyyy-MM-dd HH:mm:ss}"); csvWriter.Context.RegisterClassMap(foo); csvWriter.WriteHeader <NetworkUsage>(); csvWriter.NextRecord(); csvWriter.WriteRecords(sr.NetworkUsages.Values); csvWriter.Flush(); swCsv.Flush(); } catch (Exception e) { Log.Error(e, "Error exporting 'NetworkUsage' data! Error: {Message}", e.Message); } try { Log.Debug("Dumping Push Notification table '{TableName}'", PushNotification.TableName); outName = $"{ts:yyyyMMddHHmmss}_SrumECmd_PushNotifications_Output.csv"; outFile = Path.Combine(csv, outName); swCsv = new StreamWriter(outFile, false, Encoding.UTF8); csvWriter = new CsvWriter(swCsv, CultureInfo.InvariantCulture); var foo = csvWriter.Context.AutoMap <PushNotification>(); foo.Map(t => t.Timestamp).Convert(t => $"{t.Value.Timestamp:yyyy-MM-dd HH:mm:ss}"); csvWriter.Context.RegisterClassMap(foo); csvWriter.WriteHeader <PushNotification>(); csvWriter.NextRecord(); csvWriter.WriteRecords(sr.PushNotifications.Values); csvWriter.Flush(); swCsv.Flush(); } catch (Exception e) { Log.Error(e, "Error exporting 'PushNotification' data! Error: {Message}", e.Message); } sw.Stop(); Log.Information("Processing completed in {TotalSeconds:N4} seconds\r\n", sw.Elapsed.TotalSeconds); } }
/// <summary> /// The daemon code calls this occasionally to poke it into action to do work /// </summary> /// <param name="daemon"></param> public void ExecuteBackgroundProcess(Daemon daemon) { // We don't want to start watching files until the library is loaded... if (!(LibraryRef?.Xlibrary.LibraryIsLoaded ?? false)) { Logging.Info("Library is not yet loaded, so waiting before watching..."); // Indicate that the library may still not have been changed... FolderContentsHaveChanged = true; return; } // Update our folder system watcher if necessary CheckIfFolderNameHasChanged(); // If the current folder is blank, do nothing if (String.IsNullOrEmpty(configured_folder_to_watch)) { return; } // If the folder does not exist, do nothing if (!Directory.Exists(configured_folder_to_watch)) { Logging.Info("Watched folder {0} does not exist: watching this directory has been disabled.", configured_folder_to_watch); return; } // If the folder or its contents has not changed since the last time, do nothing if (!FolderContentsHaveChanged) { return; } if (!ConfigurationManager.IsEnabled(nameof(FolderWatcher))) { Logging.Info("Watched folder {0} will not be watched/scanned due to Developer Override setting {1}=false", configured_folder_to_watch, nameof(FolderWatcher)); return; } Stopwatch breathing_time = Stopwatch.StartNew(); Logging.Debug("FolderWatcher BEGIN"); // To recover from a fatal library failure and re-indexing attempt for very large libraries, // we're better off processing a limited number of source files as we'll be able to see // *some* results more quickly and we'll have a working, though yet incomplete, // index in *reasonable time*. // // To reconstruct the entire index will take a *long* time. We grow the index and other meta // stores a bunch-of-files at a time and then repeat the entire maintenance process until // we'll be sure to have run out of files to process for sure... // Mark that we are now processing the folder while (TestAndReset_FolderContentsHaveChanged()) { // If this library is busy, skip it for now if (Library.IsBusyAddingPDFs || Library.IsBusyRegeneratingTags) { Logging.Debug特("FolderWatcher: Not daemon processing any library that is busy with adds..."); FolderContentsHaveChanged = true; break; } if (ShutdownableManager.Instance.IsShuttingDown) { Logging.Debug特("FolderWatcher: Breaking out of outer processing loop due to daemon termination"); FolderContentsHaveChanged = true; break; } if (Qiqqa.Common.Configuration.ConfigurationManager.Instance.ConfigurationRecord.DisableAllBackgroundTasks) { Logging.Debug特("FolderWatcher: Breaking out of outer processing loop due to DisableAllBackgroundTasks"); FolderContentsHaveChanged = true; break; } if (LibraryRef == null || folder_watcher_manager?.TypedTarget == null) { Logging.Debug特("FolderWatcher: Breaking out of outer processing loop due to disposed library and/or watch manager"); FolderContentsHaveChanged = true; break; } if (!ConfigurationManager.IsEnabled(nameof(FolderWatcher))) { Logging.Info("Watched folder {0} will not be watched/scanned due to Developer Override setting {1}=false", configured_folder_to_watch, nameof(FolderWatcher)); break; } // reset counters for logging/reporting: watch_stats.Reset(daemon); // If we get this far then there might be some work to do in the folder... 
Stopwatch clk = Stopwatch.StartNew(); // // Summary: // [AlphaFS] Specifies a set of custom filters to be used with enumeration methods // of Alphaleonis.Win32.Filesystem.Directory, e.g., Alphaleonis.Win32.Filesystem.Directory.EnumerateDirectories(System.String), // Alphaleonis.Win32.Filesystem.Directory.EnumerateFiles(System.String), or Alphaleonis.Win32.Filesystem.Directory.EnumerateFileSystemEntries(System.String). // // Remarks: // Alphaleonis.Win32.Filesystem.DirectoryEnumerationFilters allows scenarios in // which files/directories being enumerated by the methods of Alphaleonis.Win32.Filesystem.Directory // class are accepted only if they match the search pattern, attributes (see Alphaleonis.Win32.Filesystem.DirectoryEnumerationOptions.SkipReparsePoints), // and optionally also the custom criteria tested in the method whose delegate is // specified in Alphaleonis.Win32.Filesystem.DirectoryEnumerationFilters.InclusionFilter. // These criteria could be, e.g., file size exceeding some threshold, pathname matches // a complex regular expression, etc. If the enumeration process is set to be recursive // (see Alphaleonis.Win32.Filesystem.DirectoryEnumerationOptions.Recursive) and // Alphaleonis.Win32.Filesystem.DirectoryEnumerationFilters.RecursionFilter is specified, // the directory is traversed recursively only if it matches the custom criteria // in Alphaleonis.Win32.Filesystem.DirectoryEnumerationFilters.RecursionFilter method. // This allows, for example, custom handling of junctions and symbolic links, e.g., // detection of cycles. If any error occurs during the enumeration and the enumeration // process is not set to ignore errors (see Alphaleonis.Win32.Filesystem.DirectoryEnumerationOptions.ContinueOnException), // an exception is thrown unless the error is handled (filtered out) by the method // specified in Alphaleonis.Win32.Filesystem.DirectoryEnumerationFilters.ErrorFilter // (if specified). The method may, for example, consume the error by reporting it // in a log, so that the enumeration continues as in the case of Alphaleonis.Win32.Filesystem.DirectoryEnumerationOptions.ContinueOnException // option but the user will be informed about errors. // global_watch_stats.Inc(); DirectoryEnumerationFilters filter = new DirectoryEnumerationFilters(); filter.ErrorFilter = DecideIfErrorDuringDirScan; filter.InclusionFilter = DecideIfIncludeDuringDirScan; filter.RecursionFilter = DecideIfRecurseDuringDirScan; // Note: don't use the CancellationToken, just throw an exception in the InclusionFilter when it's time to abort the scan. //filter.CancellationToken = null; IEnumerable <string> filenames_in_folder = Directory.EnumerateFiles(configured_folder_to_watch, DirectoryEnumerationOptions.Files | DirectoryEnumerationOptions.BasicSearch | //DirectoryEnumerationOptions.ContinueOnException | DirectoryEnumerationOptions.LargeCache | DirectoryEnumerationOptions.Recursive, filter); // SearchOption.AllDirectories); Logging.Debug特("Directory.EnumerateFiles took {0} ms", clk.ElapsedMilliseconds); // Do NOT count files which are already present in our library/DB, // despite the fact that those also *do* take time and effort to check // in the code above. // // The issue here is that when we would import files A,B,C,D,E,F,G,H,I,J,K, // we would do so in tiny batches, resulting in a rescan after each batch // where the already processed files will be included in the set, but must // be filtered out as 'already in there' in the code above. 
// Iff we had counted *all* files we inspect from the Watch Directory, // we would never make it batch the first batch as then our count limit // would trigger already for every round through here! List <string> filenames_that_are_new = new List <string>(); foreach (string filename in filenames_in_folder) { Logging.Info("FolderWatcher: {0} of {1} files have been processed/inspected (total {2} scanned, {3} skipped, {4} ignored)", watch_stats.processed_file_count, watch_stats.processing_file_count, watch_stats.scanned_file_count, watch_stats.skipped_file_count, watch_stats.scanned_file_count - watch_stats.skipped_file_count - watch_stats.processing_file_count); try { // check the file once again: it MAY have disappeared while we were slowly scanning the remainder of the dirtree. FileSystemEntryInfo info = File.GetFileSystemEntryInfo(filename); watch_stats.processing_file_count++; Logging.Info("FolderWatcher is importing {0}", filename); filenames_that_are_new.Add(filename); } catch (Exception ex) { Logging.Error(ex, "Folder Watcher: skipping file {0} due to file I/O error {1}", filename, ex.Message); } } Logging.Debug特("Directory.EnumerateFiles took {0} ms", clk.ElapsedMilliseconds); // Create the import records List <FilenameWithMetadataImport> filename_with_metadata_imports = new List <FilenameWithMetadataImport>(); foreach (var filename in filenames_that_are_new) { filename_with_metadata_imports.Add(new FilenameWithMetadataImport { filename = filename, tags = new HashSet <string>(tags) }); #if false // delay until the PDF has actually been processed completely! // // Add this file to the list of processed files... folder_watcher_manager.RememberProcessedFile(filename); #endif } // Get the library to import all these new files if (filename_with_metadata_imports.Count > 0) { ImportingIntoLibrary.AddNewPDFDocumentsToLibraryWithMetadata_SYNCHRONOUS(LibraryRef, true, filename_with_metadata_imports.ToArray()); // TODO: refactor the ImportingIntoLibrary class } watch_stats.processed_file_count = watch_stats.processing_file_count; Logging.Info("FolderWatcher: {0} of {1} files have been processed/inspected (total {2} scanned, {3} skipped, {4} ignored)", watch_stats.processed_file_count, watch_stats.processing_file_count, watch_stats.scanned_file_count, watch_stats.skipped_file_count, watch_stats.scanned_file_count - watch_stats.skipped_file_count - watch_stats.processing_file_count); if (watch_stats.index_processing_clock.ElapsedMilliseconds >= FolderWatcher.MAX_SECONDS_PER_ITERATION) { Logging.Info("FolderWatcher: Taking a nap due to MAX_SECONDS_PER_ITERATION: {0} seconds consumed, {1} threads pending", watch_stats.index_processing_clock.ElapsedMilliseconds / 1E3, SafeThreadPool.QueuedThreadCount); watch_stats.daemon.Sleep(SECONDS_TO_RELAX_PER_ITERATION); watch_stats.index_processing_clock.Restart(); } Logging.Debug("FolderWatcher End-Of-Round ({0} ms)", clk.ElapsedMilliseconds); } Logging.Debug("FolderWatcher END"); }
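// The watcher above wires up three named delegates (DecideIfErrorDuringDirScan,
// DecideIfIncludeDuringDirScan, DecideIfRecurseDuringDirScan) whose bodies are not shown in
// this excerpt. A minimal illustration of what such filters could look like follows; the
// bodies are assumptions for illustration, not Qiqqa's actual implementations: log-and-continue
// on errors, accept only PDFs that have not been processed yet, never recurse into reparse points.
using System;
using System.Collections.Generic;
using Alphaleonis.Win32.Filesystem;

internal class FolderScanFiltersSketch
{
    private readonly HashSet<string> already_processed = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

    public bool DecideIfErrorDuringDirScan(int errorCode, string errorMessage, string pathProcessed)
    {
        Console.Error.WriteLine($"Scan error {errorCode} at '{pathProcessed}': {errorMessage}");
        return true; // swallow the error so the enumeration continues
    }

    public bool DecideIfIncludeDuringDirScan(FileSystemEntryInfo fsei)
    {
        return fsei.Extension.Equals(".pdf", StringComparison.OrdinalIgnoreCase)
               && !already_processed.Contains(fsei.FullPath);
    }

    public bool DecideIfRecurseDuringDirScan(FileSystemEntryInfo entryInfo)
    {
        return !entryInfo.IsMountPoint && !entryInfo.IsSymbolicLink;
    }
}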
private static void Main(string[] args) { ExceptionlessClient.Default.Startup("tYeWS6A5K5uItgpB44dnNy2qSb2xJxiQWRRGWebq"); SetupNLog(); _logger = LogManager.GetLogger("EvtxECmd"); _fluentCommandLineParser = new FluentCommandLineParser <ApplicationArguments> { IsCaseSensitive = false }; _fluentCommandLineParser.Setup(arg => arg.File) .As('f') .WithDescription("File to process. This or -d is required\r\n"); _fluentCommandLineParser.Setup(arg => arg.Directory) .As('d') .WithDescription("Directory to process that contains evtx files. This or -f is required"); _fluentCommandLineParser.Setup(arg => arg.CsvDirectory) .As("csv") .WithDescription( "Directory to save CSV formatted results to."); // This, --json, or --xml required _fluentCommandLineParser.Setup(arg => arg.CsvName) .As("csvf") .WithDescription( "File name to save CSV formatted results to. When present, overrides default name"); _fluentCommandLineParser.Setup(arg => arg.JsonDirectory) .As("json") .WithDescription( "Directory to save JSON formatted results to."); // This, --csv, or --xml required _fluentCommandLineParser.Setup(arg => arg.JsonName) .As("jsonf") .WithDescription( "File name to save JSON formatted results to. When present, overrides default name"); _fluentCommandLineParser.Setup(arg => arg.XmlDirectory) .As("xml") .WithDescription( "Directory to save XML formatted results to."); // This, --csv, or --json required _fluentCommandLineParser.Setup(arg => arg.XmlName) .As("xmlf") .WithDescription( "File name to save XML formatted results to. When present, overrides default name\r\n"); _fluentCommandLineParser.Setup(arg => arg.DateTimeFormat) .As("dt") .WithDescription( "The custom date/time format to use when displaying time stamps. Default is: yyyy-MM-dd HH:mm:ss.fffffff") .SetDefault("yyyy-MM-dd HH:mm:ss.fffffff"); _fluentCommandLineParser.Setup(arg => arg.IncludeIds) .As("inc") .WithDescription( "List of Event IDs to process. All others are ignored. Overrides --exc Format is 4624,4625,5410") .SetDefault(string.Empty); _fluentCommandLineParser.Setup(arg => arg.ExcludeIds) .As("exc") .WithDescription( "List of Event IDs to IGNORE. All others are included. Format is 4624,4625,5410") .SetDefault(string.Empty); _fluentCommandLineParser.Setup(arg => arg.StartDate) .As("sd") .WithDescription( "Start date for including events (UTC). Anything OLDER than this is dropped. Format should match --dt") .SetDefault(string.Empty); _fluentCommandLineParser.Setup(arg => arg.EndDate) .As("ed") .WithDescription( "End date for including events (UTC). Anything NEWER than this is dropped. Format should match --dt") .SetDefault(string.Empty); _fluentCommandLineParser.Setup(arg => arg.FullJson) .As("fj") .WithDescription( "When true, export all available data when using --json. Default is FALSE.") .SetDefault(false); // _fluentCommandLineParser.Setup(arg => arg.PayloadAsJson) // .As("pj") // .WithDescription( // "When true, include event *payload* as json. Default is TRUE.") // .SetDefault(true); _fluentCommandLineParser.Setup(arg => arg.TimeDiscrepancyThreshold) .As("tdt") .WithDescription( "The number of seconds to use for time discrepancy detection. Default is 1 second") .SetDefault(1); _fluentCommandLineParser.Setup(arg => arg.Metrics) .As("met") .WithDescription( "When true, show metrics about processed event log. Default is TRUE.\r\n") .SetDefault(true); _fluentCommandLineParser.Setup(arg => arg.MapsDirectory) .As("maps") .WithDescription( "The path where event maps are located. 
Defaults to 'Maps' folder where program was executed\r\n ") .SetDefault(Path.Combine(BaseDirectory, "Maps")); _fluentCommandLineParser.Setup(arg => arg.Vss) .As("vss") .WithDescription( "Process all Volume Shadow Copies that exist on drive specified by -f or -d . Default is FALSE") .SetDefault(false); _fluentCommandLineParser.Setup(arg => arg.Dedupe) .As("dedupe") .WithDescription( "Deduplicate -f or -d & VSCs based on SHA-1. First file found wins. Default is TRUE\r\n") .SetDefault(true); _fluentCommandLineParser.Setup(arg => arg.Sync) .As("sync") .WithDescription( "If true, the latest maps from https://github.com/EricZimmerman/evtx/tree/master/evtx/Maps are downloaded and local maps updated. Default is FALSE\r\n") .SetDefault(false); _fluentCommandLineParser.Setup(arg => arg.Debug) .As("debug") .WithDescription("Show debug information during processing").SetDefault(false); _fluentCommandLineParser.Setup(arg => arg.Trace) .As("trace") .WithDescription("Show trace information during processing\r\n").SetDefault(false); var header = $"EvtxECmd version {Assembly.GetExecutingAssembly().GetName().Version}" + "\r\n\r\nAuthor: Eric Zimmerman ([email protected])" + "\r\nhttps://github.com/EricZimmerman/evtx"; var footer = @"Examples: EvtxECmd.exe -f ""C:\Temp\Application.evtx"" --csv ""c:\temp\out"" --csvf MyOutputFile.csv" + "\r\n\t " + @" EvtxECmd.exe -f ""C:\Temp\Application.evtx"" --csv ""c:\temp\out""" + "\r\n\t " + @" EvtxECmd.exe -f ""C:\Temp\Application.evtx"" --json ""c:\temp\jsonout""" + "\r\n\t " + "\r\n\t" + " Short options (single letter) are prefixed with a single dash. Long commands are prefixed with two dashes\r\n"; _fluentCommandLineParser.SetupHelp("?", "help") .WithHeader(header) .Callback(text => _logger.Info(text + "\r\n" + footer)); var result = _fluentCommandLineParser.Parse(args); if (result.HelpCalled) { return; } if (result.HasErrors) { _logger.Error(""); _logger.Error(result.ErrorText); _fluentCommandLineParser.HelpOption.ShowHelp(_fluentCommandLineParser.Options); return; } if (_fluentCommandLineParser.Object.Sync) { try { _logger.Info(header); UpdateFromRepo(); } catch (Exception e) { _logger.Error(e, $"There was an error checking for updates: {e.Message}"); } Environment.Exit(0); } if (_fluentCommandLineParser.Object.File.IsNullOrEmpty() && _fluentCommandLineParser.Object.Directory.IsNullOrEmpty()) { _fluentCommandLineParser.HelpOption.ShowHelp(_fluentCommandLineParser.Options); _logger.Warn("-f or -d is required. Exiting"); return; } _logger.Info(header); _logger.Info(""); _logger.Info($"Command line: {string.Join(" ", Environment.GetCommandLineArgs().Skip(1))}\r\n"); if (IsAdministrator() == false) { _logger.Fatal("Warning: Administrator privileges not found!\r\n"); } if (_fluentCommandLineParser.Object.Debug) { LogManager.Configuration.LoggingRules.First().EnableLoggingForLevel(LogLevel.Debug); } if (_fluentCommandLineParser.Object.Trace) { LogManager.Configuration.LoggingRules.First().EnableLoggingForLevel(LogLevel.Trace); } LogManager.ReconfigExistingLoggers(); if (_fluentCommandLineParser.Object.Vss & (IsAdministrator() == false)) { _logger.Error("--vss is present, but administrator rights not found. 
Exiting\r\n"); return; } var sw = new Stopwatch(); sw.Start(); var ts = DateTimeOffset.UtcNow; _errorFiles = new Dictionary <string, int>(); if (_fluentCommandLineParser.Object.JsonDirectory.IsNullOrEmpty() == false) { if (Directory.Exists(_fluentCommandLineParser.Object.JsonDirectory) == false) { _logger.Warn( $"Path to '{_fluentCommandLineParser.Object.JsonDirectory}' doesn't exist. Creating..."); try { Directory.CreateDirectory(_fluentCommandLineParser.Object.JsonDirectory); } catch (Exception) { _logger.Fatal( $"Unable to create directory '{_fluentCommandLineParser.Object.JsonDirectory}'. Does a file with the same name exist? Exiting"); return; } } var outName = $"{ts:yyyyMMddHHmmss}_EvtxECmd_Output.json"; if (_fluentCommandLineParser.Object.JsonName.IsNullOrEmpty() == false) { outName = Path.GetFileName(_fluentCommandLineParser.Object.JsonName); } var outFile = Path.Combine(_fluentCommandLineParser.Object.JsonDirectory, outName); _logger.Warn($"json output will be saved to '{outFile}'\r\n"); try { _swJson = new StreamWriter(outFile, false, Encoding.UTF8); } catch (Exception) { _logger.Error($"Unable to open '{outFile}'! Is it in use? Exiting!\r\n"); Environment.Exit(0); } JsConfig.DateHandler = DateHandler.ISO8601; } if (_fluentCommandLineParser.Object.XmlDirectory.IsNullOrEmpty() == false) { if (Directory.Exists(_fluentCommandLineParser.Object.XmlDirectory) == false) { _logger.Warn( $"Path to '{_fluentCommandLineParser.Object.XmlDirectory}' doesn't exist. Creating..."); try { Directory.CreateDirectory(_fluentCommandLineParser.Object.XmlDirectory); } catch (Exception) { _logger.Fatal( $"Unable to create directory '{_fluentCommandLineParser.Object.XmlDirectory}'. Does a file with the same name exist? Exiting"); return; } } var outName = $"{ts:yyyyMMddHHmmss}_EvtxECmd_Output.xml"; if (_fluentCommandLineParser.Object.XmlName.IsNullOrEmpty() == false) { outName = Path.GetFileName(_fluentCommandLineParser.Object.XmlName); } var outFile = Path.Combine(_fluentCommandLineParser.Object.XmlDirectory, outName); _logger.Warn($"XML output will be saved to '{outFile}'\r\n"); try { _swXml = new StreamWriter(outFile, false, Encoding.UTF8); } catch (Exception) { _logger.Error($"Unable to open '{outFile}'! Is it in use? Exiting!\r\n"); Environment.Exit(0); } } if (_fluentCommandLineParser.Object.StartDate.IsNullOrEmpty() == false) { if (DateTimeOffset.TryParse(_fluentCommandLineParser.Object.StartDate, null, DateTimeStyles.AssumeUniversal, out var dt)) { _startDate = dt; _logger.Info($"Setting Start date to '{_startDate.Value.ToUniversalTime().ToString(_fluentCommandLineParser.Object.DateTimeFormat)}'"); } else { _logger.Warn($"Could not parse '{_fluentCommandLineParser.Object.StartDate}' to a valud datetime! Events will not be filtered by Start date!"); } } if (_fluentCommandLineParser.Object.EndDate.IsNullOrEmpty() == false) { if (DateTimeOffset.TryParse(_fluentCommandLineParser.Object.EndDate, null, DateTimeStyles.AssumeUniversal, out var dt)) { _endDate = dt; _logger.Info($"Setting End date to '{_endDate.Value.ToUniversalTime().ToString(_fluentCommandLineParser.Object.DateTimeFormat)}'"); } else { _logger.Warn($"Could not parse '{_fluentCommandLineParser.Object.EndDate}' to a valud datetime! 
Events will not be filtered by End date!"); } } if (_startDate.HasValue || _endDate.HasValue) { _logger.Info(""); } if (_fluentCommandLineParser.Object.CsvDirectory.IsNullOrEmpty() == false) { if (Directory.Exists(_fluentCommandLineParser.Object.CsvDirectory) == false) { _logger.Warn( $"Path to '{_fluentCommandLineParser.Object.CsvDirectory}' doesn't exist. Creating..."); try { Directory.CreateDirectory(_fluentCommandLineParser.Object.CsvDirectory); } catch (Exception) { _logger.Fatal( $"Unable to create directory '{_fluentCommandLineParser.Object.CsvDirectory}'. Does a file with the same name exist? Exiting"); return; } } var outName = $"{ts:yyyyMMddHHmmss}_EvtxECmd_Output.csv"; if (_fluentCommandLineParser.Object.CsvName.IsNullOrEmpty() == false) { outName = Path.GetFileName(_fluentCommandLineParser.Object.CsvName); } var outFile = Path.Combine(_fluentCommandLineParser.Object.CsvDirectory, outName); _logger.Warn($"CSV output will be saved to '{outFile}'\r\n"); try { _swCsv = new StreamWriter(outFile, false, Encoding.UTF8); var opt = new CsvConfiguration(CultureInfo.InvariantCulture) { ShouldUseConstructorParameters = _ => false }; _csvWriter = new CsvWriter(_swCsv, opt); } catch (Exception) { _logger.Error($"Unable to open '{outFile}'! Is it in use? Exiting!\r\n"); Environment.Exit(0); } var foo = _csvWriter.Context.AutoMap <EventRecord>(); // if (_fluentCommandLineParser.Object.PayloadAsJson == false) // { // foo.Map(t => t.Payload).Ignore(); // } // else // { // // } foo.Map(t => t.RecordPosition).Ignore(); foo.Map(t => t.Size).Ignore(); foo.Map(t => t.Timestamp).Ignore(); foo.Map(t => t.RecordNumber).Index(0); foo.Map(t => t.EventRecordId).Index(1); foo.Map(t => t.TimeCreated).Index(2); foo.Map(t => t.TimeCreated).Convert(t => $"{t.Value.TimeCreated.ToString(_fluentCommandLineParser.Object.DateTimeFormat)}"); foo.Map(t => t.EventId).Index(3); foo.Map(t => t.Level).Index(4); foo.Map(t => t.Provider).Index(5); foo.Map(t => t.Channel).Index(6); foo.Map(t => t.ProcessId).Index(7); foo.Map(t => t.ThreadId).Index(8); foo.Map(t => t.Computer).Index(9); foo.Map(t => t.UserId).Index(10); foo.Map(t => t.MapDescription).Index(11); foo.Map(t => t.UserName).Index(12); foo.Map(t => t.RemoteHost).Index(13); foo.Map(t => t.PayloadData1).Index(14); foo.Map(t => t.PayloadData2).Index(15); foo.Map(t => t.PayloadData3).Index(16); foo.Map(t => t.PayloadData4).Index(17); foo.Map(t => t.PayloadData5).Index(18); foo.Map(t => t.PayloadData6).Index(19); foo.Map(t => t.ExecutableInfo).Index(20); foo.Map(t => t.HiddenRecord).Index(21); foo.Map(t => t.SourceFile).Index(22); foo.Map(t => t.Keywords).Index(23); foo.Map(t => t.Payload).Index(24); _csvWriter.Context.RegisterClassMap(foo); _csvWriter.WriteHeader <EventRecord>(); _csvWriter.NextRecord(); } if (Directory.Exists(_fluentCommandLineParser.Object.MapsDirectory) == false) { _logger.Warn( $"Maps directory '{_fluentCommandLineParser.Object.MapsDirectory}' does not exist! 
Event ID maps will not be loaded!!"); } else { _logger.Debug($"Loading maps from '{Path.GetFullPath(_fluentCommandLineParser.Object.MapsDirectory)}'"); var errors = EventLog.LoadMaps(Path.GetFullPath(_fluentCommandLineParser.Object.MapsDirectory)); if (errors) { return; } _logger.Info($"Maps loaded: {EventLog.EventLogMaps.Count:N0}"); } _includeIds = new HashSet <int>(); _excludeIds = new HashSet <int>(); if (_fluentCommandLineParser.Object.ExcludeIds.IsNullOrEmpty() == false) { var excSegs = _fluentCommandLineParser.Object.ExcludeIds.Split(','); foreach (var incSeg in excSegs) { if (int.TryParse(incSeg, out var goodId)) { _excludeIds.Add(goodId); } } } if (_fluentCommandLineParser.Object.IncludeIds.IsNullOrEmpty() == false) { _excludeIds.Clear(); var incSegs = _fluentCommandLineParser.Object.IncludeIds.Split(','); foreach (var incSeg in incSegs) { if (int.TryParse(incSeg, out var goodId)) { _includeIds.Add(goodId); } } } if (_fluentCommandLineParser.Object.Vss) { string driveLetter; if (_fluentCommandLineParser.Object.File.IsEmpty() == false) { driveLetter = Path.GetPathRoot(Path.GetFullPath(_fluentCommandLineParser.Object.File)) .Substring(0, 1); } else { driveLetter = Path.GetPathRoot(Path.GetFullPath(_fluentCommandLineParser.Object.Directory)) .Substring(0, 1); } Helper.MountVss(driveLetter, VssDir); Console.WriteLine(); } EventLog.TimeDiscrepancyThreshold = _fluentCommandLineParser.Object.TimeDiscrepancyThreshold; if (_fluentCommandLineParser.Object.File.IsNullOrEmpty() == false) { if (File.Exists(_fluentCommandLineParser.Object.File) == false) { _logger.Warn($"'{_fluentCommandLineParser.Object.File}' does not exist! Exiting"); return; } if (_swXml == null && _swJson == null && _swCsv == null) { //no need for maps _logger.Debug("Clearing map collection since no output specified"); EventLog.EventLogMaps.Clear(); } _fluentCommandLineParser.Object.Dedupe = false; ProcessFile(Path.GetFullPath(_fluentCommandLineParser.Object.File)); if (_fluentCommandLineParser.Object.Vss) { var vssDirs = Directory.GetDirectories(VssDir); var root = Path.GetPathRoot(Path.GetFullPath(_fluentCommandLineParser.Object.File)); var stem = Path.GetFullPath(_fluentCommandLineParser.Object.File).Replace(root, ""); foreach (var vssDir in vssDirs) { var newPath = Path.Combine(vssDir, stem); if (File.Exists(newPath)) { ProcessFile(newPath); } } } } else { _logger.Info($"Looking for event log files in '{_fluentCommandLineParser.Object.Directory}'"); _logger.Info(""); var f = new DirectoryEnumerationFilters { InclusionFilter = fsei => fsei.Extension.ToUpperInvariant() == ".EVTX", RecursionFilter = entryInfo => !entryInfo.IsMountPoint && !entryInfo.IsSymbolicLink, ErrorFilter = (errorCode, errorMessage, pathProcessed) => true }; var dirEnumOptions = DirectoryEnumerationOptions.Files | DirectoryEnumerationOptions.Recursive | DirectoryEnumerationOptions.SkipReparsePoints | DirectoryEnumerationOptions.ContinueOnException | DirectoryEnumerationOptions.BasicSearch; var files2 = Directory.EnumerateFileSystemEntries(Path.GetFullPath(_fluentCommandLineParser.Object.Directory), dirEnumOptions, f); if (_swXml == null && _swJson == null && _swCsv == null) { //no need for maps _logger.Debug("Clearing map collection since no output specified"); EventLog.EventLogMaps.Clear(); } foreach (var file in files2) { ProcessFile(file); } if (_fluentCommandLineParser.Object.Vss) { var vssDirs = Directory.GetDirectories(VssDir); Console.WriteLine(); foreach (var vssDir in vssDirs) { var root = 
Path.GetPathRoot(Path.GetFullPath(_fluentCommandLineParser.Object.Directory)); var stem = Path.GetFullPath(_fluentCommandLineParser.Object.Directory).Replace(root, ""); var target = Path.Combine(vssDir, stem); _logger.Fatal($"\r\nSearching 'VSS{target.Replace($"{VssDir}\\","")}' for event logs..."); var vssFiles = Helper.GetFilesFromPath(target, true, "*.evtx"); foreach (var file in vssFiles) { ProcessFile(file); } } } } try { _swCsv?.Flush(); _swCsv?.Close(); _swJson?.Flush(); _swJson?.Close(); _swXml?.Flush(); _swXml?.Close(); } catch (Exception e) { _logger.Error($"Error when flushing output files to disk! Error message: {e.Message}"); } sw.Stop(); _logger.Info(""); var suff = string.Empty; if (_fileCount != 1) { suff = "s"; } _logger.Error( $"Processed {_fileCount:N0} file{suff} in {sw.Elapsed.TotalSeconds:N4} seconds\r\n"); if (_errorFiles.Count > 0) { _logger.Info(""); _logger.Error("Files with errors"); foreach (var errorFile in _errorFiles) { _logger.Info($"'{errorFile.Key}' error count: {errorFile.Value:N0}"); } _logger.Info(""); } if (_fluentCommandLineParser.Object.Vss) { if (Directory.Exists(VssDir)) { foreach (var directory in Directory.GetDirectories(VssDir)) { Directory.Delete(directory); } Directory.Delete(VssDir, true, true); } } }
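// A sketch of the shadow-copy path remapping done above when --vss is used: strip the drive
// root from the original path and re-root the remaining stem under each mounted VSS directory.
// MapIntoShadowCopies is an illustrative helper name, not part of EvtxECmd.
using System.Collections.Generic;
using System.IO;

internal static class VssPathSketch
{
    public static IEnumerable<string> MapIntoShadowCopies(string originalPath, string vssRoot)
    {
        var fullPath = Path.GetFullPath(originalPath);
        var root = Path.GetPathRoot(fullPath);           // e.g. "C:\"
        var stem = fullPath.Replace(root, "");           // path relative to the drive root

        foreach (var vssDir in Directory.GetDirectories(vssRoot))
        {
            var candidate = Path.Combine(vssDir, stem);  // the same artifact inside this snapshot
            if (File.Exists(candidate) || Directory.Exists(candidate))
            {
                yield return candidate;
            }
        }
    }
}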
private static void DoWork(string d, string f, string @out, bool ca, bool cn, bool debug, bool trace) { if (f.IsNullOrEmpty() == false || d.IsNullOrEmpty() == false) { if (@out.IsNullOrEmpty()) { var helpBld = new HelpBuilder(LocalizationResources.Instance, Console.WindowWidth); var hc = new HelpContext(helpBld, _rootCommand, Console.Out); helpBld.Write(hc); Console.WriteLine(); _logger.Warn("--out is required. Exiting"); Console.WriteLine(); return; } } if (debug) { foreach (var r in LogManager.Configuration.LoggingRules) { r.EnableLoggingForLevel(LogLevel.Debug); } LogManager.ReconfigExistingLoggers(); _logger.Debug("Enabled debug messages..."); } if (trace) { foreach (var r in LogManager.Configuration.LoggingRules) { r.EnableLoggingForLevel(LogLevel.Trace); } LogManager.ReconfigExistingLoggers(); _logger.Trace("Enabled trace messages..."); } var hivesToProcess = new List <string>(); _logger.Info(Header); _logger.Info(""); _logger.Info($"Command line: {string.Join(" ", Environment.GetCommandLineArgs().Skip(1))}\r\n"); if (f?.Length > 0) { if (File.Exists(f) == false) { _logger.Error($"File '{f}' does not exist."); return; } hivesToProcess.Add(f); } else if (d?.Length > 0) { if (Directory.Exists(d) == false) { _logger.Error($"Directory '{d}' does not exist."); return; } var okFileParts = new HashSet <string>(); okFileParts.Add("USRCLASS"); okFileParts.Add("NTUSER"); okFileParts.Add("SYSTEM"); okFileParts.Add("SAM"); okFileParts.Add("SOFTWARE"); okFileParts.Add("AMCACHE"); okFileParts.Add("SYSCACHE"); okFileParts.Add("SECURITY"); okFileParts.Add("DRIVERS"); okFileParts.Add("COMPONENTS"); var directoryEnumerationFilters = new DirectoryEnumerationFilters(); directoryEnumerationFilters.InclusionFilter = fsei => { if (fsei.Extension.ToUpperInvariant() == ".LOG1" || fsei.Extension.ToUpperInvariant() == ".LOG2" || fsei.Extension.ToUpperInvariant() == ".DLL" || fsei.Extension.ToUpperInvariant() == ".LOG" || fsei.Extension.ToUpperInvariant() == ".CSV" || fsei.Extension.ToUpperInvariant() == ".BLF" || fsei.Extension.ToUpperInvariant() == ".REGTRANS-MS" || fsei.Extension.ToUpperInvariant() == ".EXE" || fsei.Extension.ToUpperInvariant() == ".TXT" || fsei.Extension.ToUpperInvariant() == ".INI") { return(false); } var foundOkFilePart = false; foreach (var okFilePart in okFileParts) { if (fsei.FileName.ToUpperInvariant().Contains(okFilePart)) { foundOkFilePart = true; // return true; } } if (foundOkFilePart == false) { return(false); } var fi = new FileInfo(fsei.FullPath); if (fi.Length < 4) { return(false); } try { using (var fs = new FileStream(fsei.FullPath, FileMode.Open, FileAccess.Read)) { using (var br = new BinaryReader(fs, new ASCIIEncoding())) { try { var chunk = br.ReadBytes(4); var sig = BitConverter.ToInt32(chunk, 0); if (sig == 0x66676572) { return(true); } } catch (Exception) { } return(false); } } } catch (IOException) { if (Helper.IsAdministrator() == false) { throw new UnauthorizedAccessException("Administrator privileges not found!"); } var files = new List <string>(); files.Add(fsei.FullPath); var rawf = Helper.GetRawFiles(files); if (rawf.First().FileStream.Length == 0) { return(false); } try { var b = new byte[4]; rawf.First().FileStream.ReadExactly(b, 4); var sig = BitConverter.ToInt32(b, 0); if (sig == 0x66676572) { return(true); } } catch (Exception) { } return(false); } }; directoryEnumerationFilters.RecursionFilter = entryInfo => !entryInfo.IsMountPoint && !entryInfo.IsSymbolicLink; directoryEnumerationFilters.ErrorFilter = (errorCode, errorMessage, pathProcessed) => true; var 
dirEnumOptions = DirectoryEnumerationOptions.Files | DirectoryEnumerationOptions.Recursive | DirectoryEnumerationOptions.SkipReparsePoints | DirectoryEnumerationOptions.ContinueOnException | DirectoryEnumerationOptions.BasicSearch; if (Directory.Exists(@out) == false) { _logger.Info($"Creating --out directory '{@out}'..."); Directory.CreateDirectory(@out); } else { if (Directory.GetFiles(@out).Length > 0 && cn) { _logger.Warn($"'{@out}' contains files! This may cause --cn to revert back to uncompressed names. Ideally, '{@out}' should be empty."); Console.WriteLine(); } } _logger.Fatal($"Searching '{d}' for hives..."); var files2 = Alphaleonis.Win32.Filesystem.Directory.EnumerateFileSystemEntries(d, dirEnumOptions, directoryEnumerationFilters); var count = 0; try { hivesToProcess.AddRange(files2); count = hivesToProcess.Count; _logger.Info($"\tHives found: {count:N0}"); } catch (Exception ex) { _logger.Fatal($"Could not access all files in '{d}'! Error: {ex.Message}"); _logger.Error(""); _logger.Fatal("Rerun the program with Administrator privileges to try again\r\n"); //Environment.Exit(-1); } } else { var helpBld = new HelpBuilder(LocalizationResources.Instance, Console.WindowWidth); var hc = new HelpContext(helpBld, _rootCommand, Console.Out); helpBld.Write(hc); return; } if (hivesToProcess.Count == 0) { _logger.Warn("No hives were found. Exiting..."); return; } _sw = new Stopwatch(); _sw.Start(); foreach (var hiveToProcess in hivesToProcess) { _logger.Info(""); byte[] updatedBytes = null; _logger.Info($"Processing hive '{hiveToProcess}'"); if (File.Exists(hiveToProcess) == false) { _logger.Warn($"'{hiveToProcess}' does not exist. Skipping"); continue; } try { RegistryHive reg; var dirname = Path.GetDirectoryName(hiveToProcess); var hiveBase = Path.GetFileName(hiveToProcess); List <RawCopyReturn> rawFiles = null; try { using (var fs = new FileStream(hiveToProcess, FileMode.Open, FileAccess.Read)) { reg = new RegistryHive(fs.ReadFully(), hiveToProcess); } } catch (IOException) { //file is in use if (Helper.IsAdministrator() == false) { throw new UnauthorizedAccessException("Administrator privileges not found!"); } _logger.Warn($"\t'{hiveToProcess}' is in use. Rerouting...\r\n"); var files = new List <string>(); files.Add(hiveToProcess); var logFiles = Directory.GetFiles(dirname, $"{hiveBase}.LOG?"); foreach (var logFile in logFiles) { files.Add(logFile); } rawFiles = Helper.GetRawFiles(files); if (rawFiles.First().FileStream.Length == 0) { continue; } var bb = rawFiles.First().FileStream.ReadFully(); reg = new RegistryHive(bb, rawFiles.First().InputFilename); } if (reg.Header.PrimarySequenceNumber != reg.Header.SecondarySequenceNumber) { if (string.IsNullOrEmpty(dirname)) { dirname = "."; } var logFiles = Directory.GetFiles(dirname, $"{hiveBase}.LOG?"); if (logFiles.Length == 0) { if (ca) { _logger.Info($"\tHive '{hiveToProcess}' is dirty, but no logs were found in the same directory. --ca is true. Copying..."); updatedBytes = File.ReadAllBytes(hiveToProcess); } else { _logger.Info($"\tHive '{hiveToProcess}' is dirty and no transaction logs were found in the same directory. --ca is false. 
Skipping..."); continue; } } if (updatedBytes == null) { if (rawFiles != null) { var lt = new List <TransactionLogFileInfo>(); foreach (var rawCopyReturn in rawFiles.Skip(1).ToList()) { var bb1 = rawCopyReturn.FileStream.ReadFully(); var tt = new TransactionLogFileInfo(rawCopyReturn.InputFilename, bb1); lt.Add(tt); } updatedBytes = reg.ProcessTransactionLogs(lt); } else { updatedBytes = reg.ProcessTransactionLogs(logFiles.ToList()); } } } if (updatedBytes == null) { if (ca) { _logger.Info($"\tHive '{hiveToProcess}' is not dirty, but --ca is true. Copying..."); updatedBytes = File.ReadAllBytes(hiveToProcess); } else { _logger.Info($"\tHive '{hiveToProcess}' is not dirty and --ca is false. Skipping..."); continue; } } var outFile = hiveToProcess.Replace(":", "").Replace("\\", "_"); var outFileAll = Path.Combine(@out, outFile); if (cn && (outFileAll.ToUpperInvariant().Contains("NTUSER") || outFileAll.ToUpperInvariant().Contains("USRCLASS"))) { var dl = hiveToProcess[0].ToString(); var segs = hiveToProcess.SplitAndTrim('\\'); var profile = segs[2]; var filename = Path.GetFileName(hiveToProcess); var outFile2 = $"{dl}_{profile}_{filename}"; outFileAll = Path.Combine(@out, outFile2); } if (File.Exists(outFileAll)) { var oldOut = outFileAll; outFileAll = Path.Combine(@out, outFile); _logger.Warn($"\tFile '{oldOut}' exists! Saving as non-compressed name: '{outFileAll}'"); } _logger.Fatal($"\tSaving updated hive to '{outFileAll}'"); using (var fs = new FileStream(outFileAll, FileMode.Create)) { fs.Write(updatedBytes, 0, updatedBytes.Length); fs.Flush(); fs.Close(); } } catch (Exception ex) { if (ex.Message.Contains("Sequence numbers do not match and transaction") == false) { if (ex.Message.Contains("Administrator privileges not found")) { _logger.Fatal($"Could not access '{hiveToProcess}' because it is in use"); _logger.Error(""); _logger.Fatal("Rerun the program with Administrator privileges to try again\r\n"); } else { _logger.Error($"There was an error: {ex.Message}"); } } } } _sw.Stop(); _logger.Info(""); _logger.Info($"Total processing time: {_sw.Elapsed.TotalSeconds:N3} seconds"); _logger.Info(""); }
private static IEnumerable <string> EnumerateFilesImpl(string path, IList <string> patterns, FileFilter filter, Gitignore gitignore) { DirectoryEnumerationFilters fileFilters = new DirectoryEnumerationFilters { ErrorFilter = (errorCode, errorMessage, pathProcessed) => { logger.Error($"Find file error {errorCode}: {errorMessage} on {pathProcessed}"); return(true); } }; bool includeAllFiles = (patterns.Count == 0 || (patterns.Count == 1 && (patterns[0] == "*.*" || patterns[0] == "*"))) && (gitignore == null || gitignore.Files.Count == 0); if (includeAllFiles) { fileFilters.InclusionFilter = fsei => { if (!filter.IncludeHidden && fsei.IsHidden) { return(false); } return(true); }; } else { fileFilters.InclusionFilter = fsei => { if (!filter.IncludeHidden && fsei.IsHidden) { return(false); } if (gitignore != null && gitignore.Files.Contains(fsei.FullPath)) { return(false); } foreach (string pattern in patterns) { if (WildcardMatch(fsei.FileName, pattern, true)) { return(true); } } if (filter.IncludeArchive) { foreach (string pattern in ArchiveDirectory.Patterns) { if (WildcardMatch(fsei.FileName, pattern, true)) { return(true); } } } return(false); }; } var fileOptions = baseFileOptions; if (filter.FollowSymlinks) { fileOptions &= ~DirectoryEnumerationOptions.SkipReparsePoints; } return(Directory.EnumerateFiles(path, fileOptions, fileFilters, PathFormat.FullPath)); }
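// WildcardMatch is referenced throughout these examples but its body is not shown. A common
// way to implement such a check (an assumption made for illustration, not necessarily how
// this code base does it) is to translate the wildcard pattern into an anchored regex:
using System.Text.RegularExpressions;

internal static class WildcardSketch
{
    public static bool WildcardMatch(string fileName, string pattern, bool ignoreCase)
    {
        // Escape regex metacharacters, then re-introduce '*' and '?' as wildcards
        var regexPattern = "^" + Regex.Escape(pattern).Replace(@"\*", ".*").Replace(@"\?", ".") + "$";
        var options = ignoreCase ? RegexOptions.IgnoreCase : RegexOptions.None;
        return Regex.IsMatch(fileName, regexPattern, options);
    }
}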
public static bool LoadMaps(string mapPath) { EventLogMaps = new Dictionary <string, EventLogMap>(); var f = new DirectoryEnumerationFilters(); f.InclusionFilter = fsei => fsei.Extension.ToUpperInvariant() == ".MAP"; f.RecursionFilter = null;//entryInfo => !entryInfo.IsMountPoint && !entryInfo.IsSymbolicLink; f.ErrorFilter = (errorCode, errorMessage, pathProcessed) => true; var dirEnumOptions = DirectoryEnumerationOptions.Files | DirectoryEnumerationOptions.SkipReparsePoints | DirectoryEnumerationOptions.ContinueOnException | DirectoryEnumerationOptions.BasicSearch; var mapFiles = Directory.EnumerateFileSystemEntries(mapPath, dirEnumOptions, f).ToList(); var l = LogManager.GetLogger("LoadMaps"); var deserializer = new DeserializerBuilder() .Build(); var validator = new EventLogMapValidator(); var errorMaps = new List <string>(); foreach (var mapFile in mapFiles.OrderBy(t => t)) { try { var eventMapFile = deserializer.Deserialize <EventLogMap>(File.ReadAllText(mapFile)); l.Trace(eventMapFile.Dump); var validate = validator.Validate(eventMapFile); if (DisplayValidationResults(validate, mapFile)) { if (EventLogMaps.ContainsKey( $"{eventMapFile.EventId}-{eventMapFile.Channel.ToUpperInvariant()}") == false) { l.Debug($"'{Path.GetFileName(mapFile)}' is valid. Adding to maps..."); EventLogMaps.Add($"{eventMapFile.EventId}-{eventMapFile.Channel.ToUpperInvariant()}", eventMapFile); } else { l.Warn( $"A map for event id '{eventMapFile.EventId}' with Channel '{eventMapFile.Channel}' already exists. Map '{Path.GetFileName(mapFile)}' will be skipped"); } } else { errorMaps.Add(Path.GetFileName(mapFile)); } } catch (SyntaxErrorException se) { errorMaps.Add(Path.GetFileName(mapFile)); Console.WriteLine(); l.Warn($"Syntax error in '{mapFile}':"); l.Fatal(se.Message); var lines = File.ReadLines(mapFile).ToList(); var fileContents = mapFile.ReadAllText(); var badLine = lines[se.Start.Line - 1]; Console.WriteLine(); l.Fatal( $"Bad line (or close to it) '{badLine}' has invalid data at column '{se.Start.Column}'"); if (fileContents.Contains('\t')) { Console.WriteLine(); l.Error( "Bad line contains one or more tab characters. Replace them with spaces"); Console.WriteLine(); l.Info(fileContents.Replace("\t", "<TAB>")); } } catch (YamlException ye) { errorMaps.Add(Path.GetFileName(mapFile)); Console.WriteLine(); l.Warn($"Syntax error in '{mapFile}':"); var fileContents = mapFile.ReadAllText(); l.Info(fileContents); if (ye.InnerException != null) { l.Fatal(ye.InnerException.Message); } Console.WriteLine(); l.Fatal("Verify all properties against example files or manual and try again."); } catch (Exception e) { l.Error($"Error loading map file '{mapFile}': {e.Message}"); } } if (errorMaps.Count > 0) { l.Error("\r\nThe following maps had errors. Scroll up to review errors, correct them, and try again."); foreach (var errorMap in errorMaps) { l.Info(errorMap); } l.Info(""); } return(errorMaps.Count > 0); }
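LoadMaps deserializes each .map file with YamlDotNet, validates it, and then keys the dictionary on "{EventId}-{CHANNEL}" so that a second map for the same event id and channel is skipped rather than overwriting the first. A small, self-contained sketch of that keying scheme follows; MiniMap is an illustrative stand-in for the real EventLogMap type, and the YAML is inline rather than read from a file.

// Assumes: using System; using System.Collections.Generic; using YamlDotNet.Serialization;
public class MiniMap              // stand-in for EventLogMap, illustration only
{
    public int EventId { get; set; }
    public string Channel { get; set; }
    public string Description { get; set; }
}

public static void LoadOneMap()
{
    const string yaml = "EventId: 4624\nChannel: Security\nDescription: An account was successfully logged on\n";

    var deserializer = new DeserializerBuilder().Build();
    var map = deserializer.Deserialize<MiniMap>(yaml);

    var maps = new Dictionary<string, MiniMap>();
    var key = $"{map.EventId}-{map.Channel.ToUpperInvariant()}"; // same key format as above

    if (maps.ContainsKey(key) == false)
    {
        maps.Add(key, map);       // first valid map for this id/channel wins
    }
    else
    {
        Console.WriteLine($"A map for '{key}' already exists; skipping");
    }
}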
private static void Main(string[] args) { ExceptionlessClient.Default.Startup("tYeWS6A5K5uItgpB44dnNy2qSb2xJxiQWRRGWebq"); SetupNLog(); _logger = LogManager.GetLogger("EvtxECmd"); _fluentCommandLineParser = new FluentCommandLineParser <ApplicationArguments> { IsCaseSensitive = false }; _fluentCommandLineParser.Setup(arg => arg.File) .As('f') .WithDescription("File to process. This or -d is required\r\n"); _fluentCommandLineParser.Setup(arg => arg.Directory) .As('d') .WithDescription("Directory to process that contains evtx files. This or -f is required"); _fluentCommandLineParser.Setup(arg => arg.CsvDirectory) .As("csv") .WithDescription( "Directory to save CSV formatted results to."); // This, --json, or --xml required _fluentCommandLineParser.Setup(arg => arg.CsvName) .As("csvf") .WithDescription( "File name to save CSV formatted results to. When present, overrides default name"); _fluentCommandLineParser.Setup(arg => arg.JsonDirectory) .As("json") .WithDescription( "Directory to save JSON formatted results to."); // This, --csv, or --xml required _fluentCommandLineParser.Setup(arg => arg.JsonName) .As("jsonf") .WithDescription( "File name to save JSON formatted results to. When present, overrides default name"); _fluentCommandLineParser.Setup(arg => arg.XmlDirectory) .As("xml") .WithDescription( "Directory to save XML formatted results to."); // This, --csv, or --json required _fluentCommandLineParser.Setup(arg => arg.XmlName) .As("xmlf") .WithDescription( "File name to save XML formatted results to. When present, overrides default name\r\n"); _fluentCommandLineParser.Setup(arg => arg.DateTimeFormat) .As("dt") .WithDescription( "The custom date/time format to use when displaying time stamps. Default is: yyyy-MM-dd HH:mm:ss.fffffff") .SetDefault("yyyy-MM-dd HH:mm:ss.fffffff"); _fluentCommandLineParser.Setup(arg => arg.IncludeIds) .As("inc") .WithDescription( "List of event IDs to process. All others are ignored. Overrides --exc Format is 4624,4625,5410") .SetDefault(string.Empty); _fluentCommandLineParser.Setup(arg => arg.ExcludeIds) .As("exc") .WithDescription( "List of event IDs to IGNORE. All others are included. Format is 4624,4625,5410") .SetDefault(string.Empty); _fluentCommandLineParser.Setup(arg => arg.FullJson) .As("fj") .WithDescription( "When true, export all available data when using --json. Default is FALSE.") .SetDefault(false); _fluentCommandLineParser.Setup(arg => arg.Metrics) .As("met") .WithDescription( "When true, show metrics about processed event log. Default is TRUE.\r\n") .SetDefault(true); _fluentCommandLineParser.Setup(arg => arg.MapsDirectory) .As("maps") .WithDescription( "The path where event maps are located. 
Defaults to 'Maps' folder where program was executed\r\n ") .SetDefault(Path.Combine(BaseDirectory, "Maps")); _fluentCommandLineParser.Setup(arg => arg.Debug) .As("debug") .WithDescription("Show debug information during processing").SetDefault(false); _fluentCommandLineParser.Setup(arg => arg.Trace) .As("trace") .WithDescription("Show trace information during processing\r\n").SetDefault(false); var header = $"EvtxECmd version {Assembly.GetExecutingAssembly().GetName().Version}" + "\r\n\r\nAuthor: Eric Zimmerman ([email protected])" + "\r\nhttps://github.com/EricZimmerman/evtx"; var footer = @"Examples: EvtxECmd.exe -f ""C:\Temp\Application.evtx"" --csv ""c:\temp\out"" --csvf MyOutputFile.csv" + "\r\n\t " + @" EvtxECmd.exe -f ""C:\Temp\Application.evtx"" --csv ""c:\temp\out""" + "\r\n\t " + @" EvtxECmd.exe -f ""C:\Temp\Application.evtx"" --json ""c:\temp\jsonout""" + "\r\n\t " + "\r\n\t" + " Short options (single letter) are prefixed with a single dash. Long commands are prefixed with two dashes\r\n"; _fluentCommandLineParser.SetupHelp("?", "help") .WithHeader(header) .Callback(text => _logger.Info(text + "\r\n" + footer)); var result = _fluentCommandLineParser.Parse(args); if (result.HelpCalled) { return; } if (result.HasErrors) { _logger.Error(""); _logger.Error(result.ErrorText); _fluentCommandLineParser.HelpOption.ShowHelp(_fluentCommandLineParser.Options); return; } if (_fluentCommandLineParser.Object.File.IsNullOrEmpty() && _fluentCommandLineParser.Object.Directory.IsNullOrEmpty()) { _fluentCommandLineParser.HelpOption.ShowHelp(_fluentCommandLineParser.Options); _logger.Warn("-f or -d is required. Exiting"); return; } _logger.Info(header); _logger.Info(""); _logger.Info($"Command line: {string.Join(" ", Environment.GetCommandLineArgs().Skip(1))}\r\n"); if (IsAdministrator() == false) { _logger.Fatal("Warning: Administrator privileges not found!\r\n"); } if (_fluentCommandLineParser.Object.Debug) { LogManager.Configuration.LoggingRules.First().EnableLoggingForLevel(LogLevel.Debug); } if (_fluentCommandLineParser.Object.Trace) { LogManager.Configuration.LoggingRules.First().EnableLoggingForLevel(LogLevel.Trace); } LogManager.ReconfigExistingLoggers(); var sw = new Stopwatch(); sw.Start(); var ts = DateTimeOffset.UtcNow; _errorFiles = new Dictionary <string, int>(); if (_fluentCommandLineParser.Object.JsonDirectory.IsNullOrEmpty() == false) { if (Directory.Exists(_fluentCommandLineParser.Object.JsonDirectory) == false) { _logger.Warn( $"Path to '{_fluentCommandLineParser.Object.JsonDirectory}' doesn't exist. Creating..."); try { Directory.CreateDirectory(_fluentCommandLineParser.Object.JsonDirectory); } catch (Exception) { _logger.Fatal( $"Unable to create directory '{_fluentCommandLineParser.Object.JsonDirectory}'. Does a file with the same name exist? Exiting"); return; } } var outName = $"{ts:yyyyMMddHHmmss}_EvtxECmd_Output.json"; if (_fluentCommandLineParser.Object.JsonName.IsNullOrEmpty() == false) { outName = Path.GetFileName(_fluentCommandLineParser.Object.JsonName); } var outFile = Path.Combine(_fluentCommandLineParser.Object.JsonDirectory, outName); _logger.Warn($"json output will be saved to '{outFile}'\r\n"); try { _swJson = new StreamWriter(outFile, false, Encoding.UTF8); } catch (Exception) { _logger.Error($"Unable to open '{outFile}'! Is it in use? 
Exiting!\r\n"); Environment.Exit(0); } } if (_fluentCommandLineParser.Object.XmlDirectory.IsNullOrEmpty() == false) { if (Directory.Exists(_fluentCommandLineParser.Object.XmlDirectory) == false) { _logger.Warn( $"Path to '{_fluentCommandLineParser.Object.XmlDirectory}' doesn't exist. Creating..."); try { Directory.CreateDirectory(_fluentCommandLineParser.Object.XmlDirectory); } catch (Exception) { _logger.Fatal( $"Unable to create directory '{_fluentCommandLineParser.Object.XmlDirectory}'. Does a file with the same name exist? Exiting"); return; } } var outName = $"{ts:yyyyMMddHHmmss}_EvtxECmd_Output.xml"; if (_fluentCommandLineParser.Object.XmlName.IsNullOrEmpty() == false) { outName = Path.GetFileName(_fluentCommandLineParser.Object.XmlName); } var outFile = Path.Combine(_fluentCommandLineParser.Object.XmlDirectory, outName); _logger.Warn($"XML output will be saved to '{outFile}'\r\n"); try { _swXml = new StreamWriter(outFile, false, Encoding.UTF8); } catch (Exception) { _logger.Error($"Unable to open '{outFile}'! Is it in use? Exiting!\r\n"); Environment.Exit(0); } } if (_fluentCommandLineParser.Object.CsvDirectory.IsNullOrEmpty() == false) { if (Directory.Exists(_fluentCommandLineParser.Object.CsvDirectory) == false) { _logger.Warn( $"Path to '{_fluentCommandLineParser.Object.CsvDirectory}' doesn't exist. Creating..."); try { Directory.CreateDirectory(_fluentCommandLineParser.Object.CsvDirectory); } catch (Exception) { _logger.Fatal( $"Unable to create directory '{_fluentCommandLineParser.Object.CsvDirectory}'. Does a file with the same name exist? Exiting"); return; } } var outName = $"{ts:yyyyMMddHHmmss}_EvtxECmd_Output.csv"; if (_fluentCommandLineParser.Object.CsvName.IsNullOrEmpty() == false) { outName = Path.GetFileName(_fluentCommandLineParser.Object.CsvName); } var outFile = Path.Combine(_fluentCommandLineParser.Object.CsvDirectory, outName); _logger.Warn($"CSV output will be saved to '{outFile}'\r\n"); try { _swCsv = new StreamWriter(outFile, false, Encoding.UTF8); _csvWriter = new CsvWriter(_swCsv); } catch (Exception) { _logger.Error($"Unable to open '{outFile}'! Is it in use? 
Exiting!\r\n"); Environment.Exit(0); } var foo = _csvWriter.Configuration.AutoMap <EventRecord>(); foo.Map(t => t.PayloadXml).Ignore(); foo.Map(t => t.RecordPosition).Ignore(); foo.Map(t => t.Size).Ignore(); foo.Map(t => t.Timestamp).Ignore(); foo.Map(t => t.RecordNumber).Index(0); foo.Map(t => t.TimeCreated).Index(1); foo.Map(t => t.TimeCreated).ConvertUsing(t => $"{t.TimeCreated.ToString(_fluentCommandLineParser.Object.DateTimeFormat)}"); foo.Map(t => t.EventId).Index(2); foo.Map(t => t.Level).Index(3); foo.Map(t => t.Provider).Index(4); foo.Map(t => t.Channel).Index(5); foo.Map(t => t.ProcessId).Index(6); foo.Map(t => t.ThreadId).Index(7); foo.Map(t => t.Computer).Index(8); foo.Map(t => t.UserId).Index(9); foo.Map(t => t.MapDescription).Index(10); foo.Map(t => t.UserName).Index(11); foo.Map(t => t.RemoteHost).Index(12); foo.Map(t => t.PayloadData1).Index(13); foo.Map(t => t.PayloadData2).Index(14); foo.Map(t => t.PayloadData3).Index(15); foo.Map(t => t.PayloadData4).Index(16); foo.Map(t => t.PayloadData5).Index(17); foo.Map(t => t.PayloadData6).Index(18); foo.Map(t => t.ExecutableInfo).Index(19); foo.Map(t => t.SourceFile).Index(20); _csvWriter.Configuration.RegisterClassMap(foo); _csvWriter.WriteHeader <EventRecord>(); _csvWriter.NextRecord(); } if (Directory.Exists(_fluentCommandLineParser.Object.MapsDirectory) == false) { _logger.Warn( $"Maps directory '{_fluentCommandLineParser.Object.MapsDirectory}' does not exist! Event ID maps will not be loaded!!"); } else { _logger.Debug($"Loading maps from '{Path.GetFullPath(_fluentCommandLineParser.Object.MapsDirectory)}'"); var errors = EventLog.LoadMaps(Path.GetFullPath(_fluentCommandLineParser.Object.MapsDirectory)); if (errors) { return; } _logger.Info($"Maps loaded: {EventLog.EventLogMaps.Count:N0}"); } _includeIds = new HashSet <int>(); _excludeIds = new HashSet <int>(); if (_fluentCommandLineParser.Object.ExcludeIds.IsNullOrEmpty() == false) { var excSegs = _fluentCommandLineParser.Object.ExcludeIds.Split(','); foreach (var incSeg in excSegs) { if (int.TryParse(incSeg, out var goodId)) { _excludeIds.Add(goodId); } } } if (_fluentCommandLineParser.Object.IncludeIds.IsNullOrEmpty() == false) { _excludeIds.Clear(); var incSegs = _fluentCommandLineParser.Object.IncludeIds.Split(','); foreach (var incSeg in incSegs) { if (int.TryParse(incSeg, out var goodId)) { _includeIds.Add(goodId); } } } if (_fluentCommandLineParser.Object.File.IsNullOrEmpty() == false) { if (File.Exists(_fluentCommandLineParser.Object.File) == false) { _logger.Warn($"'{_fluentCommandLineParser.Object.File}' does not exist! 
Exiting"); return; } ProcessFile(_fluentCommandLineParser.Object.File); } else { _logger.Info($"Looking for event log files in '{_fluentCommandLineParser.Object.Directory}'"); _logger.Info(""); var f = new DirectoryEnumerationFilters(); f.InclusionFilter = fsei => fsei.Extension.ToUpperInvariant() == ".EVTX"; f.RecursionFilter = entryInfo => !entryInfo.IsMountPoint && !entryInfo.IsSymbolicLink; f.ErrorFilter = (errorCode, errorMessage, pathProcessed) => true; var dirEnumOptions = DirectoryEnumerationOptions.Files | DirectoryEnumerationOptions.Recursive | DirectoryEnumerationOptions.SkipReparsePoints | DirectoryEnumerationOptions.ContinueOnException | DirectoryEnumerationOptions.BasicSearch; var files2 = Directory.EnumerateFileSystemEntries(Path.GetFullPath(_fluentCommandLineParser.Object.Directory), dirEnumOptions, f); foreach (var file in files2) { ProcessFile(file); } } _swCsv?.Flush(); _swCsv?.Close(); _swJson?.Flush(); _swJson?.Close(); _swXml?.Flush(); _swXml?.Close(); sw.Stop(); _logger.Info(""); var suff = string.Empty; if (_fileCount != 1) { suff = "s"; } _logger.Error( $"Processed {_fileCount:N0} file{suff} in {sw.Elapsed.TotalSeconds:N4} seconds\r\n"); if (_errorFiles.Count > 0) { _logger.Info(""); _logger.Error("Files with errors"); foreach (var errorFile in _errorFiles) { _logger.Info($"'{errorFile.Key}' error count: {errorFile.Value:N0}"); } _logger.Info(""); } }
static void Main(string[] args) { Exceptionless.ExceptionlessClient.Default.Startup("fTcEOUkt1CxljTyOZfsr8AcSGQwWE4aYaYqk7cE1"); SetupNLog(); _logger = LogManager.GetCurrentClassLogger(); _fluentCommandLineParser = new FluentCommandLineParser <ApplicationArguments> { IsCaseSensitive = false }; _fluentCommandLineParser.Setup(arg => arg.Directory) .As('d') .WithDescription( "Directory to look for hives (recursively). -f or -d is required."); _fluentCommandLineParser.Setup(arg => arg.HiveFile) .As('f') .WithDescription("Hive to process. -f or -d is required.\r\n"); _fluentCommandLineParser.Setup(arg => arg.OutDirectory) .As("out") .WithDescription( "Directory to save updated hives to. Only dirty hives with logs applied will end up in --out directory\r\n"); _fluentCommandLineParser.Setup(arg => arg.CopyAlways) .As("ca") .WithDescription( "When true, always copy hives to --out directory, even if they aren't dirty. Default is TRUE").SetDefault(true); _fluentCommandLineParser.Setup(arg => arg.CompressNames) .As("cn") .WithDescription( "When true, compress names for profile based hives. Default is TRUE\r\n").SetDefault(true); _fluentCommandLineParser.Setup(arg => arg.Debug) .As("debug") .WithDescription("Show debug information during processing").SetDefault(false); _fluentCommandLineParser.Setup(arg => arg.Trace) .As("trace") .WithDescription("Show trace information during processing").SetDefault(false); var header = $"rla version {Assembly.GetExecutingAssembly().GetName().Version}" + "\r\n\r\nAuthor: Eric Zimmerman ([email protected])" + "\r\nhttps://github.com/EricZimmerman/RECmd\r\n\r\nNote: Enclose all strings containing spaces (and all RegEx) with double quotes"; var footer = @"Example: rla.exe --f ""C:\Temp\UsrClass 1.dat"" --out C:\temp" + "\r\n\t " + @"rla.exe --d ""D:\temp\"" --out c:\temp" + "\r\n"; _fluentCommandLineParser.SetupHelp("?", "help").WithHeader(header) .Callback(text => _logger.Info(text + "\r\n" + footer)); var result = _fluentCommandLineParser.Parse(args); if (_fluentCommandLineParser.Object.HiveFile.IsNullOrEmpty() == false || _fluentCommandLineParser.Object.Directory.IsNullOrEmpty() == false) { if (_fluentCommandLineParser.Object.OutDirectory.IsNullOrEmpty()) { _fluentCommandLineParser.HelpOption.ShowHelp(_fluentCommandLineParser.Options); Console.WriteLine(); _logger.Warn($"--out is required. 
Exiting"); Console.WriteLine(); return; } } if (_fluentCommandLineParser.Object.Debug) { foreach (var r in LogManager.Configuration.LoggingRules) { r.EnableLoggingForLevel(LogLevel.Debug); } LogManager.ReconfigExistingLoggers(); _logger.Debug("Enabled debug messages..."); } if (_fluentCommandLineParser.Object.Trace) { foreach (var r in LogManager.Configuration.LoggingRules) { r.EnableLoggingForLevel(LogLevel.Trace); } LogManager.ReconfigExistingLoggers(); _logger.Trace("Enabled trace messages..."); } if (result.HelpCalled) { return; } if (result.HasErrors) { _logger.Error(""); _logger.Error(result.ErrorText); _fluentCommandLineParser.HelpOption.ShowHelp(_fluentCommandLineParser.Options); return; } var hivesToProcess = new List <string>(); _logger.Info(header); _logger.Info(""); _logger.Info($"Command line: {string.Join(" ", Environment.GetCommandLineArgs().Skip(1))}\r\n"); if (_fluentCommandLineParser.Object.HiveFile?.Length > 0) { if (File.Exists(_fluentCommandLineParser.Object.HiveFile) == false) { _logger.Error($"File '{_fluentCommandLineParser.Object.HiveFile}' does not exist."); return; } hivesToProcess.Add(_fluentCommandLineParser.Object.HiveFile); } else if (_fluentCommandLineParser.Object.Directory?.Length > 0) { if (Directory.Exists(_fluentCommandLineParser.Object.Directory) == false) { _logger.Error($"Directory '{_fluentCommandLineParser.Object.Directory}' does not exist."); return; } var okFileParts = new HashSet <string>(); okFileParts.Add("USRCLASS"); okFileParts.Add("NTUSER"); okFileParts.Add("SYSTEM"); okFileParts.Add("SAM"); okFileParts.Add("SOFTWARE"); okFileParts.Add("AMCACHE"); okFileParts.Add("SYSCACHE"); okFileParts.Add("SECURITY"); okFileParts.Add("DRIVERS"); okFileParts.Add("COMPONENTS"); var f = new DirectoryEnumerationFilters(); f.InclusionFilter = fsei => { if (fsei.Extension.ToUpperInvariant() == ".LOG1" || fsei.Extension.ToUpperInvariant() == ".LOG2" || fsei.Extension.ToUpperInvariant() == ".DLL" || fsei.Extension.ToUpperInvariant() == ".LOG" || fsei.Extension.ToUpperInvariant() == ".CSV" || fsei.Extension.ToUpperInvariant() == ".BLF" || fsei.Extension.ToUpperInvariant() == ".REGTRANS-MS" || fsei.Extension.ToUpperInvariant() == ".EXE" || fsei.Extension.ToUpperInvariant() == ".TXT" || fsei.Extension.ToUpperInvariant() == ".INI") { return(false); } var foundOkFilePart = false; foreach (var okFilePart in okFileParts) { if (fsei.FileName.ToUpperInvariant().Contains(okFilePart)) { foundOkFilePart = true; // return true; } } if (foundOkFilePart == false) { return(false); } var fi = new FileInfo(fsei.FullPath); if (fi.Length < 4) { return(false); } try { using (var fs = new FileStream(fsei.FullPath, FileMode.Open, FileAccess.Read)) { using (var br = new BinaryReader(fs, new ASCIIEncoding())) { try { var chunk = br.ReadBytes(4); var sig = BitConverter.ToInt32(chunk, 0); if (sig == 0x66676572) { return(true); } } catch (Exception) { } return(false); } } } catch (IOException) { if (Helper.IsAdministrator() == false) { throw new UnauthorizedAccessException("Administrator privileges not found!"); } var files = new List <string>(); files.Add(fsei.FullPath); var rawf = Helper.GetFiles(files); if (rawf.First().FileStream.Length == 0) { return(false); } try { var b = new byte[4]; rawf.First().FileStream.ReadExactly(b, 4); var sig = BitConverter.ToInt32(b, 0); if (sig == 0x66676572) { return(true); } } catch (Exception) { } return(false); } }; f.RecursionFilter = entryInfo => !entryInfo.IsMountPoint && !entryInfo.IsSymbolicLink; f.ErrorFilter = (errorCode, errorMessage, 
pathProcessed) => true; var dirEnumOptions = DirectoryEnumerationOptions.Files | DirectoryEnumerationOptions.Recursive | DirectoryEnumerationOptions.SkipReparsePoints | DirectoryEnumerationOptions.ContinueOnException | DirectoryEnumerationOptions.BasicSearch; if (Directory.Exists(_fluentCommandLineParser.Object.OutDirectory) == false) { _logger.Info($"Creating --out directory '{_fluentCommandLineParser.Object.OutDirectory}'..."); Directory.CreateDirectory(_fluentCommandLineParser.Object.OutDirectory); } else { if (Directory.GetFiles(_fluentCommandLineParser.Object.OutDirectory).Length > 0 && _fluentCommandLineParser.Object.CompressNames) { _logger.Warn($"'{_fluentCommandLineParser.Object.OutDirectory}' contains files! This may cause --cn to revert back to uncompressed names. Ideally, '{_fluentCommandLineParser.Object.OutDirectory}' should be empty."); Console.WriteLine(); } } _logger.Fatal($"Searching '{_fluentCommandLineParser.Object.Directory}' for hives..."); var files2 = Directory.EnumerateFileSystemEntries(_fluentCommandLineParser.Object.Directory, dirEnumOptions, f); var count = 0; try { hivesToProcess.AddRange(files2); count = hivesToProcess.Count; _logger.Info($"\tHives found: {count:N0}"); } catch (Exception ex) { _logger.Fatal($"Could not access all files in '{_fluentCommandLineParser.Object.Directory}'! Error: {ex.Message}"); _logger.Error(""); _logger.Fatal("Rerun the program with Administrator privileges to try again\r\n"); //Environment.Exit(-1); } } else { _fluentCommandLineParser.HelpOption.ShowHelp(_fluentCommandLineParser.Options); return; } if (hivesToProcess.Count == 0) { _logger.Warn("No hives were found. Exiting..."); return; } _sw = new Stopwatch(); _sw.Start(); foreach (var hiveToProcess in hivesToProcess) { _logger.Info(""); byte[] updatedBytes = null; _logger.Info($"Processing hive '{hiveToProcess}'"); if (File.Exists(hiveToProcess) == false) { _logger.Warn($"'{hiveToProcess}' does not exist. Skipping"); continue; } try { RegistryHive reg; var dirname = Path.GetDirectoryName(hiveToProcess); var hiveBase = Path.GetFileName(hiveToProcess); List <RawCopyReturn> rawFiles = null; try { using (var fs = new FileStream(hiveToProcess, FileMode.Open, FileAccess.Read)) { reg = new RegistryHive(fs.ReadFully(), hiveToProcess) { }; } } catch (IOException) { //file is in use if (Helper.IsAdministrator() == false) { throw new UnauthorizedAccessException("Administrator privileges not found!"); } _logger.Warn($"\t'{hiveToProcess}' is in use. Rerouting...\r\n"); var files = new List <string>(); files.Add(hiveToProcess); var logFiles = Directory.GetFiles(dirname, $"{hiveBase}.LOG?"); foreach (var logFile in logFiles) { files.Add(logFile); } rawFiles = Helper.GetFiles(files); if (rawFiles.First().FileStream.Length == 0) { continue; } var bb = rawFiles.First().FileStream.ReadFully(); reg = new RegistryHive(bb, rawFiles.First().InputFilename); } if (reg.Header.PrimarySequenceNumber != reg.Header.SecondarySequenceNumber) { if (string.IsNullOrEmpty(dirname)) { dirname = "."; } var logFiles = Directory.GetFiles(dirname, $"{hiveBase}.LOG?"); if (logFiles.Length == 0) { if (_fluentCommandLineParser.Object.CopyAlways) { _logger.Info($"\tHive '{hiveToProcess}' is dirty, but no logs were found in the same directory. --ca is true. Copying..."); updatedBytes = File.ReadAllBytes(hiveToProcess); } else { _logger.Info($"\tHive '{hiveToProcess}' is dirty and no transaction logs were found in the same directory. --ca is false. 
Skipping..."); continue; } } if (updatedBytes == null) { if (rawFiles != null) { var lt = new List <TransactionLogFileInfo>(); foreach (var rawCopyReturn in rawFiles.Skip(1).ToList()) { var bb1 = rawCopyReturn.FileStream.ReadFully(); var tt = new TransactionLogFileInfo(rawCopyReturn.InputFilename, bb1); lt.Add(tt); } updatedBytes = reg.ProcessTransactionLogs(lt); } else { updatedBytes = reg.ProcessTransactionLogs(logFiles.ToList()); } } } if (updatedBytes == null) { if (_fluentCommandLineParser.Object.CopyAlways) { _logger.Info($"\tHive '{hiveToProcess}' is not dirty, but --ca is true. Copying..."); updatedBytes = File.ReadAllBytes(hiveToProcess); } else { _logger.Info($"\tHive '{hiveToProcess}' is not dirty and --ca is false. Skipping..."); continue; } } var outFile = hiveToProcess.Replace(":", "").Replace("\\", "_"); var outFileAll = Path.Combine(_fluentCommandLineParser.Object.OutDirectory, outFile); if (_fluentCommandLineParser.Object.CompressNames && (outFileAll.ToUpperInvariant().Contains("NTUSER") || outFileAll.ToUpperInvariant().Contains("USRCLASS"))) { var dl = hiveToProcess[0].ToString(); var segs = hiveToProcess.SplitAndTrim('\\'); var profile = segs[2]; var filename = Path.GetFileName(hiveToProcess); var outFile2 = $"{dl}_{profile}_{filename}"; outFileAll = Path.Combine(_fluentCommandLineParser.Object.OutDirectory, outFile2); } if (File.Exists(outFileAll)) { var oldOut = outFileAll; outFileAll = Path.Combine(_fluentCommandLineParser.Object.OutDirectory, outFile); _logger.Warn($"\tFile '{oldOut}' exists! Saving as non-compressed name: '{outFileAll}'"); } _logger.Fatal($"\tSaving updated hive to '{outFileAll}'"); using (var fs = new FileStream(outFileAll, FileMode.Create)) { fs.Write(updatedBytes, 0, updatedBytes.Length); fs.Flush(); fs.Close(); } } catch (Exception ex) { if (ex.Message.Contains("Sequence numbers do not match and transaction") == false) { if (ex.Message.Contains("Administrator privileges not found")) { _logger.Fatal($"Could not access '{hiveToProcess}' because it is in use"); _logger.Error(""); _logger.Fatal("Rerun the program with Administrator privileges to try again\r\n"); } else { _logger.Error($"There was an error: {ex.Message}"); } } } } _sw.Stop(); _logger.Info(""); _logger.Info($"Total processing time: {_sw.Elapsed.TotalSeconds:N3} seconds"); _logger.Info(""); }