public static Job GetJob(int jobId, Logger defaultLogger)
{
    // Looks up a job by id and, when it is still pending, transitions it to Running.
    // Returns null when the job does not exist or is not in a startable state.
    using (var repo = new JobRepository())
    {
        defaultLogger.Info("Passed job with ID of {0}", jobId);

        var job = repo.GetJobById(jobId);
        if (job == null)
        {
            defaultLogger.Warn("Job not found");
            return null;
        }

        defaultLogger.Info("Job found. URL is {0} and branch is {1}", job.Url, job.Branch);
        if (job.State != JobState.Pending)
        {
            defaultLogger.Warn("Cannot start job. Current state is {0}", job.State);
            return null;
        }

        repo.UpdateStateForJob(job, JobState.Running);
        return job;
    }
}
public PokemonSearchCommands()
{
    // Loads the pokemon list and pokemon-ability lookup tables from their JSON
    // data files, warning (instead of throwing) when a data file is absent.
    _log = LogManager.GetCurrentClassLogger();
    if (File.Exists(PokemonListFile))
    {
        pokemons = JsonConvert.DeserializeObject<Dictionary<string, SearchPokemon>>(File.ReadAllText(PokemonListFile));
    }
    else
        // FIX: this warning previously said "Pokemon abilities not loaded" — a copy-paste
        // from the abilities branch below; it actually concerns the pokemon list file.
        _log.Warn(PokemonListFile + " is missing. Pokemons not loaded.");
    if (File.Exists(PokemonAbilitiesFile))
        pokemonAbilities = JsonConvert.DeserializeObject<Dictionary<string, SearchPokemonAbility>>(File.ReadAllText(PokemonAbilitiesFile));
    else
        _log.Warn(PokemonAbilitiesFile + " is missing. Pokemon abilities not loaded.");
}
public APIProvider()
{
    // Provider bootstrap: creates the two loggers, loads the NLog configuration,
    // wires the polling timers, initializes callbacks/settings, and finally
    // registers this instance with the SmartQuant provider manager.
    mdlog = LogManager.GetLogger(Name + ".M");   // market-data logger ("<Name>.M")
    tdlog = LogManager.GetLogger(Name + ".T");   // trading logger ("<Name>.T")
    try
    {
        // Load the NLog config shipped next to the binaries; a missing or broken
        // file is only worth a warning — the provider still runs with defaults.
        LogManager.Configuration = new XmlLoggingConfiguration(@"Bin/QuantBox.nlog");
    }
    catch(Exception ex)
    {
        tdlog.Warn(ex.Message);
    }
    // Hook up the periodic timers (connect / disconnect / account / position polling).
    timerConnect.Elapsed += timerConnect_Elapsed;
    timerDisconnect.Elapsed += timerDisconnect_Elapsed;
    timerAccount.Elapsed += timerAccount_Elapsed;
    // NOTE(review): "Ponstion" looks like a typo for "Position"; the field is declared
    // elsewhere, so the name is left untouched here.
    timerPonstion.Elapsed += timerPonstion_Elapsed;
    InitCallbacks();
    InitSettings();
    BarFactory = new SmartQuant.Providers.BarFactory();
    status = ProviderStatus.Unknown;
    // Registration happens after all other initialization — presumably the manager
    // expects a fully wired provider; TODO confirm the ordering requirement.
    SmartQuant.Providers.ProviderManager.Add(this);
}
static UnitConverterCommands()
{
    // Seeds the converter-unit DB table from data/units.json (only when the table
    // is empty) and caches the parsed units in the static Units list.
    _log = LogManager.GetCurrentClassLogger();
    try
    {
        // Triggers are flattened into a single '|'-separated string for storage.
        var data = JsonConvert.DeserializeObject<List<MeasurementUnit>>(File.ReadAllText("data/units.json"))
            .Select(u => new ConvertUnit()
            {
                Modifier = u.Modifier,
                UnitType = u.UnitType,
                InternalTrigger = string.Join("|", u.Triggers)
            }).ToArray();

        using (var uow = DbHandler.UnitOfWork())
        {
            // Only seed on first run — never duplicate rows on subsequent startups.
            if (uow.ConverterUnits.Empty())
            {
                uow.ConverterUnits.AddRange(data);
                uow.Complete();
            }
        }
        Units = data.ToList();
    }
    catch (Exception e)
    {
        // FIX: log the full exception so the stack trace is preserved
        // (previously only e.Message was concatenated into the text).
        _log.Warn(e, "Could not load units.");
    }
}
public static void ProcessResults(JSONDiff jd, Logger l)
{
    // Writes every diff message to the log at its mapped severity; prints a
    // success banner when there are no messages at all.
    if (!jd.Messages.Any())
    {
        l.Info("--Success--");
        l.Info("-----------");
        return;
    }

    l.Info("--Issues--");
    foreach (var item in jd.Messages)
    {
        var text = $"{item.ProblemType} | {item.Message?.Trim()} | {item.Exception?.Trim()}";
        text = text.Replace("\r\n", "");
        switch (item.WarnLevel)
        {
            case JSONWarnLevel.Warn:
                l.Warn(text);
                break;
            case JSONWarnLevel.Error:
                l.Error(text);
                break;
            case JSONWarnLevel.Fatal:
                l.Fatal(text);
                break;
            default:
                // An unmapped severity is a programming error — fail loudly.
                throw new ArgumentOutOfRangeException();
        }
    }
    l.Info("-----------");
}
private void TraceMessageInternal(LogLvl level, string message)
{
    // Routes a message to the appropriate logger, dropping anything more
    // verbose than the configured threshold.
    if (level > desiredLogLevel)
        return;

    switch (level)
    {
        case LogLvl.Debug:
            variableLogger?.Debug(message);
            break;
        case LogLvl.Info:
            variableLogger?.Info(message);
            break;
        case LogLvl.Warning:
            variableLogger?.Warn(message);
            break;
        case LogLvl.Error:
            // Errors go to the dedicated error logger.
            variableErrorLogger?.Error(message);
            break;
        default:
            throw new ArgumentOutOfRangeException(nameof(level), level, null);
    }
}
static void Main()
{
    // Service entry point: resolves the sensu client from the IoC container and
    // runs it either interactively or as a Windows service.
    _log = LogManager.GetCurrentClassLogger();
    _log.Warn("Service is about to start");
#if DEBUG
    LogManager.GlobalThreshold = LogLevel.Trace;
#endif
    AppDomain.CurrentDomain.UnhandledException += CurrentDomainUnhandledException;
    try
    {
        var container = new Container(new SensuClientRegistry());
        var sensuClient = container.GetInstance<ISensuClient>() as ServiceBase;
        var servicesToRun = new ServiceBase[] { sensuClient };
        if (Environment.UserInteractive)
            RunInteractive(servicesToRun);
        else
            ServiceBase.Run(servicesToRun);
    }
    catch (Exception exception)
    {
        _log.Error(exception, "Error in startup sensu-client.");
    }
}
public AutoAssignRoleCommands()
{
    // Subscribes to UserJoined and assigns the guild's configured auto-role (if any)
    // to every new member; role assignment runs fire-and-forget on the thread pool.
    var client = NadekoBot.Client;
    this._log = LogManager.GetCurrentClassLogger();
    client.UserJoined += (user) =>
    {
        var fireAndForget = Task.Run(async () =>
        {
            try
            {
                GuildConfig conf;
                using (var uow = DbHandler.UnitOfWork())
                {
                    conf = uow.GuildConfigs.For(user.Guild.Id);
                }

                // 0 means "no auto-assign role configured" for this guild.
                if (conf.AutoAssignRoleId == 0)
                    return;

                var role = user.Guild.Roles.FirstOrDefault(r => r.Id == conf.AutoAssignRoleId);
                if (role != null)
                    await user.AddRolesAsync(role);
            }
            catch (Exception ex)
            {
                _log.Warn(ex);
            }
        });
        return Task.CompletedTask;
    };
}
static void Main(string[] args)
{
    // Demo driver for BookListService: round-trips the catalog through a binary
    // file, then exercises search, sort, and (failing) removal.
    try
    {
        log = LogManager.GetCurrentClassLogger();
        BookListService bls = new BookListService();

        log.Debug("Старт записи в двоичный файл");
        WriteDefaultValues(fileName);
        log.Debug("Окончание записи в двоичный файл");

        log.Debug("Старт чтения данных из двоичног файла");
        bls = ReadDefaultValues(fileName);
        log.Debug("Окончание чтения данных из двоичног файла");

        // Search by tag (year == "1999").
        Book[] find = bls.FindByTag("1999", EnumTag.Year);
        foreach (Book b in find)
            Console.WriteLine(b);
        Console.WriteLine("-------------------------");

        // Sort by tag (page count).
        Book[] sort = bls.SortBooksByTag(EnumTag.Page);
        foreach (Book b in sort)
            Console.WriteLine(b);

        // Remove a book, then remove it again to exercise the missing-book path.
        bls.RemoveBook(sort[0]);
        log.Warn("Попытка удаления книги отсутствующей в каталоге");
        bls.RemoveBook(sort[0]);
    }
    catch (Exception e)
    {
        // FIX: log the exception object so the stack trace is preserved
        // (previously only e.Message was logged).
        log.Error(e, e.Message);
    }
}
public JokeCommands()
{
    // Loads WoW jokes and magic items from their JSON data files,
    // logging a warning for each file that is absent.
    _log = LogManager.GetCurrentClassLogger();

    if (!File.Exists("data/wowjokes.json"))
        _log.Warn("data/wowjokes.json is missing. WOW Jokes are not loaded.");
    else
        wowJokes = JsonConvert.DeserializeObject<List<WoWJoke>>(File.ReadAllText("data/wowjokes.json"));

    if (!File.Exists("data/magicitems.json"))
        _log.Warn("data/magicitems.json is missing. Magic items are not loaded.");
    else
        magicItems = JsonConvert.DeserializeObject<List<MagicItem>>(File.ReadAllText("data/magicitems.json"));
}
public BotCredentials()
{
    // Loads bot credentials from credentials.json, with environment variables
    // prefixed "NadekoBot_" as an alternate source; any failure past the example
    // file write is fatal (logged and rethrown).
    _log = LogManager.GetCurrentClassLogger();

    try
    {
        // Best-effort: keep an up-to-date example file next to the real one.
        File.WriteAllText("./credentials_example.json", JsonConvert.SerializeObject(new CredentialsModel(), Formatting.Indented));
    }
    catch { } // deliberately ignored — the example file is purely informational

    if(!File.Exists(credsFileName))
        _log.Warn($"credentials.json is missing. Attempting to load creds from environment variables prefixed with 'NadekoBot_'. Example is in {Path.GetFullPath("./credentials_example.json")}");

    try
    {
        // The JSON file is optional (second argument 'true'); env vars are layered on top.
        var configBuilder = new ConfigurationBuilder();
        configBuilder.AddJsonFile(credsFileName, true)
            .AddEnvironmentVariables("NadekoBot_");
        var data = configBuilder.Build();

        // Token is the only strictly required value.
        Token = data[nameof(Token)];
        if (string.IsNullOrWhiteSpace(Token))
            throw new ArgumentNullException(nameof(Token), "Token is missing from credentials.json or Environment varibles.");
        OwnerIds = data.GetSection("OwnerIds").GetChildren().Select(c => ulong.Parse(c.Value)).ToArray();
        LoLApiKey = data[nameof(LoLApiKey)];
        GoogleApiKey = data[nameof(GoogleApiKey)];
        MashapeKey = data[nameof(MashapeKey)];
        OsuApiKey = data[nameof(OsuApiKey)];

        // Shard count: unparsable input yields 0 via TryParse, then clamped to >= 1.
        int ts = 1;
        int.TryParse(data[nameof(TotalShards)], out ts);
        TotalShards = ts < 1 ? 1 : ts;

        // ClientId: defaults to 0 when absent or unparsable.
        ulong clId = 0;
        ulong.TryParse(data[nameof(ClientId)], out clId);
        ClientId = clId;

        SoundCloudClientId = data[nameof(SoundCloudClientId)];
        CarbonKey = data[nameof(CarbonKey)];

        // Database: local sqlite file unless the "db" section overrides type/connection string.
        var dbSection = data.GetSection("db");
        Db = new DBConfig(string.IsNullOrWhiteSpace(dbSection["Type"])
            ? "sqlite"
            : dbSection["Type"],
            string.IsNullOrWhiteSpace(dbSection["ConnectionString"])
            ? "Filename=./data/NadekoBot.db"
            : dbSection["ConnectionString"]);
    }
    catch (Exception ex)
    {
        // Log message and full exception separately, then let the process die —
        // the bot cannot run without credentials.
        _log.Fatal(ex.Message);
        _log.Fatal(ex);
        throw;
    }
}
public RemindCommands()
{
    // Re-arms every persisted reminder at startup and caches the message format.
    _log = LogManager.GetCurrentClassLogger();

    List<Reminder> reminders;
    using (var uow = DbHandler.UnitOfWork())
    {
        reminders = uow.Reminders.GetAll().ToList();
        RemindMessageFormat = uow.BotConfig.GetOrCreate().RemindMessageFormat;
    }

    foreach (var reminder in reminders)
    {
        try
        {
            // Fire-and-forget: the returned task is intentionally not awaited.
            var pending = StartReminder(reminder);
        }
        catch (Exception ex)
        {
            _log.Warn(ex);
        }
    }
}
public TCPTransport(IPAddress server, int serverPort)
{
    // Opens a TCP connection to the given server and wraps its stream in
    // buffered binary reader/writer pairs; failures are logged and rethrown
    // as TransportException.
    this.serverIP = server;
    this.serverPort = serverPort;
    logger = LogManager.GetLogger("TCPTransport");
    try
    {
        tcpClient = new TcpClient();
        tcpClient.Connect(server, serverPort);
        this.buff = new BufferedStream(tcpClient.GetStream());
        this.bWriter = new BinaryWriter(buff);
        this.bReader = new BinaryReader(tcpClient.GetStream());
        String message = string.Format("Connected to server :" + server.ToString() + " : " + serverPort);
        logger.Trace(message);
    }
    catch (Exception e)
    {
        String message = string.Format("Could not connect to server :" + server.ToString() + " : " + serverPort);
        // FIX: previously logger.Warn("Infinispan.DotNetClient", message) — NLog treats
        // the first argument as the format string, so `message` was never logged.
        // Log the real message together with the exception.
        logger.Warn(e, message);
        throw new TransportException(message, e);
    }
}
static void Main(string[] args)
{
    // Registers the custom SignalTarget with NLog, then emits 100 rounds of
    // sample messages at every level with a random delay between rounds.
    ConfigurationItemFactory.Default.Targets.RegisterDefinition("SignalTarget", typeof(SignalTarget));
    Logger = LogManager.GetCurrentClassLogger(typeof(SignalTarget));
    var rnd = new Random((int)DateTime.Now.Ticks);
    for (int i = 0; i < 100; i++)
    {
        Logger.Trace("Sample trace message from NLog");
        Logger.Debug("Sample debug message from NLog");
        Logger.Info("Sample informational message from NLog");
        Logger.Warn("Sample warning message from NLog");
        // FIX: pass the exception first — the previous (message, object) call treated
        // the exception as a format argument; with no placeholders in the message,
        // the exception was silently dropped from the log output.
        Logger.Error(new Exception("Something bad happened!"), "Sample error message from NLog");
        Logger.Fatal("Sample fatal error message from NLog");
        var sleep = rnd.Next(20, 250);
        Console.WriteLine(string.Concat("Sleeping...:", sleep, "ms"));
        Thread.Sleep(sleep);
    }
    Console.WriteLine("Logging Complete. Press enter to continue...");
    Console.ReadLine();
}
public TCPTransport(IPEndPoint endPoint)
{
    // Opens a TCP connection to the given endpoint and wraps its stream in
    // buffered binary reader/writer pairs; failures are logged and rethrown
    // as TransportException.
    ipEndPoint = endPoint;
    logger = LogManager.GetLogger("TCPTransport");
    try
    {
        tcpClient = new TcpClient();
        tcpClient.Connect(ipEndPoint.Address, ipEndPoint.Port);
        this.buff = new BufferedStream(tcpClient.GetStream());
        this.bWriter = new BinaryWriter(buff);
        this.bReader = new BinaryReader(tcpClient.GetStream());
        String message = string.Format("Connected to server :" + ipEndPoint.Address.ToString() + " : " + ipEndPoint.Port);
        logger.Trace(message);
    }
    catch (Exception e)
    {
        String message = string.Format("Could not connect to server :" + ipEndPoint.Address.ToString() + " : " + ipEndPoint.Port);
        // FIX: previously logger.Warn("Infinispan.DotNetClient", message) — NLog treats
        // the first argument as the format string, so `message` was never logged.
        // Log the real message together with the exception.
        logger.Warn(e, message);
        throw new TransportException(message, e);
    }
}
// Forwards a warning message to the wrapped logger.
public void Warn(string info) => _logger.Warn(info);
// Forwards a formatted warning (message + format args) to the static Log instance.
public static void Warn([Localizable(false)] string message, params object[] args)
    => Log.Warn(message, args);
void Startup()
{
    // Application bootstrap: reads (or creates) the INI file, configures NLog
    // file/console logging from its values, verifies/creates the working
    // directories and their read/write/delete permissions, and checks that
    // required external tools (psexec) are present.
    #region INISetup
    try
    {
        if (!File.Exists(iniPath))
        {
            // INI file is not present in the application directory:
            // create one by hand and set default settings.
            Console.WriteLine("[STARTUP] INI File not found, Generating one with default values");
            ini = new INI.IniFile();
            ini.Section("General").Set("LogToFile", "True");
            ini.Section("General").Set("LogLevel", "Debug");
            ini.Section("General").Set("LogToConsole", "False");
            ini.Section("General").Set("ConsoleLogLevel", "Warn");
            ini.Section("General").Set("LogDir", Environment.CurrentDirectory + "\\Logs");
            ini.Section("General").Set("AppDir", Environment.CurrentDirectory + "\\Applications");
            ini.Section("General").Set("SourceDir", Environment.CurrentDirectory + "\\Source");
            ini.Section("General").Set("ToolDir", Environment.CurrentDirectory + "\\Tools");
            ini.Section("Optional").Set("ClearBeforeMain", "True", "This will clear the console before displaying main menu");
            ini.Save(iniPath);
        }

        // Read the INI file and echo every value to the console as it is loaded.
        ini = new INI.IniFile(iniPath);
        Console.WriteLine("[STARTUP] Reading INI values...");
        logLevel = ini.Section("General").Get("LogLevel");
        wtc.WriteWhite("[STARTUP] LogLevel: ");
        wtc.WriteGreen(logLevel + "\n");
        bLogToConsole = Boolean.Parse(ini.Section("General").Get("LogToConsole"));
        wtc.WriteWhite("[STARTUP] LogToConsole: ");
        wtc.WriteGreen(bLogToConsole.ToString() + "\n");
        consoleLogLevel = ini.Section("General").Get("ConsoleLogLevel");
        wtc.WriteWhite("[STARTUP] ConsoleLogLevel: ");
        wtc.WriteGreen(consoleLogLevel + "\n");
        logDir = ini.Section("General").Get("LogDir");
        wtc.WriteWhite("[STARTUP] Log Directory: ");
        wtc.WriteGreen(logDir + "\n");
        appDir = ini.Section("General").Get("AppDir");
        wtc.WriteWhite("[STARTUP] Application Directory: ");
        wtc.WriteGreen(appDir + "\n");
        sourceDir = ini.Section("General").Get("SourceDir");
        wtc.WriteWhite("[STARTUP] Source Directory: ");
        wtc.WriteGreen(sourceDir + "\n");
        toolDir = ini.Section("General").Get("ToolDir");
        wtc.WriteWhite("[STARTUP] Tool Directory: ");
        wtc.WriteGreen(toolDir + "\n");

        // Optional INI settings — we don't care if they're missing,
        // they just tailor the experience to each user.
        wtc.WriteWhiteLine("[STARTUP] Checking for optional INI settings");
        try
        {
            wtc.WriteWhite("[OPTIONAL] Checking for ClearBeforeMain");
            bClearBeforeMenu = Boolean.Parse(ini.Section("Optional").Get("ClearBeforeMain"));
        }
        catch { } // optional value: missing/invalid is fine, keep the default

        // Check for the logging directory first so missing folder structure
        // can be logged later once logging is configured.
        if (!Directory.Exists(logDir))
        {
            wtc.WriteWhiteLine("[STARTUP] No logDirectory found, attempting to create");
            // Try to create the logDir; fail out and exit if that is impossible.
            try
            {
                Directory.CreateDirectory(logDir);
                wtc.WriteWhite("[STARTUP] Creating LogDir at ");
                wtc.WriteGreenLine(logDir);
            }
            catch (Exception ex)
            {
                // Unable to create the directory: fatal error, exit code 5.
                wtc.WriteRedLine("Fatal Error: " + ex.Message);
                Console.ReadKey();
                Environment.Exit(5);
            }
        }
        #endregion

        #region Logging
        // Build the NLog configuration: always a file target, optionally a
        // colored console target, each with its own level from the INI.
        var config = new LoggingConfiguration();
        var fileTarget = new FileTarget();
        config.AddTarget("file", fileTarget);
        fileTarget.Layout = "[${longdate}] - [${level}]: ${message}";
        fileTarget.FileName = logDir + "\\Main.log";
        LoggingRule rule_LTF;
        switch (logLevel.ToUpper())
        {
            case "TRACE":
                rule_LTF = new LoggingRule("*", LogLevel.Trace, fileTarget);
                wtc.WriteWhite("[STARTUP] LogToFileLevel set to: ");
                wtc.WriteGreen(logLevel + "\n");
                break;
            case "DEBUG":
                rule_LTF = new LoggingRule("*", LogLevel.Debug, fileTarget);
                wtc.WriteWhite("[STARTUP] LogToFileLevel set to: ");
                wtc.WriteGreen(logLevel + "\n");
                break;
            case "WARN":
                rule_LTF = new LoggingRule("*", LogLevel.Warn, fileTarget);
                wtc.WriteWhite("[STARTUP] LogToFileLevel set to: ");
                wtc.WriteGreen(logLevel + "\n");
                break;
            case "INFO":
                rule_LTF = new LoggingRule("*", LogLevel.Info, fileTarget);
                wtc.WriteWhite("[STARTUP] LogToFileLevel set to: ");
                wtc.WriteGreen(logLevel + "\n");
                break;
            case "ERROR":
                rule_LTF = new LoggingRule("*", LogLevel.Error, fileTarget);
                wtc.WriteWhite("[STARTUP] LogToFileLevel set to: ");
                wtc.WriteGreen(logLevel + "\n");
                break;
            default:
                // FIX: "Uknown" typo corrected to "Unknown" in the user-facing message.
                wtc.WriteRedLine("[STARTUP] Unknown type " + logLevel + " defaulting to WARN");
                rule_LTF = new LoggingRule("*", LogLevel.Warn, fileTarget);
                break;
        }
        config.LoggingRules.Add(rule_LTF);

        if (bLogToConsole)
        {
            var consoleTarget = new ColoredConsoleTarget();
            config.AddTarget("console", consoleTarget);
            consoleTarget.Layout = "[${longdate}] - [${level}]: ${message}";
            LoggingRule rule_LTC;
            switch (consoleLogLevel.ToUpper())
            {
                case "TRACE":
                    rule_LTC = new LoggingRule("*", LogLevel.Trace, consoleTarget);
                    wtc.WriteWhite("[STARTUP] ConsoleLogLevel set to: ");
                    wtc.WriteGreen(consoleLogLevel + "\n");
                    break;
                case "DEBUG":
                    rule_LTC = new LoggingRule("*", LogLevel.Debug, consoleTarget);
                    wtc.WriteWhite("[STARTUP] ConsoleLogLevel set to: ");
                    wtc.WriteGreen(consoleLogLevel + "\n");
                    break;
                case "WARN":
                    rule_LTC = new LoggingRule("*", LogLevel.Warn, consoleTarget);
                    wtc.WriteWhite("[STARTUP] ConsoleLogLevel set to: ");
                    wtc.WriteGreen(consoleLogLevel + "\n");
                    break;
                case "INFO":
                    rule_LTC = new LoggingRule("*", LogLevel.Info, consoleTarget);
                    wtc.WriteWhite("[STARTUP] ConsoleLogLevel set to: ");
                    wtc.WriteGreen(consoleLogLevel + "\n");
                    break;
                case "ERROR":
                    rule_LTC = new LoggingRule("*", LogLevel.Error, consoleTarget);
                    wtc.WriteWhite("[STARTUP] ConsoleLogLevel set to: ");
                    wtc.WriteGreen(consoleLogLevel + "\n");
                    break;
                default:
                    wtc.WriteRedLine("[STARTUP] Unknown type " + consoleLogLevel + " defaulting to WARN");
                    // FIX: the fallback console rule previously pointed at fileTarget,
                    // so an unrecognized ConsoleLogLevel silently disabled console output.
                    rule_LTC = new LoggingRule("*", LogLevel.Warn, consoleTarget);
                    break;
            }
            config.LoggingRules.Add(rule_LTC);
        }
        else
        {
            wtc.WriteWhite("[STARTUP] LogToConsole set to: ");
            wtc.WriteRed(bLogToConsole.ToString());
            wtc.WriteWhiteLine(" - Skipping level check");
        }

        LogManager.Configuration = config;
        logger = LogManager.GetCurrentClassLogger();
        logger.Debug("============================");
        logger.Debug("Application Started");
        logger.Debug("============================");
        logger.Debug("Exporting settings to log");
        logger.Debug("LogLevel: " + logLevel);
        logger.Debug("LogToConsole " + bLogToConsole.ToString());
        logger.Debug("ConsoleLogLevel: " + consoleLogLevel);
        logger.Debug("LogDir: " + logDir);
        logger.Debug("AppDir: " + appDir);
        logger.Debug("SourceDir: " + sourceDir);
        logger.Debug("ToolDir: " + toolDir);
    }
    catch (Exception ex)
    {
        Console.ForegroundColor = ConsoleColor.Red;
        Console.WriteLine("Critical Error: ");
        Console.WriteLine(ex.Message);
        Console.WriteLine("Application will now close");
        Console.ReadKey();
        Environment.Exit(1605);
    }
    #endregion

    #region DirectoryCheck
    // Separate try/catch so errors from here on can be written to the log
    // (which only exists once the Logging region above has run).
    try
    {
        // Check through each directory to make sure they exist.
        logger.Debug("Checking through each directory to make sure they exist");
        wtc.WriteWhiteLine("[STARTUP] Checking for AppDirectory");
        logger.Debug("Checking for AppDirectory at " + appDir);
        if (!Directory.Exists(appDir))
        {
            logger.Debug("No Directory found at " + appDir);
            // Try to create the appDir; fail out and exit if that is impossible.
            try
            {
                logger.Debug("Attempting to create directory at " + appDir);
                Directory.CreateDirectory(appDir);
                wtc.WriteWhite("[STARTUP] Creating AppDir at ");
                wtc.WriteGreenLine(appDir);
            }
            catch (Exception ex)
            {
                logger.Error("Unable to create directory at " + appDir);
                logger.Error("Failed with error: " + ex.Message);
                wtc.WriteRedLine("Fatal Error: " + ex.Message);
                Console.ReadKey();
                Environment.Exit(6);
            }
        }
        else
        {
            logger.Debug("AppDirectory exists at " + appDir);
            wtc.WriteGreenLine("[STARTUP] Application Directory exists!");
        }

        wtc.WriteWhiteLine("[STARTUP] Checking for SourceDirectory");
        logger.Debug("Checking for SourceDirectory at " + sourceDir);
        if (!Directory.Exists(sourceDir))
        {
            logger.Debug("No Directory found at " + sourceDir);
            try
            {
                logger.Debug("Attempting to create directory at " + sourceDir);
                Directory.CreateDirectory(sourceDir);
                wtc.WriteWhite("[STARTUP] Creating SourceDir at ");
                wtc.WriteGreenLine(sourceDir);
            }
            catch (Exception ex)
            {
                logger.Error("Unable to create directory at " + sourceDir);
                logger.Error("Failed with error: " + ex.Message);
                wtc.WriteRedLine("Fatal Error: " + ex.Message);
                Console.ReadKey();
                Environment.Exit(7);
            }
        }
        else
        {
            logger.Debug("SourceDirectory exists at " + sourceDir);
            wtc.WriteGreenLine("[STARTUP] Source Directory exists!");
        }

        wtc.WriteWhiteLine("[STARTUP] Checking for ToolDirectory");
        // FIX: the log lines in this ToolDirectory block previously referenced
        // sourceDir (copy-paste error) — they now report toolDir.
        logger.Debug("Checking for ToolDirectory at " + toolDir);
        if (!Directory.Exists(toolDir))
        {
            logger.Debug("No Directory found at " + toolDir);
            try
            {
                logger.Debug("Attempting to create directory at " + toolDir);
                Directory.CreateDirectory(toolDir);
                wtc.WriteWhite("[STARTUP] Creating ToolDir at ");
                wtc.WriteGreenLine(toolDir);
            }
            catch (Exception ex)
            {
                logger.Error("Unable to create directory at " + toolDir);
                logger.Error("Failed with error: " + ex.Message);
                wtc.WriteRedLine("Fatal Error: " + ex.Message);
                Console.ReadKey();
                // NOTE(review): same exit code as the SourceDir failure above —
                // confirm whether a distinct code (e.g. 9) was intended.
                Environment.Exit(7);
            }
        }
        else
        {
            logger.Debug("ToolDirectory exists at " + toolDir);
            wtc.WriteGreenLine("[STARTUP] Tool Directory exists!");
        }

        // Check for write/read/delete permissions by round-tripping a temp file
        // through each directory.
        logger.Debug("Checking for Write/Read/Delete Permissions in directories");
        try
        {
            // APPDIR
            logger.Debug("Creating file TEST in " + appDir);
            File.WriteAllText(appDir + "\\test.test", "");
            logger.Debug(appDir + "\\test.test" + " - File Created!");
            logger.Debug("Deleting File " + appDir + "\\test.test");
            File.Delete(appDir + "\\test.test");
            logger.Debug(appDir + "\\test.test" + " - File Deleted!");
            // SOURCEDIR
            logger.Debug("Creating file TEST in " + sourceDir);
            File.WriteAllText(sourceDir + "\\test.test", "");
            logger.Debug(sourceDir + "\\test.test" + " - File Created!");
            logger.Debug("Deleting File " + sourceDir + "\\test.test");
            File.Delete(sourceDir + "\\test.test");
            logger.Debug(sourceDir + "\\test.test" + " - File Deleted!");
            // TOOLDIR
            logger.Debug("Creating file TEST in " + toolDir);
            File.WriteAllText(toolDir + "\\test.test", "");
            logger.Debug(toolDir + "\\test.test" + " - File Created!");
            logger.Debug("Deleting File " + toolDir + "\\test.test");
            File.Delete(toolDir + "\\test.test");
            logger.Debug(toolDir + "\\test.test" + " - File Deleted!");
        }
        catch (Exception ex)
        {
            logger.Fatal(ex.Message);
            wtc.WriteRedLine("[FATAL ERROR] " + ex.Message);
            Console.ReadKey();
            Environment.Exit(8);
        }
        #endregion

        #region ToolSupport
        // This section needs improvement as more tool support is added.
        // Currently only PSEXEC is checked; validate more tools as they are used.
        // IDEA: mirror support — download needed tools via a mirror and verify MD5;
        // possibly define tools in the INI.
        if (!File.Exists(toolDir + "\\psexec.exe"))
        {
            // PSEXEC is missing — warn but keep running.
            logger.Warn("Unable to find psexec in the following location [" + toolDir + "\\psexec.exe]");
            logger.Warn("Any applications that use PSEXEC will not function!");
            bPsexecMissing = true;
            wtc.WriteYellowLine("[STARTUP] PSEXEC is missing from the Tools directory. Please make sure the exe is in the given path, or change the \"ToolDir\" path in your ini to where PSEXEC exists");
            wtc.WriteYellowLine("[WARNING] Program will continue, any application that uses PSEXEC as the install driver will not function till this is resolved");
        }
    }
    catch (Exception ex)
    {
        wtc.WriteRedLine("[FATAL ERROR] " + ex.Message);
        logger.Fatal(ex.Message);
        Console.ReadKey();
        Environment.Exit(2);
    }
    #endregion
}
public void Warning(Exception e)
{
    // Mirror the exception to the debugger output first, then record it as a warning.
    System.Diagnostics.Debug.WriteLine(e);
    _log.Warn(e);
}
// Forwards a formatted warning (message + format args) to the wrapped logger.
public void Warn(string msg, params object[] args) => _logger.Warn(msg, args);
// Shares the currently selected chummer character online: loads the character
// file (reusing an already-open one when possible), checks whether the SINners
// web service already holds an up-to-date copy (by id / last-change timestamp),
// uploads it if needed, and finally publishes a shareable URL
// (service base uri + "O/" + content hash) through the returned MyUserState.
// Progress is reported incrementally via ReportProgress; any failure is logged
// as a warning and rethrown.
private async Task <MyUserState> ShareChummer_DoWork()
{
    string hash = "";
    try
    {
        using (var op_shareChummer = Timekeeper.StartSyncron("Share Chummer", null,
            CustomActivity.OperationType.DependencyOperation, MyCharacterCache?.FilePath))
        {
            MyUserState myState = new MyUserState(this);
            CharacterExtended ce = null;
            var client = StaticUtils.GetClient();
            string sinnerid = "";
            Guid SINid = Guid.Empty;

            // Local helper: load the character and wrap it in a CharacterExtended.
            // NOTE: it also mutates the enclosing `sinnerid` and `hash` as side effects.
            async Task <CharacterExtended> GetCharacterExtended(CustomActivity parentActivity)
            {
                using (var op_prepChummer = Timekeeper.StartSyncron("Loading Chummerfile", parentActivity,
                    CustomActivity.OperationType.DependencyOperation, MyCharacterCache?.FilePath))
                {
                    Character c = new Character()
                    {
                        FileName = MyCharacterCache.FilePath
                    };
                    // Prefer a character that is already open in the main form
                    // over re-loading the file from disk.
                    var foundchar = (from a in PluginHandler.MainForm.OpenCharacters
                                     where a.FileName == MyCharacterCache.FilePath
                                     select a).ToList();
                    if (foundchar?.Any() == true)
                    {
                        c = foundchar?.FirstOrDefault();
                    }
                    else
                    {
                        // Load from disk, showing the loading form while doing so.
                        using (frmLoading frmLoadingForm = new frmLoading
                        {
                            CharacterFile = MyCharacterCache.FilePath
                        })
                        {
                            frmLoadingForm.Reset(36);
                            frmLoadingForm.TopMost = true;
                            frmLoadingForm.Show();
                            myState.StatusText = "Loading chummer file...";
                            myState.CurrentProgress += 10;
                            ReportProgress(myState.CurrentProgress, myState);
                            await c.Load(frmLoadingForm, false);
                        }
                    }
                    if (c == null)
                    {
                        throw new ArgumentNullException("Could not load Character file " + MyCharacterCache.FilePath + ".");
                    }
                    ce = new CharacterExtended(c, null, null, MyCharacterCache);
                    if (ce?.MySINnerFile?.Id != null)
                    {
                        sinnerid = ce.MySINnerFile.Id.ToString();
                    }
                    hash = ce?.MySINnerFile?.MyHash;
                    return(ce);
                }
            }

            // Cheap path: the SINner id may already be cached in the plugin data
            // dictionary, avoiding a full character load.
            if (MyCharacterCache.MyPluginDataDic.TryGetValue("SINnerId", out Object sinneridobj))
            {
                sinnerid = sinneridobj?.ToString();
            }
            else
            {
                ce = await GetCharacterExtended(op_shareChummer);
                sinnerid = ce.MySINnerFile.Id.ToString();
                hash = ce?.MySINnerFile?.MyHash;
            }

            if ((String.IsNullOrEmpty(sinnerid) || (!Guid.TryParse(sinnerid, out SINid))))
            {
                myState.StatusText = "SINner Id is unknown or not issued!";
                ReportProgress(30, myState);
            }
            else
            {
                myState.StatusText = "SINner Id is " + SINid + ".";
                myState.CurrentProgress = 30;
                ReportProgress(myState.CurrentProgress, myState);
            }

            HttpOperationResponse <ResultSinnerGetSINById> checkresult = null;
            // Check if the character is already online and up to date.
            using (var op_checkOnlineVersionChummer = Timekeeper.StartSyncron(
                "check if online", op_shareChummer,
                CustomActivity.OperationType.DependencyOperation, MyCharacterCache?.FilePath))
            {
                checkresult = await client.GetSINByIdWithHttpMessagesAsync(SINid);
                if (checkresult == null)
                {
                    throw new ArgumentException("Could not parse result from SINners Webservice!");
                }
                // 404 means "not uploaded yet" and is handled below;
                // every other failed call is fatal.
                if (checkresult.Response.StatusCode != HttpStatusCode.NotFound)
                {
                    if (checkresult.Body.CallSuccess != true)
                    {
                        if (checkresult.Body.MyException is Exception myException)
                        {
                            throw new ArgumentException(
                                "Error from SINners Webservice: " + checkresult.Body.ErrorText,
                                myException);
                        }
                        else
                        {
                            throw new ArgumentException("Error from SINners Webservice: " + checkresult.Body.ErrorText);
                        }
                    }
                    else
                    {
                        hash = checkresult.Body.MySINner.MyHash;
                    }
                }
            }

            // Upload (again) when the server has no copy or the local file is newer.
            var lastWriteTimeUtc = System.IO.File.GetLastWriteTimeUtc(MyCharacterCache.FilePath);
            if (checkresult.Response.StatusCode == HttpStatusCode.NotFound
                || (checkresult.Body.MySINner.LastChange < lastWriteTimeUtc))
            {
                if (ce == null)
                {
                    // Character was never loaded (cached-id path) — load it now.
                    myState.StatusText = "The Chummer is newer and has to be uploaded again.";
                    myState.CurrentProgress = 30;
                    ReportProgress(myState.CurrentProgress, myState);
                    ce = await GetCharacterExtended(op_shareChummer);
                }
                using (var op_uploadChummer = Timekeeper.StartSyncron(
                    "Uploading Chummer", op_shareChummer,
                    CustomActivity.OperationType.DependencyOperation, MyCharacterCache?.FilePath))
                {
                    myState.StatusText = "Checking SINner availability (and if necessary upload it).";
                    myState.CurrentProgress = 35;
                    ReportProgress(myState.CurrentProgress, myState);
                    myState.ProgressSteps = 10;
                    var uploadtask = await ce.Upload(myState, op_uploadChummer);
                    SINid = ce.MySINnerFile.Id.Value;
                    // Re-fetch to confirm the upload and pick up the server-side hash.
                    var result = await client.GetSINByIdWithHttpMessagesAsync(SINid);
                    if (result == null)
                    {
                        throw new ArgumentException("Could not parse result from SINners Webservice!");
                    }
                    if (result.Body?.CallSuccess != true)
                    {
                        if (result.Body?.MyException is Exception myException)
                        {
                            throw new ArgumentException(
                                "Error from SINners Webservice: " + result.Body?.ErrorText,
                                myException);
                        }
                        else
                        {
                            throw new ArgumentException(
                                "Error from SINners Webservice: " + result.Body?.ErrorText);
                        }
                    }
                    else
                    {
                        hash = result.Body.MySINner.MyHash;
                    }
                }
            }

            myState.StatusText = "SINner is online available.";
            myState.CurrentProgress = 90;
            ReportProgress(myState.CurrentProgress, myState);
            // Build the public share link from the service base uri and the content hash.
            string url = client.BaseUri + "O";
            url += "/" + hash;
            myState.LinkText = url;
            ReportProgress(100, myState);
            RunWorkerCompleted(myState);
            return(myState);
        }
    }
    catch (Exception exception)
    {
        Log.Warn(exception);
        throw;
    }
}
// Forwards a warning message to the wrapped logger.
public void Warn(string msg) => _logger.Warn(msg);
// Forwards a warning message to the underlying log sender.
public void Warn(string message) => loggerSender.Warn(message);
// Forwards an arbitrary message object to the wrapped logger at warning level.
public void Warn(object message) => logger.Warn(message);
// Verifies that metadata read back from a file (after an exiftool write) matches
// what was queued for writing. Returns true when a mismatch (or an external file
// modification between write and read-back) was detected; `errorMessage` collects
// human-readable details and `metadataUpdatedByUserCopy` receives a copy of the
// expected metadata, normalized so that fields that legitimately change on save
// (access date, size, rotation, etc.) do not count as errors.
// Side effect: the matched entry (and any stale duplicates for the same file)
// are removed from `metadataWrittenByExiftoolWaitVerify`.
public static bool HasWriteMetadataErrors(
    Metadata metadataRead, /* Data read back after saved and need to be verifyed */
    List <Metadata> metadataWrittenByExiftoolWaitVerify, /* This what should have been saved, check if same info read back */
    out Metadata metadataUpdatedByUserCopy,
    out string errorMessage)
{
    // Out parameter defaults.
    errorMessage = "";
    metadataUpdatedByUserCopy = null;
    // Nothing queued for verification at all — nothing can be wrong.
    if (metadataWrittenByExiftoolWaitVerify.Count == 0)
    {
        return(false);
    }

    bool foundErrors = false;
    int verifyPosition = Metadata.FindFullFilenameInList(metadataWrittenByExiftoolWaitVerify, metadataRead.FileEntryBroker.FileFullPath);
    if (verifyPosition != -1)
    {
        // The file was touched between the exiftool write and this read-back —
        // typically a cloud-sync client (OneDrive/GoogleDrive/Dropbox/...) changing dates.
        if (metadataRead.FileEntryBroker.LastWriteDateTime > metadataWrittenByExiftoolWaitVerify[verifyPosition].FileDateModified)
        {
            string fileErrorMessage =
                "File has been updated between writing and read back using exiftool.\r\n" +
                "This can occure when OneDrive, GoogleDrive, Dropbox, iDrive, Box etc... change dates during syncing files.\r\n" +
                "File modified before Exiftool: " + metadataRead.FileEntryBroker.LastWriteDateTime.ToString() + "\r\n" +
                "File modified after Exiftool: " + metadataWrittenByExiftoolWaitVerify[verifyPosition].FileDateModified.ToString();
            errorMessage += (string.IsNullOrWhiteSpace(errorMessage) ? "" : "\r\n") + fileErrorMessage;
            Logger.Warn("File with error: " + metadataRead.FileFullPath + "\r\n" + errorMessage);
            foundErrors = true;
        }
    }
    if (verifyPosition == -1)
    {
        return(false); //No need for verify, the metadata was only read, most likly first time read (without save, read and verify)
    }

    // Copy the expected data, then drop it (and any stale duplicates for the same
    // file — left over when multiple saves failed before being verified) from the queue.
    metadataUpdatedByUserCopy = new Metadata(metadataWrittenByExiftoolWaitVerify[verifyPosition]); //Copy data to verify
    metadataWrittenByExiftoolWaitVerify.RemoveAt(verifyPosition);

    //Remove old versions of "Need to be veriyfied"
    bool foundOldVersionToVerify; //Happens when multiple save are done and save faild, and veridify was not done for each media file
    do
    {
        foundOldVersionToVerify = false;
        int indexFound = Metadata.FindFullFilenameInList(metadataWrittenByExiftoolWaitVerify, metadataRead.FileFullPath);
        if (indexFound > -1 && indexFound < metadataWrittenByExiftoolWaitVerify.Count)
        {
            metadataWrittenByExiftoolWaitVerify.RemoveAt(indexFound);
            foundOldVersionToVerify = true;
        }
    } while (foundOldVersionToVerify);

    // Normalize fields that legitimately differ after a save so they do not
    // show up as verification errors.
    //metadataUpdatedByUserCopy.FileDateModified = metadataRead.FileDateModified; //After save, this was updated
    metadataUpdatedByUserCopy.FileDateAccessed = metadataRead.FileDateAccessed; //This has changed, do not care
    metadataUpdatedByUserCopy.FileSize = metadataRead.FileSize; //This has changed, do not care
    metadataUpdatedByUserCopy.Errors = metadataRead.Errors; //This has changed, do not care, Hopefully this is gone
    metadataUpdatedByUserCopy.Broker = metadataRead.Broker; //This has changed, do not care
    // Width/height swapped means the media was rotated during save — accept the new orientation.
    if (metadataUpdatedByUserCopy.MediaHeight == metadataRead.MediaWidth && metadataUpdatedByUserCopy.MediaWidth == metadataRead.MediaHeight) //Media has been Rotated
    {
        metadataUpdatedByUserCopy.MediaHeight = metadataRead.MediaHeight;
        metadataUpdatedByUserCopy.MediaWidth = metadataRead.MediaWidth;
    }
    // When the file-create date tracks the media "taken" date (within 2 seconds),
    // accept the read-back create date as intentional.
    if (metadataRead.TryParseDateTakenToUtc(out DateTime? dateTimeOffset)
        && Math.Abs((((DateTime)dateTimeOffset).ToUniversalTime() - ((DateTime)metadataRead.FileDateCreated).ToUniversalTime()).TotalSeconds) < 2)
    {
        metadataUpdatedByUserCopy.FileDateCreated = metadataRead.FileDateCreated; //File create date has been set to Media Taken
    }

    // Anything still differing after normalization is a genuine write error.
    if (metadataRead != metadataUpdatedByUserCopy)
    {
        errorMessage += (string.IsNullOrWhiteSpace(errorMessage) ? "" : "\r\n") +
            "Metadata errors:\r\n" + Metadata.GetErrors(metadataUpdatedByUserCopy, metadataRead);
        Logger.Warn("File with error: " + metadataUpdatedByUserCopy.FileFullPath + "\r\n" + errorMessage);
        foundErrors = true;
    }
    return(foundErrors);
}
public static void Warn(string message)
{
    // Forward to the underlying NLog logger at warning level.
    logger.Warn(message);
}
public static void Warn(string message)
{
    // Prefix the message with the current user's info before logging it.
    logger.Warn(GetUserInfo() + message);
}
// Wraps the message provider in a generator and forwards it to the wrapped logger.
public void Warn(LogOutputProvider messageProvider) => _log.Warn(ToGenerator(messageProvider));
// Entry point for PECmd: parses command-line options, loads a single prefetch file (-f)
// or every *.pf file under a directory (-d), then exports results to CSV/JSON/XHTML
// as requested by the remaining switches.
private static void Main(string[] args)
{
    Licensing.RegisterLicenseFromFileIfExists(SSLicenseFile);

    SetupNLog();

    // Keywords highlighted in the output; extra keywords from -k are appended below.
    _keywords = new HashSet<string> {"temp", "tmp"};

    _logger = LogManager.GetCurrentClassLogger();

    if (!CheckForDotnet46())
    {
        _logger.Warn(".net 4.6 not detected. Please install .net 4.6 and try again.");
        return;
    }

    // ---- Command-line option definitions ----
    _fluentCommandLineParser = new FluentCommandLineParser<ApplicationArguments>
    {
        IsCaseSensitive = false
    };

    _fluentCommandLineParser.Setup(arg => arg.File)
        .As('f')
        .WithDescription("File to process. Either this or -d is required");

    _fluentCommandLineParser.Setup(arg => arg.Directory)
        .As('d')
        .WithDescription("Directory to recursively process. Either this or -f is required");

    _fluentCommandLineParser.Setup(arg => arg.Keywords)
        .As('k')
        .WithDescription(
            "Comma separated list of keywords to highlight in output. By default, 'temp' and 'tmp' are highlighted. Any additional keywords will be added to these.");

    _fluentCommandLineParser.Setup(arg => arg.Quiet)
        .As('q')
        .WithDescription(
            "Do not dump full details about each file processed. Speeds up processing when using --json or --csv\r\n")
        .SetDefault(false);

    _fluentCommandLineParser.Setup(arg => arg.JsonDirectory)
        .As("json")
        .WithDescription(
            "Directory to save json representation to. Use --pretty for a more human readable layout");

    _fluentCommandLineParser.Setup(arg => arg.CsvDirectory)
        .As("csv")
        .WithDescription(
            "Directory to save CSV (tab separated) results to. Be sure to include the full path in double quotes");

    // _fluentCommandLineParser.Setup(arg => arg.XmlDirectory)
    //     .As("xml")
    //     .WithDescription(
    //         "Directory to save XML formatted results to. Be sure to include the full path in double quotes");

    _fluentCommandLineParser.Setup(arg => arg.xHtmlDirectory)
        .As("html")
        .WithDescription(
            "Directory to save xhtml formatted results to. Be sure to include the full path in double quotes");

    _fluentCommandLineParser.Setup(arg => arg.JsonPretty)
        .As("pretty")
        .WithDescription(
            "When exporting to json, use a more human readable layout\r\n").SetDefault(false);

    _fluentCommandLineParser.Setup(arg => arg.LocalTime)
        .As("local")
        .WithDescription(
            "Display dates using timezone of machine PECmd is running on vs. UTC\r\n").SetDefault(false);

    _fluentCommandLineParser.Setup(arg => arg.DateTimeFormat)
        .As("dt")
        .WithDescription(
            "The custom date/time format to use when displaying time stamps. Default is: yyyy-MM-dd HH:mm:ss K").SetDefault("yyyy-MM-dd HH:mm:ss K");

    _fluentCommandLineParser.Setup(arg => arg.PreciseTimestamps)
        .As("mp")
        .WithDescription(
            "When true, display higher precision for time stamps. Default is false").SetDefault(false);

    var header = $"PECmd version {Assembly.GetExecutingAssembly().GetName().Version}" +
                 "\r\n\r\nAuthor: Eric Zimmerman ([email protected])" +
                 "\r\nhttps://github.com/EricZimmerman/PECmd";

    var footer = @"Examples: PECmd.exe -f ""C:\Temp\CALC.EXE-3FBEF7FD.pf""" + "\r\n\t " +
                 @" PECmd.exe -f ""C:\Temp\CALC.EXE-3FBEF7FD.pf"" --json ""D:\jsonOutput"" --jsonpretty" + "\r\n\t " +
                 @" PECmd.exe -d ""C:\Temp"" -k ""system32, fonts""" + "\r\n\t " +
                 @" PECmd.exe -d ""C:\Temp"" --csv ""c:\temp"" --local --json c:\temp\json" + "\r\n\t " +
                 // @" PECmd.exe -f ""C:\Temp\someOtherFile.txt"" --lr cc -sa" + "\r\n\t " +
                 // @" PECmd.exe -f ""C:\Temp\someOtherFile.txt"" --lr cc -sa -m 15 -x 22" + "\r\n\t " +
                 @" PECmd.exe -d ""C:\Windows\Prefetch""" + "\r\n\t " +
                 "\r\n\t" +
                 " Short options (single letter) are prefixed with a single dash. Long commands are prefixed with two dashes\r\n";

    _fluentCommandLineParser.SetupHelp("?", "help")
        .WithHeader(header)
        .Callback(text => _logger.Info(text + "\r\n" + footer));

    // ---- Parse and validate arguments; each failure path prints help/warning and exits ----
    var result = _fluentCommandLineParser.Parse(args);

    if (result.HelpCalled)
    {
        return;
    }

    if (result.HasErrors)
    {
        _logger.Error("");
        _logger.Error(result.ErrorText);

        _fluentCommandLineParser.HelpOption.ShowHelp(_fluentCommandLineParser.Options);

        return;
    }

    if (UsefulExtension.IsNullOrEmpty(_fluentCommandLineParser.Object.File) &&
        UsefulExtension.IsNullOrEmpty(_fluentCommandLineParser.Object.Directory))
    {
        _fluentCommandLineParser.HelpOption.ShowHelp(_fluentCommandLineParser.Options);

        _logger.Warn("Either -f or -d is required. Exiting");
        return;
    }

    if (UsefulExtension.IsNullOrEmpty(_fluentCommandLineParser.Object.File) == false &&
        !File.Exists(_fluentCommandLineParser.Object.File))
    {
        _logger.Warn($"File '{_fluentCommandLineParser.Object.File}' not found. Exiting");
        return;
    }

    if (UsefulExtension.IsNullOrEmpty(_fluentCommandLineParser.Object.Directory) == false &&
        !Directory.Exists(_fluentCommandLineParser.Object.Directory))
    {
        _logger.Warn($"Directory '{_fluentCommandLineParser.Object.Directory}' not found. Exiting");
        return;
    }

    if (_fluentCommandLineParser.Object.Keywords?.Length > 0)
    {
        var kws = _fluentCommandLineParser.Object.Keywords.Split(new[] {','}, StringSplitOptions.RemoveEmptyEntries);

        foreach (var kw in kws)
        {
            _keywords.Add(kw.Trim());
        }
    }

    _logger.Info(header);
    _logger.Info("");
    _logger.Info($"Command line: {string.Join(" ", Environment.GetCommandLineArgs().Skip(1))}");
    _logger.Info("");
    _logger.Info($"Keywords: {string.Join(", ", _keywords)}");
    _logger.Info("");

    if (_fluentCommandLineParser.Object.PreciseTimestamps)
    {
        _fluentCommandLineParser.Object.DateTimeFormat = _preciseTimeFormat;
    }

    _processedFiles = new List<IPrefetch>();
    _failedFiles = new List<string>();

    // ---- Load prefetch data: single file (-f) or recursive directory scan (-d) ----
    if (_fluentCommandLineParser.Object.File?.Length > 0)
    {
        IPrefetch pf = null;

        try
        {
            pf = LoadFile(_fluentCommandLineParser.Object.File);
            if (pf != null)
            {
                _processedFiles.Add(pf);
            }
        }
        catch (UnauthorizedAccessException ex)
        {
            _logger.Error(
                $"Unable to access '{_fluentCommandLineParser.Object.File}'. Are you running as an administrator? Error: {ex.Message}");
        }
        catch (Exception ex)
        {
            // NOTE(review): this message references .Directory even though this branch
            // handles the single-file (-f) case, so the directory shown may be empty — confirm intent.
            _logger.Error(
                $"Error getting prefetch files in '{_fluentCommandLineParser.Object.Directory}'. Error: {ex.Message}");
        }
    }
    else
    {
        _logger.Info($"Looking for prefetch files in '{_fluentCommandLineParser.Object.Directory}'");
        _logger.Info("");

        string[] pfFiles = null;

        try
        {
            pfFiles = Directory.GetFiles(_fluentCommandLineParser.Object.Directory, "*.pf",
                SearchOption.AllDirectories);
        }
        catch (UnauthorizedAccessException ua)
        {
            _logger.Error(
                $"Unable to access '{_fluentCommandLineParser.Object.Directory}'. Are you running as an administrator? Error: {ua.Message}");
            return;
        }
        catch (Exception ex)
        {
            _logger.Error(
                $"Error getting prefetch files in '{_fluentCommandLineParser.Object.Directory}'. Error: {ex.Message}");
            return;
        }

        _logger.Info($"Found {pfFiles.Length:N0} Prefetch files");
        _logger.Info("");

        var sw = new Stopwatch();
        sw.Start();

        foreach (var file in pfFiles)
        {
            var pf = LoadFile(file);
            if (pf != null)
            {
                _processedFiles.Add(pf);
            }
        }

        sw.Stop();

        if (_fluentCommandLineParser.Object.Quiet)
        {
            _logger.Info("");
        }

        _logger.Info(
            $"Processed {pfFiles.Length - _failedFiles.Count:N0} out of {pfFiles.Length:N0} files in {sw.Elapsed.TotalSeconds:N4} seconds");

        if (_failedFiles.Count > 0)
        {
            _logger.Info("");
            _logger.Warn("Failed files");
            foreach (var failedFile in _failedFiles)
            {
                _logger.Info($" {failedFile}");
            }
        }
    }

    // ---- Export results (CSV/timeline TSV, JSON, XHTML), each only if its switch was given ----
    if (_processedFiles.Count > 0)
    {
        _logger.Info("");

        try
        {
            CsvWriter csv = null;
            StreamWriter streamWriter = null;

            CsvWriter csvTl = null;
            StreamWriter streamWriterTl = null;

            if (_fluentCommandLineParser.Object.CsvDirectory?.Length > 0)
            {
                var outName = $"{DateTimeOffset.Now.ToString("yyyyMMddHHmmss")}_PECmd_Output.tsv";
                var outNameTl = $"{DateTimeOffset.Now.ToString("yyyyMMddHHmmss")}_PECmd_Output_Timeline.tsv";

                var outFile = Path.Combine(_fluentCommandLineParser.Object.CsvDirectory, outName);
                var outFileTl = Path.Combine(_fluentCommandLineParser.Object.CsvDirectory, outNameTl);

                if (Directory.Exists(_fluentCommandLineParser.Object.CsvDirectory) == false)
                {
                    _logger.Warn($"Path to '{_fluentCommandLineParser.Object.CsvDirectory}' doesn't exist. Creating...");
                    Directory.CreateDirectory(_fluentCommandLineParser.Object.CsvDirectory);
                }

                _logger.Warn($"CSV (tab separated) output will be saved to '{outFile}'");
                _logger.Warn($"CSV time line (tab separated) output will be saved to '{outFileTl}'");

                try
                {
                    streamWriter = new StreamWriter(outFile);
                    csv = new CsvWriter(streamWriter);
                    // Tab-separated output, despite the .tsv/CSV naming.
                    csv.Configuration.Delimiter = $"{'\t'}";
                    csv.WriteHeader(typeof(CsvOut));

                    streamWriterTl = new StreamWriter(outFileTl);
                    csvTl = new CsvWriter(streamWriterTl);
                    csvTl.Configuration.Delimiter = $"{'\t'}";
                    csvTl.WriteHeader(typeof(CsvOutTl));
                }
                catch (Exception ex)
                {
                    _logger.Error(
                        $"Unable to open '{outFile}' for writing. CSV export canceled. Error: {ex.Message}");
                }
            }

            if (_fluentCommandLineParser.Object.JsonDirectory?.Length > 0)
            {
                _logger.Warn($"Saving json output to '{_fluentCommandLineParser.Object.JsonDirectory}'");
            }

            XmlTextWriter xml = null;

            if (_fluentCommandLineParser.Object.xHtmlDirectory?.Length > 0)
            {
                // Output directory name embeds a timestamp plus a sanitized copy of the target path.
                var outDir = Path.Combine(_fluentCommandLineParser.Object.xHtmlDirectory,
                    $"{DateTimeOffset.UtcNow.ToString("yyyyMMddHHmmss")}_PECmd_Output_for_{_fluentCommandLineParser.Object.xHtmlDirectory.Replace(@":\", "_").Replace(@"\", "_")}");

                if (Directory.Exists(outDir) == false)
                {
                    Directory.CreateDirectory(outDir);
                }

                var styleDir = Path.Combine(outDir, "styles");
                if (Directory.Exists(styleDir) == false)
                {
                    Directory.CreateDirectory(styleDir);
                }

                // Emit embedded stylesheet/image resources next to the report.
                File.WriteAllText(Path.Combine(styleDir, "normalize.css"), Resources.normalize);
                File.WriteAllText(Path.Combine(styleDir, "style.css"), Resources.style);

                Resources.directories.Save(Path.Combine(styleDir, "directories.png"));
                Resources.filesloaded.Save(Path.Combine(styleDir, "filesloaded.png"));

                var outFile = Path.Combine(_fluentCommandLineParser.Object.xHtmlDirectory, outDir, "index.xhtml");

                _logger.Warn($"Saving HTML output to '{outFile}'");

                xml = new XmlTextWriter(outFile, Encoding.UTF8)
                {
                    Formatting = Formatting.Indented,
                    Indentation = 4
                };

                xml.WriteStartDocument();

                xml.WriteProcessingInstruction("xml-stylesheet", "href=\"styles/normalize.css\"");
                xml.WriteProcessingInstruction("xml-stylesheet", "href=\"styles/style.css\"");

                xml.WriteStartElement("document");
            }

            // Writers are null when their switch wasn't supplied; ?. makes each export optional.
            foreach (var processedFile in _processedFiles)
            {
                var o = GetCsvFormat(processedFile);

                try
                {
                    // One timeline row per recorded run time.
                    foreach (var dateTimeOffset in processedFile.LastRunTimes)
                    {
                        var t = new CsvOutTl();

                        var exePath = processedFile.Filenames.FirstOrDefault(
                            y => y.EndsWith(processedFile.Header.ExecutableFilename));

                        if (exePath == null)
                        {
                            exePath = processedFile.Header.ExecutableFilename;
                        }

                        t.ExecutableName = exePath;
                        t.RunTime = dateTimeOffset.ToString(_fluentCommandLineParser.Object.DateTimeFormat);

                        csvTl?.WriteRecord(t);
                    }
                }
                catch (Exception ex)
                {
                    _logger.Error(
                        $"Error getting time line record for '{processedFile.SourceFilename}' to '{_fluentCommandLineParser.Object.CsvDirectory}'. Error: {ex.Message}");
                }

                try
                {
                    csv?.WriteRecord(o);
                }
                catch (Exception ex)
                {
                    _logger.Error(
                        $"Error writing CSV record for '{processedFile.SourceFilename}' to '{_fluentCommandLineParser.Object.CsvDirectory}'. Error: {ex.Message}");
                }

                if (_fluentCommandLineParser.Object.JsonDirectory?.Length > 0)
                {
                    SaveJson(processedFile, _fluentCommandLineParser.Object.JsonPretty,
                        _fluentCommandLineParser.Object.JsonDirectory);
                }

                //XHTML
                xml?.WriteStartElement("Container");
                xml?.WriteElementString("SourceFile", o.SourceFilename);
                xml?.WriteElementString("SourceCreated", o.SourceCreated);
                xml?.WriteElementString("SourceModified", o.SourceModified);
                xml?.WriteElementString("SourceAccessed", o.SourceAccessed);

                xml?.WriteElementString("LastRun", o.LastRun);

                xml?.WriteElementString("PreviousRun0", $"{o.PreviousRun0}");
                xml?.WriteElementString("PreviousRun1", $"{o.PreviousRun1}");
                xml?.WriteElementString("PreviousRun2", $"{o.PreviousRun2}");
                xml?.WriteElementString("PreviousRun3", $"{o.PreviousRun3}");
                xml?.WriteElementString("PreviousRun4", $"{o.PreviousRun4}");
                xml?.WriteElementString("PreviousRun5", $"{o.PreviousRun5}");
                xml?.WriteElementString("PreviousRun6", $"{o.PreviousRun6}");

                xml?.WriteStartElement("ExecutableName");
                xml?.WriteAttributeString("title", "Note: The name of the executable tracked by the pf file");
                xml?.WriteString(o.ExecutableName);
                xml?.WriteEndElement();

                xml?.WriteElementString("RunCount", $"{o.RunCount}");

                xml?.WriteStartElement("Size");
                xml?.WriteAttributeString("title", "Note: The size of the executable in bytes");
                xml?.WriteString(o.Size);
                xml?.WriteEndElement();

                xml?.WriteStartElement("Hash");
                xml?.WriteAttributeString("title", "Note: The calculated hash for the pf file that should match the hash in the source file name");
                xml?.WriteString(o.Hash);
                xml?.WriteEndElement();

                xml?.WriteStartElement("Version");
                xml?.WriteAttributeString("title", "Note: The operating system that generated the prefetch file");
                xml?.WriteString(o.Version);
                xml?.WriteEndElement();

                xml?.WriteElementString("Note", o.Note);

                xml?.WriteElementString("Volume0Name", o.Volume0Name);
                xml?.WriteElementString("Volume0Serial", o.Volume0Serial);
                xml?.WriteElementString("Volume0Created", o.Volume0Created);

                xml?.WriteElementString("Volume1Name", o.Volume1Name);
                xml?.WriteElementString("Volume1Serial", o.Volume1Serial);
                xml?.WriteElementString("Volume1Created", o.Volume1Created);

                xml?.WriteStartElement("Directories");
                xml?.WriteAttributeString("title", "A comma separated list of all directories accessed by the executable");
                xml?.WriteString(o.Directories);
                xml?.WriteEndElement();

                xml?.WriteStartElement("FilesLoaded");
                xml?.WriteAttributeString("title", "A comma separated list of all files that were loaded by the executable");
                xml?.WriteString(o.FilesLoaded);
                xml?.WriteEndElement();

                xml?.WriteEndElement();
            }

            //Close CSV stuff
            streamWriter?.Flush();
            streamWriter?.Close();

            streamWriterTl?.Flush();
            streamWriterTl?.Close();

            //Close XML
            xml?.WriteEndElement();
            xml?.WriteEndDocument();
            xml?.Flush();
        }
        catch (Exception ex)
        {
            _logger.Error($"Error exporting data! Error: {ex.Message}");
        }
    }
}
/// <summary>Logs the message of an otherwise-unhandled WPF dispatcher exception.</summary>
private void App_OnDispatcherUnhandledException(object sender, DispatcherUnhandledExceptionEventArgs e)
    => Logger.Warn(e.Exception.Message);
/// <summary>
/// Saves the supplementary-agreement entity to RX.
/// </summary>
/// <param name="logger">Logger.</param>
/// <param name="supplementEntity">Unused in this override as far as the visible body shows — TODO confirm against the base class.</param>
/// <param name="shift">Horizontal shift in the XLSX document. Needed when processing documents composed of fields of several entities.</param>
/// <returns>List of errors/warnings collected during the import.</returns>
public override IEnumerable<Structures.ExceptionsStruct> SaveToRX(NLog.Logger logger, bool supplementEntity, int shift = 0)
{
    var exceptionList = new List<Structures.ExceptionsStruct>();

    using (var session = new Session())
    {
        var regNumber = this.Parameters[shift + 0];
        var regDate = DateTime.MinValue;
        var style = NumberStyles.Number | NumberStyles.AllowCurrencySymbol;
        var culture = CultureInfo.CreateSpecificCulture("en-GB");
        var regDateDouble = 0.0;

        // Dates arrive as OLE Automation serial numbers (numeric XLSX cells);
        // a non-empty value that fails to parse is a hard error.
        if (!string.IsNullOrWhiteSpace(this.Parameters[shift + 1]) &&
            !double.TryParse(this.Parameters[shift + 1].Trim(), style, culture, out regDateDouble))
        {
            var message = string.Format("Не удалось обработать дату регистрации \"{0}\".", this.Parameters[shift + 1]);
            exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Error, Message = message });
            logger.Error(message);
            return (exceptionList);
        }
        else
        {
            if (!string.IsNullOrEmpty(this.Parameters[shift + 1].ToString()))
            {
                regDate = DateTime.FromOADate(regDateDouble);
            }
        }

        var regNumberLeadingDocument = this.Parameters[shift + 2];
        var regDateLeadingDocument = DateTime.MinValue;
        var regDateLeadingDocumentDouble = 0.0;

        if (!string.IsNullOrWhiteSpace(this.Parameters[shift + 3]) &&
            !double.TryParse(this.Parameters[shift + 3].Trim(), style, culture, out regDateLeadingDocumentDouble))
        {
            var message = string.Format("Не удалось обработать дату регистрации ведущего документа \"{0}\".", this.Parameters[shift + 3]);
            exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Error, Message = message });
            logger.Error(message);
            return (exceptionList);
        }
        else
        {
            if (!string.IsNullOrEmpty(this.Parameters[shift + 3].ToString()))
            {
                regDateLeadingDocument = DateTime.FromOADate(regDateLeadingDocumentDouble);
            }
        }

        // Resolve referenced directory entities; each missing mandatory reference aborts the import.
        var counterparty = BusinessLogic.GetConterparty(session, this.Parameters[shift + 4], exceptionList, logger);
        if (counterparty == null)
        {
            var message = string.Format("Не найден контрагент \"{0}\".", this.Parameters[shift + 4]);
            exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Error, Message = message });
            logger.Error(message);
            return (exceptionList);
        }

        var documentKind = BusinessLogic.GetDocumentKind(session, this.Parameters[shift + 5], exceptionList, logger);
        if (documentKind == null)
        {
            var message = string.Format("Не найден вид документа \"{0}\".", this.Parameters[shift + 5]);
            exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Error, Message = message });
            logger.Error(message);
            return (exceptionList);
        }

        var subject = this.Parameters[shift + 6];

        var businessUnit = BusinessLogic.GetBusinessUnit(session, this.Parameters[shift + 7], exceptionList, logger);
        if (businessUnit == null)
        {
            var message = string.Format("Не найдено подразделение \"{0}\".", this.Parameters[shift + 7]);
            exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Error, Message = message });
            logger.Error(message);
            return (exceptionList);
        }

        var department = BusinessLogic.GetDepartment(session, this.Parameters[shift + 8], exceptionList, logger);
        if (department == null)
        {
            var message = string.Format("Не найдено подразделение \"{0}\".", this.Parameters[shift + 8]);
            exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Error, Message = message });
            logger.Error(message);
            return (exceptionList);
        }

        var filePath = this.Parameters[shift + 9];

        // "Valid from" date, again as an OADate serial number.
        var validFrom = DateTime.MinValue;
        var validFromDouble = 0.0;
        if (!string.IsNullOrWhiteSpace(this.Parameters[shift + 10]) &&
            !double.TryParse(this.Parameters[shift + 10].Trim(), style, culture, out validFromDouble))
        {
            var message = string.Format("Не удалось обработать значение в поле \"Действует с\" \"{0}\".", this.Parameters[shift + 10]);
            exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Error, Message = message });
            logger.Error(message);
            return (exceptionList);
        }
        else
        {
            if (!string.IsNullOrEmpty(this.Parameters[shift + 10].ToString()))
            {
                validFrom = DateTime.FromOADate(validFromDouble);
            }
            //else
            //validFrom = null;
        }

        // "Valid till" date.
        var validTill = DateTime.MinValue;
        var validTillDouble = 0.0;
        if (!string.IsNullOrWhiteSpace(this.Parameters[shift + 11]) &&
            !double.TryParse(this.Parameters[shift + 11].Trim(), style, culture, out validTillDouble))
        {
            var message = string.Format("Не удалось обработать значение в поле \"Действует по\" \"{0}\".", this.Parameters[shift + 11]);
            exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Error, Message = message });
            logger.Error(message);
            return (exceptionList);
        }
        else
        {
            if (!string.IsNullOrEmpty(this.Parameters[shift + 11].ToString()))
            {
                validTill = DateTime.FromOADate(validTillDouble);
            }
            //else
            //validTill = null;
        }

        var totalAmount = 0.0;
        if (!string.IsNullOrWhiteSpace(this.Parameters[shift + 12]) &&
            !double.TryParse(this.Parameters[shift + 12].Trim(), style, culture, out totalAmount))
        {
            var message = string.Format("Не удалось обработать значение в поле \"Сумма\" \"{0}\".", this.Parameters[shift + 12]);
            exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Error, Message = message });
            logger.Error(message);
            return (exceptionList);
        }

        var currency = BusinessLogic.GetCurrency(session, this.Parameters[shift + 13], exceptionList, logger);
        if (!string.IsNullOrEmpty(this.Parameters[shift + 13].Trim()) && currency == null)
        {
            var message = string.Format("Не найдено соответствующее наименование валюты \"{0}\".", this.Parameters[shift + 13]);
            exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Error, Message = message });
            logger.Error(message);
            return (exceptionList);
        }

        var lifeCycleState = BusinessLogic.GetPropertyLifeCycleState(session, this.Parameters[shift + 14]);
        if (!string.IsNullOrEmpty(this.Parameters[shift + 14].Trim()) && lifeCycleState == null)
        {
            var message = string.Format("Не найдено соответствующее значение состояния \"{0}\".", this.Parameters[shift + 14]);
            exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Error, Message = message });
            logger.Error(message);
            return (exceptionList);
        }

        // Missing employees are only warnings — the import continues with a null reference.
        var responsibleEmployee = BusinessLogic.GetEmployee(session, this.Parameters[shift + 15].Trim(), exceptionList, logger);
        if (!string.IsNullOrEmpty(this.Parameters[shift + 15].Trim()) && responsibleEmployee == null)
        {
            var message = string.Format("Не найден Ответственный \"{3}\". Доп. соглашение: \"{0} {1} {2}\". ", regNumber, regDate.ToString(), counterparty, this.Parameters[shift + 15].Trim());
            exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Warn, Message = message });
            logger.Warn(message);
        }

        var ourSignatory = BusinessLogic.GetEmployee(session, this.Parameters[shift + 16].Trim(), exceptionList, logger);
        if (!string.IsNullOrEmpty(this.Parameters[shift + 16].Trim()) && ourSignatory == null)
        {
            var message = string.Format("Не найден Подписывающий \"{3}\". Доп. соглашение: \"{0} {1} {2}\". ", regNumber, regDate.ToString(), counterparty, this.Parameters[shift + 16].Trim());
            exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Warn, Message = message });
            logger.Warn(message);
        }

        var note = this.Parameters[shift + 17];

        try
        {
            // Duplicate check: abort if a supplementary agreement with the same registration number exists.
            var supAgreements = Enumerable.ToList(session.GetAll<Sungero.Contracts.ISupAgreement>().Where(x => x.RegistrationNumber == regNumber));
            var supAgreement = (Enumerable.FirstOrDefault<Sungero.Contracts.ISupAgreement>(supAgreements));
            if (supAgreement != null)
            {
                var message = string.Format("Доп.соглашение не может быть импортировано. Найден дубль с такими же реквизитами \"Дата документа\" {0} и \"Рег. №\" {1}.", regDate.ToString("d"), regNumber);
                exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Error, Message = message });
                logger.Error(message);
                return (exceptionList);
            }

            // The leading contract is located by its registration date and number.
            var contracts = Enumerable.ToList(session.GetAll<Sungero.Contracts.IContract>().Where(x => x.RegistrationDate == regDateLeadingDocument && x.RegistrationNumber == regNumberLeadingDocument));
            var leadingDocument = (Enumerable.FirstOrDefault<Sungero.Contracts.IContract>(contracts));
            if (leadingDocument == null)
            {
                var message = string.Format("Доп.соглашение не может быть импортировано. Не найден ведущий документ с реквизитами \"Дата документа\" {0} и \"Рег. №\" {1}.", regDateLeadingDocument.ToString("d"), regNumberLeadingDocument);
                exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Error, Message = message });
                logger.Error(message);
                return (exceptionList);
            }

            // HACK: two sessions are used — data is loaded in the first, the system object
            // is created/populated in the second.
            supAgreement = session.Create<Sungero.Contracts.ISupAgreement>();
            session.Clear();
            session.Dispose();
            supAgreement.LeadingDocument = leadingDocument;

            using (var session1 = new Session())
            {
                supAgreement.Counterparty = counterparty;
                // DateTime.MinValue marks "not provided" for the optional date fields.
                if (regDate != DateTime.MinValue)
                {
                    supAgreement.RegistrationDate = regDate;
                }
                supAgreement.RegistrationNumber = regNumber;
                supAgreement.DocumentKind = documentKind;
                supAgreement.Subject = subject;
                supAgreement.BusinessUnit = businessUnit;
                supAgreement.Department = department;
                if (validFrom != DateTime.MinValue)
                {
                    supAgreement.ValidFrom = validFrom;
                }
                if (validTill != DateTime.MinValue)
                {
                    supAgreement.ValidTill = validTill;
                }
                supAgreement.TotalAmount = totalAmount;
                supAgreement.Currency = currency;
                supAgreement.LifeCycleState = lifeCycleState;
                supAgreement.ResponsibleEmployee = responsibleEmployee;
                supAgreement.OurSignatory = ourSignatory;
                supAgreement.Note = note;
                supAgreement.Save();
                session1.SubmitChanges();
            }

            // Attach the document body from disk when a file path was provided.
            if (!string.IsNullOrWhiteSpace(filePath))
            {
                exceptionList.Add(BusinessLogic.ImportBody(session, supAgreement, filePath, logger));
            }

            // Optional registration in a document register, driven by the "doc_register_id" extra parameter.
            var documentRegisterId = 0;
            if (ExtraParameters.ContainsKey("doc_register_id"))
            {
                if (int.TryParse(ExtraParameters["doc_register_id"], out documentRegisterId))
                {
                    exceptionList.AddRange(BusinessLogic.RegisterDocument(session, supAgreement, documentRegisterId, regNumber, regDate, Constants.RolesGuides.RoleContractResponsible, logger));
                }
                else
                {
                    var message = string.Format("Не удалось обработать параметр \"doc_register_id\". Полученное значение: {0}.", ExtraParameters["doc_register_id"]);
                    exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Error, Message = message });
                    logger.Error(message);
                    return (exceptionList);
                }
            }
        }
        catch (Exception ex)
        {
            // NOTE(review): the exception is recorded but not logged via `logger` here, unlike the paths above.
            exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Error, Message = ex.Message });
            return (exceptionList);
        }
    }

    return (exceptionList);
}
// Starts the WebSocket bridge once the Discord client is ready, then serves
// login/notify/event messages from game servers, relaying them to Discord channels.
// NOTE(review): this method contains two busy-wait loops (startup wait and the trailing
// `while (input != "exit")` loop whose body never re-reads input) that spin a CPU core —
// candidates for Task.Delay-based waits; left untouched here.
private async Task GetReadyWS()
{
    if (KKK.IsClientReady != true)
    {
        Console.WriteLine("Waiting for client to be ready..."); //Dont start events until discord api is ready
    }
    do
    {
        // nothing, just pauses the tasks until discord is ready
        // NOTE(review): busy-wait — burns CPU until IsClientReady flips.
    } while (KKK.IsClientReady != true);

    Console.WriteLine("It's ready, lets start, shall we?");

    DatabaseContext context = new DatabaseContext();

    // Release builds serve wss with a certificate from config; debug builds use plain ws.
#if RELEASE
    var server = new WebSocketServer("wss://0.0.0.0:8181");
    var config = BuildConfig();
    server.Certificate = new X509Certificate2(config["certPath"], config["certPassword"]);
#elif DEBUG
    var server = new WebSocketServer("ws://0.0.0.0:8181");
#endif

    server.Start(socket =>
    {
        socket.OnOpen = () =>
        {
            // Only one live socket per client IP: replace any existing connection from the same address.
            if (allSockets.Any(client => client.ConnectionInfo.ClientIpAddress == socket.ConnectionInfo.ClientIpAddress))
            {
                var socket2 = allSockets.Find(client => client.ConnectionInfo.ClientIpAddress == socket.ConnectionInfo.ClientIpAddress);
                try
                {
                    allSockets.Remove(socket2);
                }
                catch (Exception ex)
                {
                    logger.Warn(ex.ToString(), "An error occured.");
                }
                //Little security, dont let same ip to connect twice
                allSockets.Add(socket);
            }
            else
            {
                try
                {
                    allSockets.Remove(socket);
                }
                catch (Exception ex)
                {
                    logger.Warn(ex.ToString(), "An error occured.");
                }
                allSockets.Add(socket);
            }
        };

        socket.OnClose = () =>
        {
            Console.WriteLine("Closed connection: " + socket.ConnectionInfo.ClientIpAddress);
            allSockets.Remove(socket);

            // Drop the client's auth token from the alive list when its socket closes.
            if (context.Auth.Any(o => o.IP == socket.ConnectionInfo.ClientIpAddress))
            {
                string token = context.Auth.AsQueryable().Where(a => a.IP == socket.ConnectionInfo.ClientIpAddress).Single().Token;
                if (AliveTokens.Contains(token))
                {
                    AliveTokens.Remove(token);
                    logger.Info($"{socket.ConnectionInfo.ClientIpAddress} Removed from alive tokens list");
                }
            }
        };

        socket.OnMessage = message =>
        {
            // Messages are pipe-delimited: "<command>|<arg1>|<arg2>|...".
            Task.Run(async() =>
            {
                try
                {
#if DEBUG
                    Console.WriteLine($"Message Received: {message}");
#endif
                    string[] codes = message.Split('|');
                    switch (codes[0])
                    {
                        case "login":
                            // Known token: mark it alive, report linked guild, and push event subscriptions.
                            // Unknown token: park the IP/token pair in the OnHold table for later approval.
                            string token = codes[1];
                            if (context.Auth.Any(o => o.Token == token))
                            {
                                if (!AliveTokens.Contains(context.Auth.AsQueryable().Where(a => a.IP == socket.ConnectionInfo.ClientIpAddress).Single().Token))
                                {
                                    try
                                    {
                                        AliveTokens.Add(token);
                                        Console.WriteLine(
                                            $"{socket.ConnectionInfo.ClientIpAddress}:{token} Added to alive tokens list");
#if DEBUG
                                        Console.WriteLine("Here is Serverid: " + context.Auth.AsQueryable().Where(a => a.Token == token).Single().Serverid);
#endif
                                        if (KKK.Client.GetGuild(context.Auth.AsQueryable().Where(a => a.Token == token).Single().Serverid) != null)
                                        {
                                            var guild = KKK.Client.GetGuild(
                                                context.Auth.AsQueryable().Where(a => a.Token == token).Single().Serverid);
                                            await socket.Send($"status|Linked|{guild.Name}");
                                        }
                                        else
                                        {
                                            await socket.Send($"status|Linked|null");
                                        }
                                        if (context.Notify.Any(o => o.Serverid == context.Auth.AsQueryable().Where(a => a.Token == token).Single().Serverid))
                                        {
                                            await socket.Send($"subscribe|playerEvent");
                                        }
                                        if (context.OnJoin.Any(o => (o.Serverid == context.Auth.AsQueryable().Where(a => a.Token == token).Single().Serverid) && o.sevent == 1))
                                        {
                                            await socket.Send($"subscribe|serverListEvent");
                                        }
                                    }
                                    catch (Exception ex)
                                    {
                                        logger.Warn(ex.ToString(), "An error occured.");
                                    }
                                }
                            }
                            else if (!(context.Auth.Any(o => o.Token == token)))
                            {
                                if (!(context.OnHold.Any(o => o.IP == socket.ConnectionInfo.ClientIpAddress)))
                                {
                                    using (var contextt = new DatabaseContext())
                                    {
                                        OnHold newonhold = new OnHold();
                                        newonhold.IP = socket.ConnectionInfo.ClientIpAddress;
                                        newonhold.Token = token;
                                        contextt.Add(newonhold);
                                        contextt.SaveChanges();
                                    }
                                }
                                else if ((context.OnHold.Any(o => o.IP == socket.ConnectionInfo.ClientIpAddress)))
                                {
                                    var onhold = context.OnHold.First(a => a.IP == socket.ConnectionInfo.ClientIpAddress);
                                    onhold.Token = token;
                                    context.SaveChanges();
                                }
                                Console.WriteLine("Token: " + token + $" IP: {socket.ConnectionInfo.ClientIpAddress} Added to onhold list");
                                await socket.Send("status|OnHold");
                            }
                            break;

                        case "notify":
                            // Relay a start/stop command result back to the Discord message that issued it.
                            // Payload: notify|servername|discordname|channelid|messageid|event|resultCode
                            if (AliveTokens.Contains(context.Auth.AsQueryable().Where(a => a.IP == socket.ConnectionInfo.ClientIpAddress).Single().Token))
                            {
                                await Task.Run(async() =>
                                {
                                    try
                                    {
                                        string servername = codes[1];
                                        string discordname = codes[2];
                                        string channelid = codes[3];
                                        string messageid = codes[4];
                                        string eventt = codes[5];
                                        string result = codes[6];
                                        switch (eventt)
                                        {
                                            case "stop":
                                                switch (result)
                                                {
                                                    //notify|{servername}|{discordname}|{channelid}|{messageid}|{eventt}|400|this is a test
                                                    case "20": //ok
                                                        await BotTools.NotificationControlAsync(
                                                            ulong.Parse(messageid), ulong.Parse(channelid),
                                                            $"Your `{servername}` stopped successfully, command was executed by `{discordname}`",
                                                            int.Parse(result));
                                                        break;
                                                    case "40": //its stopped already
                                                        await BotTools.NotificationControlAsync(
                                                            ulong.Parse(messageid), ulong.Parse(channelid),
                                                            $"Your `{servername}` is stopped already, command was executed by `{discordname}`",
                                                            int.Parse(result));
                                                        break;
                                                    case "44": //server not found
                                                        await BotTools.NotificationControlAsync(
                                                            ulong.Parse(messageid), ulong.Parse(channelid),
                                                            $"I couldnt find your `{servername}` server, please make sure you typed the right name, command was executed by `{discordname}`",
                                                            int.Parse(result));
                                                        break;
                                                }
                                                break;
                                            case "start":
                                                switch (result)
                                                {
                                                    //notify|{servername}|{discordname}|{channelid}|{messageid}|{eventt}|400|this is a test
                                                    case "20": //ok
                                                        await BotTools.NotificationControlAsync(
                                                            ulong.Parse(messageid), ulong.Parse(channelid),
                                                            $"Your `{servername}` server is starting.. , command was executed by `{discordname}`",
                                                            int.Parse(result));
                                                        break;
                                                    case "21": //ok
                                                        await BotTools.NotificationControlAsync(
                                                            ulong.Parse(messageid), ulong.Parse(channelid),
                                                            $"Your `{servername}` started successfully, command was executed by `{discordname}`",
                                                            int.Parse(result));
                                                        break;
                                                    case "40": //its stopped already
                                                        await BotTools.NotificationControlAsync(
                                                            ulong.Parse(messageid), ulong.Parse(channelid),
                                                            $"Your `{servername}` is running already, command was executed by `{discordname}`",
                                                            int.Parse(result));
                                                        break;
                                                    case "44": //server not found
                                                        await BotTools.NotificationControlAsync(
                                                            ulong.Parse(messageid), ulong.Parse(channelid),
                                                            $"I couldnt find your `{servername}` server, please make sure you typed the right name, command was executed by `{discordname}`",
                                                            int.Parse(result));
                                                        break;
                                                }
                                                break;
                                        }
                                    }
                                    catch (Exception ex)
                                    {
                                        logger.Warn($"Couldn't process request: {ex.Message}");
                                    }
                                });
                            }
                            break;

                        case "event":
                            // Player join/leave and server-list events pushed from the game server;
                            // forwarded to the configured Discord channel for the linked guild.
                            await Task.Run(async() =>
                            {
                                string eventname = codes[1];
                                string servername = codes[2];
                                string playername = codes[3];
                                try
                                {
                                    string token = context.Auth.AsQueryable().Where(a => a.IP == socket.ConnectionInfo.ClientIpAddress).Single().Token;
                                    if (AliveTokens.Contains(token))
                                    {
                                        if (KKK.Client.GetGuild(context.Auth.AsQueryable().Where(a => a.Token == token).Single().Serverid) != null)
                                        {
                                            if (eventname.StartsWith("p") && context.Notify.AsQueryable().Any(x => x.Serverid == context.Auth.AsQueryable().Where(a => a.Token == token).Single().Serverid) &&
                                                KKK.Client.GetGuild(context.Auth.AsQueryable().Where(a => a.Token == token).Single().Serverid)
                                                    .GetChannel(
                                                        context.Notify.AsQueryable().Where(a => a.Serverid == context.Auth.AsQueryable().Where(a => a.Token == token).Single().Serverid).Single().Channelid) != null)
                                            {
                                                switch (eventname)
                                                {
                                                    case "pjoin":
                                                        await BotTools.NotificationControlAsync(0,
                                                            context.Notify.AsQueryable().Where(a => a.Serverid == context.Auth.AsQueryable().Where(a => a.Token == token).Single().Serverid).Single().Channelid,
                                                            $"***{playername}*** Just joined ***{servername}***", 0, 1);
                                                        break;
                                                    case "pleave":
                                                        await BotTools.NotificationControlAsync(0,
                                                            context.Notify.AsQueryable().Where(a => a.Serverid == context.Auth.AsQueryable().Where(a => a.Token == token).Single().Serverid).Single().Channelid,
                                                            $"***{playername}*** Just left ***{servername}***", 0, 1);
                                                        break;
                                                }
                                            }
                                            else if (eventname.StartsWith("serverList") && context.OnJoin.Any(o => (o.Serverid == context.Auth.AsQueryable().Where(a => a.Token == token).Single().Serverid) && o.sevent == 1) &&
                                                     KKK.Client
                                                         .GetGuild(context.Auth.AsQueryable().Where(a => a.Token == token).Single().Serverid)
                                                         .GetChannel(
                                                             context.OnJoin.AsQueryable().Where(a => a.Serverid == context.Auth.AsQueryable().Where(a => a.Token == token).Single().Serverid).Single().Channelid) != null)
                                            {
                                                await Task.Run(async() =>
                                                {
                                                    try
                                                    {
                                                        if (AliveTokens.Contains(token) && (context.OnJoin.Any(o => (o.Serverid == context.Auth.AsQueryable().Where(a => a.Token == token).Single().Serverid) && o.sevent == 1)))
                                                        {
                                                            ulong guild = context.Auth.AsQueryable().Where(a => a.Token == token).Single().Serverid;
                                                            if (KKK.Client.GetGuild(guild)
                                                                    .GetChannel(
                                                                        context.OnJoin.AsQueryable().Where(a => a.Serverid == context.Auth.AsQueryable().Where(a => a.Token == token).Single().Serverid).Single().Channelid) != null)
                                                            {
                                                                // Edit the pinned server-list message if one exists; otherwise create it
                                                                // and remember its id for future updates.
                                                                if (context.OnJoin.AsQueryable().Where(a => a.Serverid == context.Auth.AsQueryable().Where(a => a.Token == token).Single().Serverid).Single().Messageid != 0)
                                                                {
                                                                    IMessageChannel chan = (IMessageChannel)KKK.Client.GetChannel(context.OnJoin.AsQueryable().Where(a => a.Serverid == context.Auth.AsQueryable().Where(a => a.Token == token).Single().Serverid).Single().Channelid);
                                                                    IUserMessage msg = (IUserMessage)await chan.GetMessageAsync(context.OnJoin.AsQueryable().Where(a => a.Serverid == context.Auth.AsQueryable().Where(a => a.Token == token).Single().Serverid).Single().Messageid);
                                                                    if (msg != null)
                                                                    {
                                                                        await msg.ModifyAsync(msgProperty => { msgProperty.Embed = BuildServerListEmbed(message); });
                                                                    }
                                                                    else
                                                                    {
                                                                        var msgg = await chan.SendMessageAsync(null, false, BotTools.Embed(
                                                                            "Dont remove this message, this message will be updated continuously", 20));
                                                                        var server = context.OnJoin.First(a => a.Serverid == guild);
                                                                        server.Messageid = msgg.Id;
                                                                        server.sevent = 1;
                                                                        context.SaveChanges();
                                                                        await msgg.ModifyAsync(msgProperty => { msgProperty.Embed = BuildServerListEmbed(message); });
                                                                    }
                                                                }
                                                            }
                                                        }
                                                    }
                                                    catch (Exception ex)
                                                    {
                                                        Console.WriteLine("Exception occured: " + ex.ToString());
                                                    }
                                                });
                                            }
                                        }
                                    }
                                }
                                catch (Exception ex)
                                {
                                    Console.WriteLine("Exception occured: " + ex.ToString());
                                }
                            });
                            break;

                        default:
                            // ban ip in case gets to x requests To-DO
                            Console.WriteLine($"Someone is trying to troll here, invalid packet: {message}");
                            break;
                    }
                }
                catch (Exception ex)
                {
                    Console.WriteLine(ex.ToString());
                }
            });
            //Console.WriteLine(message);
            //allSockets.ToList().ForEach(s => s.Send(message));
        };
    });

    var input = Console.ReadLine();
    while (input != "exit")
    {
        // NOTE(review): infinite busy loop — `input` is never re-read inside the loop,
        // so this spins forever once any non-"exit" line (or EOF/null) is read.
        //foreach (var socket in allSockets.ToList())
        //{
        //    await socket.Send(input);
        //}
        //input = Console.ReadLine();
    }
}
/// <summary>
/// Downloads a shader (and its texture assets) from the API, writes each render
/// pass plus metadata into a temp directory and zips it into a pack file.
/// </summary>
/// <param name="id">Shader id substituted into <c>ApiUrl</c>.</param>
/// <param name="apiKey">API key substituted into <c>ApiUrl</c>.</param>
/// <param name="outputFilename">Pack file name without extension; defaults to the shader's name.</param>
/// <param name="outputDirectory">Target directory; falls back to the current directory when missing.</param>
/// <param name="vertexShader">Shared vertex shader source written as shared.vs.glsl.</param>
/// <param name="overrideOutput">When true, an existing pack file is deleted and rewritten; when false, packing is aborted.</param>
private static void Pack(string id, string apiKey, string outputFilename = null, string outputDirectory = null, string vertexShader = null, bool overrideOutput = true)
{
    string apiUrl = ApiUrl.Replace("{id}", id).Replace("{apiKey}", apiKey);
    string json = Http.Get(apiUrl);
    JToken jRoot = JToken.Parse(json);
    JToken root = jRoot["Shader"];

    // In DEBUG builds keep the work directory next to the exe for inspection.
    string baseDirectory = null;
    bool deleteOnDispose = true;
#if DEBUG
    baseDirectory = ".";
    deleteOnDispose = false;
#endif

    using (TempDirectory tempDirectory = Util.CreateTempDirectory(baseDirectory, deleteOnDispose))
    {
        DirectoryInfo assetsFolder = Directory.CreateDirectory(Path.Combine(tempDirectory, "assets"));

#if DEBUG
        // Write full JSON response.
        Logger.Info("Writing full JSON response...");
        File.WriteAllText(Path.Combine(tempDirectory, "response.json"), jRoot.ToString(Formatting.Indented));
#endif

        // Write info file.
        Logger.Info("Writing info file...");
        File.WriteAllText(Path.Combine(tempDirectory, "info.json"), root["info"].ToString(Formatting.Indented));

        // Write default vertex shader shared by all passes.
        Logger.Info("Writing shared vertex shader...");
        File.WriteAllText(Path.Combine(tempDirectory, "shared.vs.glsl"), vertexShader, Encoding.UTF8);

        // Write render passes.
        Logger.Info("Writing render passes...");
        int index = 0;
        foreach (JToken renderPass in root["renderpass"])
        {
            string name = renderPass["name"].Value<string>();
            string description = renderPass["description"].Value<string>();
            string type = renderPass["type"].Value<string>();

            if (!(type == "buffer" || type == "common" || type == "image"))
            {
                Logger.Warn(" - Unknown pass type '{0}' for render pass '{1}'", type, name);
                continue;
            }

            // Write GLSL shader file(s). The "common" pass shares code between
            // passes and does not consume a pass index (index-- here cancels the
            // index++ at the bottom of the loop).
            string renderPassName = string.Empty;
            if (type == "common")
            {
                index--;
                Logger.Debug(" - Writing common shader - #{0} {1} ({2})...", index, name, type);
                renderPassName = "common";
            }
            else
            {
                Logger.Debug(" - Writing render pass shader - #{0} {1} ({2})...", index, name, type);
                renderPassName = Util.MakeValidFileName($"rp{index}-{name.Replace(" ", "_")}.fs").ToLower();
            }

            string glslFilename = $"{renderPassName}.glsl";
            string code = renderPass["code"].Value<string>();
            File.WriteAllText(Path.Combine(tempDirectory, glslFilename), code, Encoding.UTF8);

            if (type != "common")
            {
                Logger.Debug(" - Processing inputs...");
                List<JObject> inputs = new List<JObject>();
                foreach (JToken input in renderPass["inputs"])
                {
                    int inputId = input["id"].Value<int>();
                    string inputSrc = input["src"].Value<string>();
                    string inputType = input["ctype"].Value<string>();
                    int inputChannel = input["channel"].Value<int>();

                    if (inputType == "buffer")
                    {
                        inputs.Add(new JObject
                        {
                            { "id", inputId },
                            { "type", inputType },
                            { "channel", inputChannel },
                            { "sampler", new JObject
                                {
                                    { "filter", input["sampler"]["filter"].Value<string>() },
                                    { "wrap", input["sampler"]["wrap"].Value<string>() }
                                }
                            }
                        });
                    }
                    else if (inputType == "texture")
                    {
                        // Download the texture asset into the pack's assets folder.
                        string srcUrl = ApiAssetUrl.Replace("{stub}", inputSrc);
                        Logger.Info(" - Fetching texture asset {0}", srcUrl);
                        int lastSlashOfSrc = inputSrc.LastIndexOf('/');
                        string assetFilename = $"{inputSrc.Substring(lastSlashOfSrc + 1)}";
                        string assetFilepath = Path.Combine(assetsFolder.FullName, assetFilename);
                        Http.GetFile(srcUrl, assetFilepath);

                        inputs.Add(new JObject
                        {
                            { "id", inputId },
                            { "type", inputType },
                            { "src", $"assets/{assetFilename}" },
                            { "channel", inputChannel },
                            { "sampler", new JObject
                                {
                                    { "filter", input["sampler"]["filter"].Value<string>() },
                                    { "wrap", input["sampler"]["wrap"].Value<string>() },
                                    { "vflip", input["sampler"]["vflip"].Value<bool>() }
                                }
                            }
                        });
                    }
                    else if (inputType == "volume")
                    {
                        // Volume inputs are recognized but not yet supported.
                        Logger.Warn(" - Unknown channel type '{0}' for render pass '{1}'", inputType, name);
                    }
                    else
                    {
                        Logger.Warn(" - Unknown channel type '{0}' for render pass '{1}'", inputType, name);
                    }
                }

                Logger.Debug(" - Processing outputs...");
                IEnumerable<int> outputs = renderPass["outputs"].Select(x => x["id"].Value<int>());

                Logger.Debug(" - Writing configuration file...");
                string configFilename = $"{renderPassName}.json";
                File.WriteAllText(Path.Combine(tempDirectory, configFilename),
                    JsonConvert.SerializeObject(new { name, description, type, code = glslFilename, inputs, outputs }, Formatting.Indented),
                    Encoding.UTF8);
            }

            index++;
        }

        // Zip directory.
        Logger.Info("Packing...");
        string packExtension = "dpst";
#if DEBUG
        packExtension = "zip";
#endif
        if (string.IsNullOrEmpty(outputFilename))
        {
            outputFilename = $"{Util.MakeValidFileName(root["info"]["name"].Value<string>())}.{packExtension}";
        }
        else
        {
            outputFilename = $"{outputFilename}.{packExtension}";
        }

        if (outputDirectory == null || !Directory.Exists(outputDirectory))
        {
            outputDirectory = Directory.GetCurrentDirectory();
        }

        string zipFile = Path.Combine(outputDirectory, outputFilename);
        if (File.Exists(zipFile))
        {
            if (overrideOutput)
            {
                Logger.Warn("Overwriting output pack file '{0}'", outputFilename);
                File.Delete(zipFile);
            }
            else
            {
                Logger.Error("A pack file already exists with the output filename '{0}'", outputFilename);
                // FIX: previously execution fell through and overwrote the file
                // despite overrideOutput being false.
                return;
            }
        }

        Logger.Info("Packing to '{0}'", outputFilename);
        ZipFile.CreateFromDirectory(tempDirectory, zipFile, CompressionLevel.Optimal, false, Encoding.UTF8);
        Logger.Info("Done.");
    }
}
public void DownloadMessage(string uidl, string destination_path) { var pop = MailClientBuilder.Pop(); try { _log.Debug("Connecting to {0}", Account.EMail); switch (Account.IncomingEncryptionType) { case EncryptionType.StartTLS: pop.ConnectTLS(Account.Server, Account.Port); break; case EncryptionType.SSL: pop.ConnectSsl(Account.Server, Account.Port); break; case EncryptionType.None: pop.Connect(Account.Server, Account.Port); break; } if (Account.AuthenticationTypeIn == SaslMechanism.Login) { pop.Login(Account.Account, Account.Password, Account.Server); } else { pop.Authenticate(Account.Account, Account.Password, Account.AuthenticationTypeIn); } _log.Debug("GetCAPA()"); GetCAPA(pop); _log.Info("Account: MessagesCount={0}, TotalSize={1}, UIDL={2}, LoginDelay={3}", pop.MessageCount, pop.TotalSize, IsUidlSupported, Account.ServerLoginDelay); if (pop.UniqueIdExists(uidl)) { _log.Info("Message with this uidl exists!"); var index = pop.GetMessageIndex(uidl); _log.Info("StoreMessage(index: {0})", index); pop.StoreMessage(index, false, destination_path); if (File.Exists(destination_path)) { _log.Info("Message stored successfully!\r\n"); } else { _log.Error("Message is missing in destination path!\r\n"); } } else { _log.Info("Message with this uidl not exists!\r\n"); } } catch (Pop3Exception e) { if (e.Command.StartsWith("USER") || e.Command.StartsWith("PASS")) { if (OnAuthFailed != null) { OnAuthFailed(Account, e.Response); } _log.Warn("Retrieve() Pop3: {0} Port: {1} Account: '{2}' ErrorMessage:\r\n{3}\r\n", Account.Server, Account.Port, Account.Account, e.Message); } else { _log.Error("Retrieve() Pop3: {0} Port: {1} Account: '{2}' ErrorMessage:\r\n{3}\r\n", Account.Server, Account.Port, Account.Account, e.ToString()); } throw; } finally { try { if (pop.IsConnected) { pop.Disconnect(); } } catch { } } }
public void Warn <T>(T value) { _logger.Warn(value); }
public void LogWarning(string message) { _logger.Warn(message); }
/// <summary>
/// Saves the incoming-letter entity to RX.
/// </summary>
/// <param name="logger">Logger.</param>
/// <param name="supplementEntity">Supplement-entity flag; not referenced by this override (kept for the base signature).</param>
/// <param name="shift">Horizontal offset in the XLSX document. Needed when processing documents composed of elements of different entities.</param>
/// <returns>List of errors/warnings accumulated during the import (empty on full success).</returns>
public override IEnumerable<Structures.ExceptionsStruct> SaveToRX(NLog.Logger logger, bool supplementEntity, int shift = 0)
{
    var exceptionList = new List<Structures.ExceptionsStruct>();

    using (var session = new Session())
    {
        // Column layout relative to `shift`:
        //   +0 reg. number, +1 reg. date (OLE serial), +2 counterparty,
        //   +3 document kind, +4 subject, +5 department, +6 body file path,
        //   +7 letter date (OLE serial), +8 incoming number, +9 addressee,
        //   +10 delivery method, +11 note.
        var regNumber = this.Parameters[shift + 0];
        var regDate = DateTime.MinValue;
        var style = NumberStyles.Number | NumberStyles.AllowCurrencySymbol;
        var culture = CultureInfo.CreateSpecificCulture("en-GB");
        var regDateDouble = 0.0;

        // Registration date arrives as an Excel OLE-automation serial number.
        // An unparseable non-blank value is a hard error; blank leaves MinValue.
        if (!string.IsNullOrWhiteSpace(this.Parameters[shift + 1]) && !double.TryParse(this.Parameters[shift + 1].Trim(), style, culture, out regDateDouble))
        {
            var message = string.Format("Не удалось обработать дату регистрации \"{0}\".", this.Parameters[shift + 1]);
            exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Error, Message = message });
            logger.Error(message);
            return(exceptionList);
        }
        else
        {
            if (!string.IsNullOrEmpty(this.Parameters[shift + 1].ToString()))
            {
                regDate = DateTime.FromOADate(regDateDouble);
            }
        }

        // Mandatory references: counterparty, document kind, department.
        // Any miss aborts the import with an error.
        var counterparty = BusinessLogic.GetConterparty(session, this.Parameters[shift + 2], exceptionList, logger);
        if (counterparty == null)
        {
            var message = string.Format("Не найден контрагент \"{0}\".", this.Parameters[shift + 2]);
            exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Error, Message = message });
            logger.Error(message);
            return(exceptionList);
        }

        var documentKind = BusinessLogic.GetDocumentKind(session, this.Parameters[shift + 3], exceptionList, logger);
        if (documentKind == null)
        {
            var message = string.Format("Не найден вид документа \"{0}\".", this.Parameters[shift + 3]);
            exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Error, Message = message });
            logger.Error(message);
            return(exceptionList);
        }

        var subject = this.Parameters[shift + 4];

        var department = BusinessLogic.GetDepartment(session, this.Parameters[shift + 5], exceptionList, logger);
        if (department == null)
        {
            var message = string.Format("Не найдено подразделение \"{0}\".", this.Parameters[shift + 5]);
            exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Error, Message = message });
            logger.Error(message);
            return(exceptionList);
        }

        var filePath = this.Parameters[shift + 6];

        // Letter ("Dated") date: same OLE-serial handling as the registration date.
        var dated = DateTime.MinValue;
        var datedDouble = 0.0;
        if (!string.IsNullOrWhiteSpace(this.Parameters[shift + 7]) && !double.TryParse(this.Parameters[shift + 7].Trim(), style, culture, out datedDouble))
        {
            var message = string.Format("Не удалось обработать значение в поле \"Письмо от\" \"{0}\".", this.Parameters[shift + 7]);
            exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Error, Message = message });
            logger.Error(message);
            return(exceptionList);
        }
        else
        {
            if (!string.IsNullOrEmpty(this.Parameters[shift + 7].ToString()))
            {
                dated = DateTime.FromOADate(datedDouble);
            }
        }

        var inNumber = this.Parameters[shift + 8];

        // Optional references: a missing addressee or delivery method only
        // produces a warning and the import continues.
        var addressee = BusinessLogic.GetEmployee(session, this.Parameters[shift + 9].Trim(), exceptionList, logger);
        if (!string.IsNullOrEmpty(this.Parameters[shift + 9].Trim()) && addressee == null)
        {
            var message = string.Format("Не найден Адресат \"{2}\". Входящее письмо: \"{0} {1}\". ", regNumber, regDate.ToString(), this.Parameters[shift + 9].Trim());
            exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Warn, Message = message });
            logger.Warn(message);
        }

        var deliveryMethod = BusinessLogic.GetMailDeliveryMethod(session, this.Parameters[shift + 10].Trim(), exceptionList, logger);
        if (!string.IsNullOrEmpty(this.Parameters[shift + 10].Trim()) && deliveryMethod == null)
        {
            var message = string.Format("Не найден Способ доставки \"{2}\". Входящее письмо: \"{0} {1}\". ", regNumber, regDate.ToString(), this.Parameters[shift + 10].Trim());
            exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Warn, Message = message });
            logger.Warn(message);
        }

        var note = this.Parameters[shift + 11];

        try
        {
            // Duplicate check: same registration number AND same (non-empty)
            // registration date means the letter was already imported.
            var incomingLetters = Enumerable.ToList(session.GetAll<Sungero.RecordManagement.IIncomingLetter>().Where(x => x.RegistrationNumber == regNumber && regDate != DateTime.MinValue && x.RegistrationDate == regDate));
            var incomingLetter = (Enumerable.FirstOrDefault<Sungero.RecordManagement.IIncomingLetter>(incomingLetters));
            if (incomingLetter != null)
            {
                var message = string.Format("Входящее письмо не может быть импортировано. Найден дубль с такими же реквизитами \"Дата документа\" {0} и \"Рег. №\" {1}.", regDate.ToString("d"), regNumber);
                exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Error, Message = message });
                logger.Error(message);
                return(exceptionList);
            }

            // Create and populate the new incoming letter. MinValue sentinels
            // mean "date not provided" and are never written to the entity.
            incomingLetter = session.Create<Sungero.RecordManagement.IIncomingLetter>();
            if (regDate != DateTime.MinValue)
            {
                incomingLetter.RegistrationDate = regDate;
            }
            incomingLetter.RegistrationNumber = regNumber;
            incomingLetter.Correspondent = counterparty;
            incomingLetter.DocumentKind = documentKind;
            incomingLetter.Subject = subject;
            incomingLetter.Department = department;
            if (department != null)
            {
                incomingLetter.BusinessUnit = department.BusinessUnit;
            }
            if (dated != DateTime.MinValue)
            {
                incomingLetter.Dated = dated;
            }
            incomingLetter.InNumber = inNumber;
            incomingLetter.Addressee = addressee;
            incomingLetter.DeliveryMethod = deliveryMethod;
            incomingLetter.Note = note;
            incomingLetter.Save();

            // Attach the document body when a file path was supplied.
            if (!string.IsNullOrWhiteSpace(filePath))
            {
                exceptionList.Add(BusinessLogic.ImportBody(session, incomingLetter, filePath, logger));
            }

            // Optional registration in a document register taken from extra parameters.
            var documentRegisterId = 0;
            if (ExtraParameters.ContainsKey("doc_register_id"))
            {
                if (int.TryParse(ExtraParameters["doc_register_id"], out documentRegisterId))
                {
                    exceptionList.AddRange(BusinessLogic.RegisterDocument(session, incomingLetter, documentRegisterId, regNumber, regDate, Constants.RolesGuides.RoleIncomingDocumentsResponsible, logger));
                }
                else
                {
                    var message = string.Format("Не удалось обработать параметр \"doc_register_id\". Полученное значение: {0}.", ExtraParameters["doc_register_id"]);
                    exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Error, Message = message });
                    logger.Error(message);
                    return(exceptionList);
                }
            }
        }
        catch (Exception ex)
        {
            // NOTE(review): unlike the other error paths, this one does not log
            // the exception before returning — confirm whether that is intended.
            exceptionList.Add(new Structures.ExceptionsStruct { ErrorType = Constants.ErrorTypes.Error, Message = ex.Message });
            return(exceptionList);
        }

        // Commit only after the letter (and optional body/registration) succeeded.
        session.SubmitChanges();
    }

    return(exceptionList);
}
/// <summary> /// Inserts the EUI if not in table /// </summary> /// <param name="eui">EUI</param> /// <returns>ID</returns> public static int InsertEUI(long eui) { try { using (ManufacturingStoreEntities cx = new ManufacturingStoreEntities()) { // Get production site string mac = StationSetupUtility.GetMacAddress(); var production_stie_id = cx.StationSites.Where(s => s.StationMac == mac).Single().ProductionSiteId; if (production_stie_id == 0) { _logger.Warn($"Invalid production site id: {production_stie_id} for MAC: {mac}"); } // db eui is a string string euistr = eui.ToString("X16"); // Form query for the eui var q = cx.EuiLists.Where(e => e.EUI == euistr); if (q.Any()) { return(q.Single().Id); // I'm not sure we care what site previously coded the device // So I'm removing this for now... // Checking for a valid site id maybe better... // Check is the same site id //int db_site_id = q.Single().ProductionSiteId; //if (db_site_id != production_stie_id) //{ // string msg = $"EUI {euistr} already in db with site id = {db_site_id}, this machine is assigned site id {production_stie_id}"; // _logger.Error(msg); // throw new Exception(msg); //} //else //{ // return q.Single().Id; //} } EuiList euiList = new EuiList(); euiList.EUI = eui.ToString("X16"); euiList.ProductionSiteId = production_stie_id; cx.EuiLists.Add(euiList); cx.SaveChanges(); return(q.Single().Id); } } catch (Exception ex) { _logger.Error($"Exception in InsertEUI: {ex.Message}\r\n{ex.StackTrace}"); if (ex.GetBaseException().GetType() == typeof(SqlException)) { // I think I originally added this to skip errors when trying to insert already existing EUI // This should no longer happen as we now check for if already exists...So re-throw it SqlException sx = (SqlException)ex.GetBaseException(); if (sx.Number == 2627) { //Violation of primary key/Unique constraint throw; } else { throw; } } else { throw; } } }
/// <summary>
/// Entry point. Configures NLog (console-only fallback when nlog.config is
/// missing next to the executable), parses command-line options, recursively
/// sniffs --Dir for registry hives (files starting with the "regf" signature)
/// and either exports the keys listed in system.txt/software.txt/ntuser.txt to
/// a tab-separated CSV (--SaveTo mode) or runs the inherited RECmd-style
/// dump/search paths.
/// </summary>
/// <param name="args">Raw command-line arguments.</param>
private static void Main(string[] args)
{
    //TODO Live Registry support

    // Fall back to a minimal colored-console logging config when nlog.config
    // is absent; remember to warn once the logger exists.
    var dumpWarning = false;
    var nlogPath = Path.Combine(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location), "nlog.config");
    if (File.Exists(nlogPath) == false)
    {
        var config = new LoggingConfiguration();
        var loglevel = LogLevel.Info;
        var layout = @"${message}";
        var consoleTarget = new ColoredConsoleTarget();
        config.AddTarget("console", consoleTarget);
        consoleTarget.Layout = layout;
        var rule1 = new LoggingRule("*", loglevel, consoleTarget);
        config.LoggingRules.Add(rule1);
        LogManager.Configuration = config;
        dumpWarning = true;
    }

    _logger = LogManager.GetCurrentClassLogger();

    if (dumpWarning)
    {
        _logger.Warn("Nlog.config missing! Using default values...");
    }

    if (!CheckForDotnet46())
    {
        _logger.Warn(".net 4.6 not detected. Please install .net 4.6 and try again.");
        return;
    }

    var p = new FluentCommandLineParser<ApplicationArguments>
    {
        IsCaseSensitive = false
    };

    // NOTE: the upstream RECmd options (--Hive, --DumpKey, --KeyName, search
    // and date filters, ...) are intentionally not registered; only the CDI
    // automation options below are exposed. The corresponding
    // ApplicationArguments properties therefore remain null/default, which is
    // why every use of them below is null-guarded.
    p.Setup(arg => arg.Directory)
        .As("Dir")
        .WithDescription("\tDirectory to look for hives (recursively)");

    p.Setup(arg => arg.SaveTo)
        .As("SaveTo")
        .WithDescription("\tParsed data will be written to AutoRunParser_Output.csv in this directory");

    p.Setup(arg => arg.NoHeader)
        .As("NoHeader")
        .WithDescription("\tOutput to CSV without header");

    var header = $"AutoRunParser version {Assembly.GetExecutingAssembly().GetName().Version}" +
                 $" modified by CDI, Inc." +
                 "\r\n(Original Author: Eric Zimmerman)" +
                 "\r\n\r\nNote: Enclose all strings containing spaces (and all RegEx) with double quotes";

    var footer = @"Example: AutoRunParser.exe --Dir C:\data\ --SaveTo C:\output\";

    p.SetupHelp("?", "help").WithHeader(header).Callback(text => _logger.Info(text + "\r\n" + footer));

    var result = p.Parse(args);

    if (result.HelpCalled)
    {
        return;
    }

    if (result.HasErrors)
    {
        _logger.Error("");
        _logger.Error(result.ErrorText);
        if (result.ErrorText.Contains("--dir"))
        {
            _logger.Error("Remove the trailing backslash from the --dir argument and try again");
        }
        p.HelpOption.ShowHelp(p.Options);
        return;
    }

    var hivesToProcess = new List<string>();
    var systemHives = new List<string>();
    var softwareHives = new List<string>();
    var ntuserHives = new List<string>();

    if (p.Object.HiveFile?.Length > 0)
    {
        hivesToProcess.Add(p.Object.HiveFile);
    }
    else if (p.Object.Directory?.Length > 0)
    {
        if (Directory.Exists(p.Object.Directory) == false)
        {
            _logger.Error($"Directory '{p.Object.Directory}' does not exist.");
            return;
        }

        foreach (string fileName in Directory.GetFiles(p.Object.Directory, "*", SearchOption.AllDirectories))
        {
            // Keep only files starting with the registry hive magic "regf".
            // FIX: the probe stream/reader are now disposed (the original
            // leaked one file handle per scanned file).
            var isHive = false;
            using (Stream st = File.OpenRead(fileName))
            {
                if (st.Length >= 4)
                {
                    using (BinaryReader br = new BinaryReader(st))
                    {
                        isHive = br.ReadInt32() == 1718052210; // "regf" little-endian
                    }
                }
            }
            if (!isHive)
            {
                continue;
            }

            var upperName = Path.GetFileName(fileName).ToUpper();
            if (upperName.Contains("SYSTEM"))
            {
                systemHives.Add(fileName);
                hivesToProcess.Add(fileName);
            }
            else if (upperName.Contains("SOFTWARE"))
            {
                softwareHives.Add(fileName);
                hivesToProcess.Add(fileName);
            }
            else if (upperName.Contains("NTUSER.DAT"))
            {
                ntuserHives.Add(fileName);
                hivesToProcess.Add(fileName);
            }
        }
    }
    else
    {
        p.HelpOption.ShowHelp(p.Options);
        return;
    }

    _logger.Info(header);
    _logger.Info("");

    if (hivesToProcess.Count == 0)
    {
        _logger.Warn("No hives were found. Exiting...");
        return;
    }

    // CDI automation path: export the configured keys from every found hive to
    // a single CSV. FIX: null-conditional checks prevent a
    // NullReferenceException when --Dir or --SaveTo was not supplied.
    if (p.Object.Directory?.Length > 0 && p.Object.SaveTo?.Length > 0)
    {
        if (Directory.Exists(p.Object.SaveTo) == false)
        {
            try
            {
                Directory.CreateDirectory(p.Object.SaveTo);
            }
            catch (Exception ex)
            {
                _logger.Error($"Error creating SaveTo '{p.Object.SaveTo}': {ex.Message}. Exiting");
                return;
            }
        }

        var outFileBase = $"AutoRunParser_Output.csv";
        var outFileName = Path.Combine(p.Object.SaveTo, outFileBase);

        using (var sw = new StreamWriter(outFileName, true, System.Text.Encoding.Unicode))
        {
            sw.AutoFlush = true;
            sw.WriteLine("FilePath\tKey\tName\tValue\tLastWrittenTimeLocal\tLastWrittenTimeUTC");

            // SYSTEM hives: resolve the current control set via the Select key,
            // then dump every key listed (one per line) in system.txt.
            foreach (var systemHive in systemHives)
            {
                var reg = new RegistryHive(systemHive)
                {
                    RecoverDeleted = p.Object.RecoverDeleted
                };
                reg.ParseHive();

                var subKey = reg.GetKey("Select");
                if (subKey == null)
                {
                    _logger.Warn($"no SYSTEM hive: {systemHive}");
                    continue;
                }
                var currentCtlSet = int.Parse(subKey.Values.Single(c => c.ValueName == "Current").ValueData);

                using (var cReader = new StreamReader(Path.GetDirectoryName(Assembly.GetEntryAssembly().Location) + @"\system.txt", System.Text.Encoding.Default))
                {
                    while (cReader.Peek() >= 0)
                    {
                        string keyName = cReader.ReadLine();
                        var key = reg.GetKey($@"ControlSet00{currentCtlSet}\{keyName}");
                        if (key == null)
                        {
                            _logger.Warn($"Key not found: {keyName}");
                            continue;
                        }
                        WriteSpecificKeyInfo(key, sw, systemHive);
                    }
                }
            }

            // SOFTWARE hives: dump every key listed in software.txt.
            foreach (var softwareHive in softwareHives)
            {
                var reg = new RegistryHive(softwareHive)
                {
                    RecoverDeleted = p.Object.RecoverDeleted
                };
                reg.ParseHive();

                using (var cReader = new StreamReader(Path.GetDirectoryName(Assembly.GetEntryAssembly().Location) + @"\software.txt", System.Text.Encoding.Default))
                {
                    while (cReader.Peek() >= 0)
                    {
                        string keyName = cReader.ReadLine();
                        var key = reg.GetKey(keyName);
                        if (key == null)
                        {
                            _logger.Warn($"Key not found: {keyName}");
                            continue;
                        }
                        WriteSpecificKeyInfo(key, sw, softwareHive);
                    }
                }
            }

            // NTUSER.DAT hives: dump every key listed in ntuser.txt.
            foreach (var ntuserHive in ntuserHives)
            {
                var reg = new RegistryHive(ntuserHive)
                {
                    RecoverDeleted = p.Object.RecoverDeleted
                };
                reg.ParseHive();

                using (var cReader = new StreamReader(Path.GetDirectoryName(Assembly.GetEntryAssembly().Location) + @"\ntuser.txt", System.Text.Encoding.Default))
                {
                    while (cReader.Peek() >= 0)
                    {
                        string keyName = cReader.ReadLine();
                        var key = reg.GetKey(keyName);
                        if (key == null)
                        {
                            _logger.Warn($"Key not found: {keyName}");
                            continue;
                        }
                        WriteSpecificKeyInfo(key, sw, ntuserHive);
                    }
                }
            }

            _logger.Warn($"Saved to '{outFileName}'");
        }
        return;
    }

    // Legacy RECmd-style per-hive processing. With the RECmd options not
    // registered above these properties are null, so all Length checks are
    // null-conditional (FIX: the original dereferenced them directly and
    // relied on the catch below to swallow the NullReferenceException).
    var totalHits = 0;
    var hivesWithHits = 0;
    double totalSeconds = 0;

    foreach (var hiveToProcess in hivesToProcess)
    {
        _logger.Info("");
        _logger.Info($"Processing hive '{hiveToProcess}'");
        _logger.Info("");

        if (File.Exists(hiveToProcess) == false)
        {
            _logger.Warn($"'{hiveToProcess}' does not exist. Skipping");
            continue;
        }

        try
        {
            var reg = new RegistryHive(hiveToProcess)
            {
                RecoverDeleted = p.Object.RecoverDeleted
            };

            _sw = new Stopwatch();
            _sw.Start();

            reg.ParseHive();

            _logger.Info("");

            if (p.Object.DumpKey?.Length > 0 && p.Object.DumpDir?.Length > 0)
            {
                // Dump a single key (and its subkeys) as JSON into DumpDir.
                if (Directory.Exists(p.Object.DumpDir) == false)
                {
                    try
                    {
                        Directory.CreateDirectory(p.Object.DumpDir);
                    }
                    catch (Exception ex)
                    {
                        _logger.Error($"Error creating DumpDir '{p.Object.DumpDir}': {ex.Message}. Exiting");
                        return;
                    }
                }

                var key = reg.GetKey(p.Object.DumpKey);
                if (key == null)
                {
                    _logger.Warn($"Key not found: {p.Object.DumpKey}. Exiting");
                    return;
                }

                var nout = $"{key.KeyName}_dump.json";
                var fout = Path.Combine(p.Object.DumpDir, nout);

                _logger.Info("Found key. Dumping data. Be patient as this can take a while...");

                var jsons = new JsonSerializer<RegistryKey>();
                //TODO need a way to get a simple representation of things here, like
                //name, path, date, etc vs EVERYTHING
                using (var sw = new StreamWriter(fout))
                {
                    sw.AutoFlush = true;
                    jsons.SerializeToWriter(key, sw);
                }

                _logger.Warn($"'{p.Object.DumpKey}' saved to '{fout}'");
            }
            else if (p.Object.KeyName?.Length > 0)
            {
                var key = reg.GetKey(p.Object.KeyName);
                if (key == null)
                {
                    _logger.Warn($"Key '{p.Object.KeyName}' not found.");
                    DumpStopWatchInfo();
                    continue;
                }

                if (p.Object.ValueName?.Length > 0)
                {
                    // Dump a single value, optionally saving its raw bytes.
                    var val = key.Values.SingleOrDefault(c => c.ValueName == p.Object.ValueName);
                    if (val == null)
                    {
                        _logger.Warn($"Value '{p.Object.ValueName}' not found for key '{p.Object.KeyName}'.");
                        DumpStopWatchInfo();
                        continue;
                    }

                    _sw.Stop();
                    totalSeconds += _sw.Elapsed.TotalSeconds;

                    _logger.Info(val);
                    hivesWithHits += 1;
                    totalHits += 1;

                    if (p.Object.SaveToName?.Length > 0)
                    {
                        var baseDir = Path.GetDirectoryName(p.Object.SaveToName);
                        if (Directory.Exists(baseDir) == false)
                        {
                            Directory.CreateDirectory(baseDir);
                        }
                        _logger.Info($"Saving contents of '{val.ValueName}' to '{p.Object.SaveToName}'");
                        File.WriteAllBytes(p.Object.SaveToName, val.ValueDataRaw);
                    }

                    DumpStopWatchInfo();
                    continue;
                }

                hivesWithHits += 1;
                totalHits += 1;

                _sw.Stop();
                totalSeconds += _sw.Elapsed.TotalSeconds;

                DumpRootKeyName(reg);
                DumpKey(key, p.Object.Recursive);
                DumpStopWatchInfo();
            }
            else if (p.Object.MinimumSize > 0)
            {
                // Find values whose data is at least MinimumSize bytes.
                var hits = reg.FindByValueSize(p.Object.MinimumSize).ToList();

                _sw.Stop();
                totalSeconds += _sw.Elapsed.TotalSeconds;

                if (p.Object.Sort)
                {
                    hits = hits.OrderBy(t => t.Value.ValueDataRaw.Length).ToList();
                }

                DumpRootKeyName(reg);

                hivesWithHits += 1;
                totalHits += hits.Count;

                foreach (var valueBySizeInfo in hits)
                {
                    _logger.Info(
                        $"Key: {Helpers.StripRootKeyNameFromKeyPath(valueBySizeInfo.Key.KeyPath)}, Value: {valueBySizeInfo.Value.ValueName}, Size: {valueBySizeInfo.Value.ValueDataRaw.Length:N0}");
                }

                _logger.Info("");
                var plural = "s";
                if (hits.Count() == 1)
                {
                    plural = "";
                }
                _logger.Info(
                    $"Found {hits.Count():N0} value{plural} with size greater or equal to {p.Object.MinimumSize:N0} bytes");
                DumpStopWatchInfo();
            }
            else if (p.Object.StartDate != null || p.Object.EndDate != null)
            {
                // Find keys by last-write timestamp range. "-0" forces the
                // parsed strings to be treated as UTC offsets.
                DateTimeOffset start;
                DateTimeOffset end;

                var startOk = DateTimeOffset.TryParse(p.Object.StartDate + "-0", out start);
                var endOk = DateTimeOffset.TryParse(p.Object.EndDate + "-0", out end);

                DateTimeOffset? startGood = null;
                DateTimeOffset? endGood = null;
                var hits = new List<SearchHit>();

                if (!startOk && p.Object.StartDate != null)
                {
                    throw new InvalidCastException("'StartDate' is not a valid datetime value");
                }
                if (!endOk && p.Object.EndDate != null)
                {
                    throw new InvalidCastException("'EndDate' is not a valid datetime value");
                }

                if (startOk && endOk)
                {
                    startGood = start;
                    endGood = end;
                    hits = reg.FindByLastWriteTime(startGood, endGood).ToList();
                }
                else if (startOk)
                {
                    startGood = start;
                    hits = reg.FindByLastWriteTime(startGood, null).ToList();
                }
                else if (endOk)
                {
                    endGood = end;
                    hits = reg.FindByLastWriteTime(null, endGood).ToList();
                }

                _sw.Stop();
                totalSeconds += _sw.Elapsed.TotalSeconds;

                if (p.Object.Sort)
                {
                    hits = hits.OrderBy(t => t.Key.LastWriteTime ?? new DateTimeOffset()).ToList();
                }

                DumpRootKeyName(reg);

                hivesWithHits += 1;
                totalHits += hits.Count;

                foreach (var searchHit in hits)
                {
                    searchHit.StripRootKeyName = true;
                    _logger.Info($"Last write: {searchHit.Key.LastWriteTime} Key: {searchHit}");
                }

                var suffix = string.Empty;
                if (startGood != null || endGood != null)
                {
                    suffix = $"between {startGood} and {endGood}";
                }
                if (startGood != null && endGood == null)
                {
                    suffix = $"after {startGood}";
                }
                else if (endGood != null && startGood == null)
                {
                    suffix = $"before {endGood}";
                }

                _logger.Info("");
                var plural = "s";
                if (hits.Count() == 1)
                {
                    plural = "";
                }
                _logger.Info($"Found {hits.Count():N0} key{plural} with last write {suffix}");
                DumpStopWatchInfo();
            }
            else if (p.Object.SimpleSearchKey?.Length > 0 || p.Object.SimpleSearchValue?.Length > 0 ||
                     p.Object.SimpleSearchValueData?.Length > 0 || p.Object.SimpleSearchValueSlack?.Length > 0)
            {
                // Simple search: key names, value names, value data or slack.
                List<SearchHit> hits = null;

                if (p.Object.SimpleSearchKey?.Length > 0)
                {
                    hits = reg.FindInKeyName(p.Object.SimpleSearchKey, p.Object.RegEx).ToList();
                    if (p.Object.Sort)
                    {
                        hits = hits.OrderBy(t => t.Key.KeyName).ToList();
                    }
                }
                else if (p.Object.SimpleSearchValue?.Length > 0)
                {
                    hits = reg.FindInValueName(p.Object.SimpleSearchValue, p.Object.RegEx).ToList();
                    if (p.Object.Sort)
                    {
                        hits = hits.OrderBy(t => t.Value.ValueName).ToList();
                    }
                }
                else if (p.Object.SimpleSearchValueData?.Length > 0)
                {
                    hits = reg.FindInValueData(p.Object.SimpleSearchValueData, p.Object.RegEx, p.Object.Literal).ToList();
                    if (p.Object.Sort)
                    {
                        hits = hits.OrderBy(t => t.Value.ValueData).ToList();
                    }
                }
                else if (p.Object.SimpleSearchValueSlack?.Length > 0)
                {
                    hits = reg.FindInValueDataSlack(p.Object.SimpleSearchValueSlack, p.Object.RegEx, p.Object.Literal).ToList();
                    if (p.Object.Sort)
                    {
                        hits = hits.OrderBy(t => t.Value.ValueData).ToList();
                    }
                }

                if (hits == null)
                {
                    _logger.Warn("No search results found");
                    DumpStopWatchInfo();
                    continue;
                }

                _sw.Stop();
                totalSeconds += _sw.Elapsed.TotalSeconds;

                DumpRootKeyName(reg);

                // Set up console highlighting for the matched terms.
                var words = new HashSet<string>();
                foreach (var searchHit in hits)
                {
                    if (p.Object.SimpleSearchKey?.Length > 0)
                    {
                        words.Add(p.Object.SimpleSearchKey);
                    }
                    else if (p.Object.SimpleSearchValue?.Length > 0)
                    {
                        words.Add(p.Object.SimpleSearchValue);
                    }
                    else if (p.Object.SimpleSearchValueData?.Length > 0)
                    {
                        if (p.Object.RegEx)
                        {
                            words.Add(p.Object.SimpleSearchValueData);
                        }
                        else
                        {
                            words.Add(searchHit.HitString);
                        }
                    }
                    else if (p.Object.SimpleSearchValueSlack?.Length > 0)
                    {
                        if (p.Object.RegEx)
                        {
                            words.Add(p.Object.SimpleSearchValueSlack);
                        }
                        else
                        {
                            words.Add(searchHit.HitString);
                        }
                    }
                }

                AddHighlightingRules(words.ToList(), p.Object.RegEx);

                hivesWithHits += 1;
                totalHits += hits.Count;

                foreach (var searchHit in hits)
                {
                    searchHit.StripRootKeyName = true;

                    if (p.Object.SimpleSearchValueData?.Length > 0 || p.Object.SimpleSearchValueSlack?.Length > 0)
                    {
                        if (p.Object.SuppressData)
                        {
                            _logger.Info(
                                $"Key: {Helpers.StripRootKeyNameFromKeyPath(searchHit.Key.KeyPath)}, Value: {searchHit.Value.ValueName}");
                        }
                        else
                        {
                            if (p.Object.SimpleSearchValueSlack?.Length > 0)
                            {
                                _logger.Info(
                                    $"Key: {Helpers.StripRootKeyNameFromKeyPath(searchHit.Key.KeyPath)}, Value: {searchHit.Value.ValueName}, Slack: {searchHit.Value.ValueSlack}");
                            }
                            else
                            {
                                _logger.Info(
                                    $"Key: {Helpers.StripRootKeyNameFromKeyPath(searchHit.Key.KeyPath)}, Value: {searchHit.Value.ValueName}, Data: {searchHit.Value.ValueData}");
                            }
                        }
                    }
                    else if (p.Object.SimpleSearchKey?.Length > 0)
                    {
                        _logger.Info($"Key: {Helpers.StripRootKeyNameFromKeyPath(searchHit.Key.KeyPath)}");
                    }
                    else if (p.Object.SimpleSearchValue?.Length > 0)
                    {
                        _logger.Info(
                            $"Key: {Helpers.StripRootKeyNameFromKeyPath(searchHit.Key.KeyPath)}, Value: {searchHit.Value.ValueName}");
                    }
                }

                var target = (ColoredConsoleTarget)LogManager.Configuration.FindTargetByName("console");
                target.WordHighlightingRules.Clear();

                var suffix = string.Empty;
                var withRegex = string.Empty;
                var plural = "s";
                if (hits.Count() == 1)
                {
                    plural = "";
                }

                if (p.Object.SimpleSearchValueData?.Length > 0)
                {
                    suffix = $"value data hit{plural}";
                }
                else if (p.Object.SimpleSearchValueSlack?.Length > 0)
                {
                    suffix = $"value slack hit{plural}";
                }
                else if (p.Object.SimpleSearchKey?.Length > 0)
                {
                    suffix = $"key{plural}";
                }
                else if (p.Object.SimpleSearchValue?.Length > 0)
                {
                    suffix = $"value{plural}";
                }

                if (p.Object.RegEx)
                {
                    withRegex = " (via RegEx)";
                }

                _logger.Info("");
                _logger.Info($"Found {hits.Count():N0} {suffix}{withRegex}");
                DumpStopWatchInfo();
            }
            else
            {
                _logger.Warn("Nothing to do! =(");
            }

            //TODO search deleted?? should only need to look in reg.UnassociatedRegistryValues
        }
        catch (Exception ex)
        {
            // "bad signature" means the file only looked like a hive; stay quiet.
            if (!ex.Message.Contains("bad signature"))
            {
                _logger.Error($"There was an error: {ex.Message}");
            }
        }
    }
    // (Removed a large block of commented-out RECmd option setups and a
    // commented-out per-directory summary that referenced unused counters.)
}
/// <summary>Forwards <paramref name="s"/> to the underlying logger at warning level.</summary>
/// <param name="s">The message text to record.</param>
public void Warn(string s) => logger.Warn(s);
/// <summary>Logs a composite-format message at warning level.</summary>
/// <param name="format">Composite format string (NLog-style placeholders).</param>
/// <param name="vargs">Arguments substituted into <paramref name="format"/>.</param>
public void Warning(string format, params object[] vargs) => m_logger.Warn(format, vargs);
/// <summary>
/// Records an exception as a warning: forwarded to the NLog logger (when one is
/// configured) and mirrored to the system trace listeners.
/// </summary>
/// <param name="e">The exception to report.</param>
public void Warning(Exception e)
{
    _log?.Warn(e);
    var details = $"{ProjectName} : error={e.Message};stack={e.StackTrace}";
    System.Diagnostics.Trace.TraceWarning(details);
}
/// <summary>Logs a format message with arguments at warning level.</summary>
/// <param name="message">Composite format string.</param>
/// <param name="args">Arguments substituted into <paramref name="message"/>.</param>
public void WarnFormat(string message, params object[] args) => _logger.Warn(message, args);
// Class constructor. It must be given a client that was accepted from the TcpListener.
// Reads the HTTP request, resolves it to a file and streams that file back, sending an
// HTTP error status (400/403/404/500) on any failure.
public Client(Logger log, TcpClient client)
{
    _log = log;
    _log.Trace("Init client");
    // String that accumulates the client's request
    string request = "";
    // Buffer for data received from the client
    byte[] buffer = new byte[1024];
    // Number of bytes received from the client in the last read
    int count;
    _log.Trace("Read request");
    // Keep reading from the client's stream while data arrives
    while ((count = client.GetStream().Read(buffer, 0, buffer.Length)) > 0)
    {
        // Convert the bytes to text and append to the request
        request += Encoding.ASCII.GetString(buffer, 0, count);
        // A request ends with the \r\n\r\n sequence. Otherwise we stop receiving
        // ourselves once the request exceeds 4 KB: POST bodies and the like are not
        // needed, and an ordinary request should not be larger than 4 KB.
        if (request.IndexOf("\r\n\r\n", StringComparison.Ordinal) >= 0 || request.Length > 4096)
        {
            break;
        }
    }
    _log.Trace("Request match");
    // Parse the request line with a regular expression, discarding any GET query string.
    // NOTE(review): the trailing "|" makes the pattern contain an empty alternative, so the
    // match always succeeds and the Match.Empty branch below appears unreachable — confirm
    // whether the "|" is intentional.
    Match reqMatch = Regex.Match(request, @"^\w+\s+([^\s\?]+)[^\s]*\s+HTTP/.*|");
    // If the request line did not parse
    if (reqMatch == Match.Empty)
    {
        // Reply with 400 Bad Request
        _log.Warn(HttpStatusCode.BadRequest + ": " + request);
        SendError(client, HttpStatusCode.BadRequest);
        return;
    }
    _log.Trace("Get request Uri");
    // Extract the request URI (first capture group)
    string requestUri = reqMatch.Groups[1].Value;
    _log.Trace("Unescape request Uri");
    // Decode percent-escapes back to their original characters, e.g. "%20" -> " "
    requestUri = Uri.UnescapeDataString(requestUri);
    // If the URI contains "..", reply with 400.
    // This guards against path traversal like http://example.com/../../file.txt
    if (requestUri.IndexOf("..", StringComparison.Ordinal) >= 0)
    {
        _log.Warn(HttpStatusCode.BadRequest + ": " + request);
        SendError(client, HttpStatusCode.BadRequest);
        return;
    }
    // An empty URI is treated as the site root
    if (string.IsNullOrEmpty(requestUri))
    {
        requestUri += "/";
    }
    // If the URI ends with "/", serve index.html from that directory
    if (requestUri.EndsWith("/"))
    {
        requestUri += "index.html";
    }
    string filePath;
    if (Settings.IsSite)
    {
        // Site mode: map the URI directly under the configured folder
        filePath = Settings.Folder + requestUri;
    }
    else
    {
        // File-list mode: only explicitly registered file names may be served
        string fileName;
        try
        {
            fileName = Path.GetFileName(requestUri);
        }
        catch (Exception)
        {
            _log.Warn(HttpStatusCode.BadRequest + ": " + request);
            SendError(client, HttpStatusCode.BadRequest);
            return;
        }
        if (!Settings.Files.TryGetValue(fileName, out filePath))
        {
            // NOTE(review): this call uses the `log` parameter while every other call uses the
            // `_log` field; they hold the same instance here, but confirm it is intentional.
            log.Warn(HttpStatusCode.Forbidden + ": " + request);
            SendError(client, HttpStatusCode.Forbidden);
            return;
        }
    }
    // If the resolved file does not exist, reply with 404
    if (!File.Exists(filePath))
    {
        _log.Warn(HttpStatusCode.NotFound + ": " + request + " => " + filePath);
        SendError(client, HttpStatusCode.NotFound);
        return;
    }
    _log.Trace("Get Mime type");
    // Content type derived from the request URI's extension
    string contentType = MimeTypes.GetMimeType(requestUri);
    _log.Trace("Read file from drive");
    // Open the file, guarding against errors
    FileStream fs;
    try
    {
        fs = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read);
    }
    catch (Exception)
    {
        // On failure, reply with 500 Internal Server Error
        _log.Warn(HttpStatusCode.InternalServerError + ": " + request + " => " + filePath);
        SendError(client, HttpStatusCode.InternalServerError);
        return;
    }
    _log.Trace("Create response");
    try
    {
        // Send the response headers.
        // NOTE(review): HTTP requires CRLF ("\r\n") line endings; bare "\n" only works
        // with lenient clients — confirm whether this should be changed.
        string headers = "HTTP/1.1 200 OK\nContent-Type: " + contentType + "\nContent-Length: " + fs.Length + "\n\n";
        byte[] headersBuffer = Encoding.ASCII.GetBytes(headers);
        client.GetStream().Write(headersBuffer, 0, headersBuffer.Length);
        // Until the end of the file is reached
        while (fs.Position < fs.Length)
        {
            // Read a chunk from the file
            count = fs.Read(buffer, 0, buffer.Length);
            // And forward it to the client
            client.GetStream().Write(buffer, 0, count);
        }
    }
    catch (Exception ex)
    {
        _log.Trace(ex, "Failed to create response");
    }
    finally
    {
        // Close the file and the connection in all cases
        fs.Close();
        try
        {
            client.Close();
        }
        catch (Exception ignore)
        {
        }
    }
    _log.Info(HttpStatusCode.OK + ": " + request + " => " + filePath);
}
/// <summary>Writes <paramref name="message"/> to the log at warning level.</summary>
/// <param name="message">The message text to record.</param>
public void Warning(string message) => logger.Warn(message);
/// <summary>
/// Base-class constructor: acquires a class-scoped logger and records task creation.
/// </summary>
/// <param name="tsk">The underlying task (not used in this base constructor).</param>
public ActiveTaskBase(Task tsk)
{
    log = LogManager.GetCurrentClassLogger();
    // Post-increment: logs the current counter value, then bumps the shared count.
    log.Warn("Created task {0}: {1}", _cnt++, GetType().Name);
}
/// <summary>Logs a composite-format message at warning level via the shared logger.</summary>
/// <param name="message">Composite format string.</param>
/// <param name="args">Arguments substituted into <paramref name="message"/>.</param>
public static void Warn(string message, params object[] args) => Log.Warn(message, args);
/// <summary>
/// Parses an Amcache hive: reads every entry under Root\Programs, then walks the
/// volume subkeys of Root\File, linking each file entry to its program (via ProgramID)
/// or adding it to <see cref="UnassociatedFileEntries"/>.
/// </summary>
/// <param name="hive">Path of the Amcache hive file to parse.</param>
/// <param name="recoverDeleted">When true, the registry parser also recovers deleted keys/values.</param>
public Amcache(string hive, bool recoverDeleted)
{
    _logger = LogManager.GetCurrentClassLogger();
    var reg = new RegistryHive(hive)
    {
        RecoverDeleted = recoverDeleted
    };
    reg.ParseHive();
    var fileKey = reg.GetKey(@"Root\File");
    var programsKey = reg.GetKey(@"Root\Programs");
    UnassociatedFileEntries = new List<FileEntry>();
    ProgramsEntries = new List<ProgramsEntry>();
    if (fileKey == null || programsKey == null)
    {
        _logger.Error("Hive does not contain a File and/or Programs key. Processing cannot continue");
        return;
    }
    //First, we get data for all the Program entries under Programs key
    foreach (var registryKey in programsKey.SubKeys)
    {
        var ProgramName0 = "";
        var ProgramVersion1 = "";
        var Guid10 = "";
        var UninstallGuid11 = "";
        var Guid12 = "";
        var Dword13 = 0;
        var Dword14 = 0;
        var Dword15 = 0;
        var UnknownBytes = new byte[0];
        long Qword17 = 0;
        var Dword18 = 0;
        var VenderName2 = "";
        var LocaleID3 = "";
        var Dword5 = 0;
        var InstallSource6 = "";
        var UninstallKey7 = "";
        DateTimeOffset? EpochA = null;
        DateTimeOffset? EpochB = null;
        var PathListd = "";
        var Guidf = "";
        var RawFiles = "";
        try
        {
            foreach (var value in registryKey.Values)
            {
                switch (value.ValueName)
                {
                    case "0":
                        ProgramName0 = value.ValueData;
                        break;
                    case "1":
                        ProgramVersion1 = value.ValueData;
                        break;
                    case "2":
                        VenderName2 = value.ValueData;
                        break;
                    case "3":
                        LocaleID3 = value.ValueData;
                        break;
                    case "5":
                        Dword5 = int.Parse(value.ValueData);
                        break;
                    case "6":
                        InstallSource6 = value.ValueData;
                        break;
                    case "7":
                        UninstallKey7 = value.ValueData;
                        break;
                    case "a":
                        // Unix-epoch seconds; best-effort because the stored number can be
                        // far outside the valid range.
                        try
                        {
                            var seca = long.Parse(value.ValueData);
                            if (seca > 0)
                            {
                                EpochA = DateTimeOffset.FromUnixTimeSeconds(seca).ToUniversalTime();
                            }
                        }
                        catch (Exception)
                        {
                            //sometimes the number is way too big
                        }
                        break;
                    case "b":
                        var seconds = long.Parse(value.ValueData);
                        if (seconds > 0)
                        {
                            EpochB = DateTimeOffset.FromUnixTimeSeconds(seconds).ToUniversalTime();
                        }
                        break;
                    case "d":
                        PathListd = value.ValueData;
                        break;
                    case "f":
                        Guidf = value.ValueData;
                        break;
                    case "10":
                        Guid10 = value.ValueData;
                        break;
                    case "11":
                        UninstallGuid11 = value.ValueData;
                        break;
                    case "12":
                        Guid12 = value.ValueData;
                        break;
                    case "13":
                        Dword13 = int.Parse(value.ValueData);
                        break;
                    case "14":
                        // BUGFIX: previously assigned to Dword13 (copy/paste error), which
                        // clobbered value "13" and left Dword14 always 0.
                        Dword14 = int.Parse(value.ValueData);
                        break;
                    case "15":
                        // BUGFIX: previously assigned to Dword13 (copy/paste error).
                        Dword15 = int.Parse(value.ValueData);
                        break;
                    case "16":
                        UnknownBytes = value.ValueDataRaw;
                        break;
                    case "17":
                        Qword17 = long.Parse(value.ValueData);
                        break;
                    case "18":
                        Dword18 = int.Parse(value.ValueData);
                        break;
                    case "Files":
                        RawFiles = value.ValueData;
                        break;
                    default:
                        _logger.Warn(
                            $"Unknown value name in Program at path {registryKey.KeyPath}: {value.ValueName}");
                        break;
                }
            }
            var pe = new ProgramsEntry(ProgramName0, ProgramVersion1, VenderName2, LocaleID3, InstallSource6,
                UninstallKey7, Guid10, Guid12, UninstallGuid11, Dword5, Dword13, Dword14, Dword15, UnknownBytes,
                Qword17, Dword18, EpochA, EpochB, PathListd, Guidf, RawFiles, registryKey.KeyName,
                registryKey.LastWriteTime.Value);
            ProgramsEntries.Add(pe);
        }
        catch (Exception ex)
        {
            _logger.Error($"Error parsing ProgramsEntry at {registryKey.KeyPath}. Error: {ex.Message}");
            _logger.Error(
                $"Please send the following text to [email protected]. \r\n\r\nKey data: {registryKey}");
        }
    }
    //For each Programs entry, add the related Files entries from Files\Volume subkey, put the rest in unassociated
    foreach (var registryKey in fileKey.SubKeys)
    {
        //These are the guids for volumes
        foreach (var subKey in registryKey.SubKeys)
        {
            var prodName = "";
            int? langId = null;
            var fileVerString = "";
            var fileVerNum = "";
            var fileDesc = "";
            var compName = "";
            var fullPath = "";
            var switchBack = "";
            var peHash = "";
            var progID = "";
            var sha = "";
            long unknown1 = 0;
            long unknown2 = 0;
            var unknown3 = 0;
            var unknown4 = 0;
            var unknown5 = 0;
            var unknown6 = 0;
            int? fileSize = null;
            int? peHeaderSize = null;
            int? peHeaderChecksum = null;
            DateTimeOffset? created = null;
            DateTimeOffset? lm = null;
            DateTimeOffset? lm2 = null;
            DateTimeOffset? compTime = null;
            var hasLinkedProgram = false;
            try
            {
                //these are the files executed from the volume
                foreach (var keyValue in subKey.Values)
                {
                    // Value names are hex-encoded field ids
                    var keyVal = int.Parse(keyValue.ValueName, NumberStyles.HexNumber);
                    switch (keyVal)
                    {
                        case ProductName:
                            prodName = keyValue.ValueData;
                            break;
                        case CompanyName:
                            compName = keyValue.ValueData;
                            break;
                        case FileVersionNumber:
                            fileVerNum = keyValue.ValueData;
                            break;
                        case LanguageCode:
                            langId = int.Parse(keyValue.ValueData);
                            break;
                        case SwitchBackContext:
                            switchBack = keyValue.ValueData;
                            break;
                        case FileVersionString:
                            fileVerString = keyValue.ValueData;
                            break;
                        case FileSize:
                            fileSize = int.Parse(keyValue.ValueData);
                            break;
                        case PEHeaderSize:
                            peHeaderSize = int.Parse(keyValue.ValueData);
                            break;
                        case PEHeaderHash:
                            peHash = keyValue.ValueData;
                            break;
                        case PEHeaderChecksum:
                            peHeaderChecksum = int.Parse(keyValue.ValueData);
                            break;
                        case Unknown1:
                            unknown1 = long.Parse(keyValue.ValueData);
                            break;
                        case Unknown2:
                            unknown2 = long.Parse(keyValue.ValueData);
                            break;
                        case FileDescription:
                            fileDesc = keyValue.ValueData;
                            break;
                        case Unknown3:
                            unknown3 = int.Parse(keyValue.ValueData);
                            break;
                        case CompileTime:
                            compTime = DateTimeOffset.FromUnixTimeSeconds(long.Parse(keyValue.ValueData))
                                .ToUniversalTime();
                            break;
                        case Unknown4:
                            unknown4 = int.Parse(keyValue.ValueData);
                            break;
                        case LastModified:
                            lm = DateTimeOffset.FromFileTime(long.Parse(keyValue.ValueData)).ToUniversalTime();
                            break;
                        case Created:
                            created = DateTimeOffset.FromFileTime(long.Parse(keyValue.ValueData)).ToUniversalTime();
                            break;
                        case FullPath:
                            fullPath = keyValue.ValueData;
                            break;
                        case Unknown5:
                            unknown5 = int.Parse(keyValue.ValueData);
                            break;
                        case Unknown6:
                            unknown6 = int.Parse(keyValue.ValueData);
                            break;
                        case LastModified2:
                            lm2 = DateTimeOffset.FromFileTime(long.Parse(keyValue.ValueData)).ToUniversalTime();
                            break;
                        case ProgramID:
                            progID = keyValue.ValueData;
                            var program = ProgramsEntries.SingleOrDefault(t => t.ProgramID == progID);
                            if (program != null)
                            {
                                hasLinkedProgram = true;
                            }
                            break;
                        case SHA1:
                            sha = keyValue.ValueData;
                            break;
                        default:
                            _logger.Warn(
                                $"Unknown value name when processing FileEntry at path '{subKey.KeyPath}': 0x{keyVal:X}");
                            break;
                    }
                }
                // Entries without a path are not usable
                if (fullPath.Length == 0)
                {
                    continue;
                }
                TotalFileEntries += 1;
                var fe = new FileEntry(prodName, progID, sha, fullPath, lm2, registryKey.KeyName,
                    registryKey.LastWriteTime.Value, subKey.KeyName, subKey.LastWriteTime.Value, unknown5, compName,
                    langId, fileVerString, peHash, fileVerNum, fileDesc, unknown1, unknown2, unknown3, unknown4,
                    switchBack, fileSize, compTime, peHeaderSize, lm, created, peHeaderChecksum, unknown6,
                    subKey.KeyName);
                if (hasLinkedProgram)
                {
                    // Attach the file entry to its owning program
                    var program = ProgramsEntries.SingleOrDefault(t => t.ProgramID == fe.ProgramID);
                    fe.ProgramName = program.ProgramName_0;
                    program.FileEntries.Add(fe);
                }
                else
                {
                    fe.ProgramName = "Unassociated";
                    UnassociatedFileEntries.Add(fe);
                }
            }
            catch (Exception ex)
            {
                _logger.Error($"Error parsing FileEntry at {subKey.KeyPath}. Error: {ex.Message}");
                _logger.Error(
                    $"Please send the following text to [email protected]. \r\n\r\nKey data: {subKey}");
            }
        }
    }
}
/// <summary>Forwards <paramref name="message"/> to the underlying logger at warning level.</summary>
/// <param name="message">The message text to record.</param>
public void Warn(string message) => _logger.Warn(message);
/// <summary>
/// Sets up NLog diagnostics context and custom targets, hooks unhandled-exception
/// events, then scans the DLLs next to the executable (minus loader.exclude.txt
/// entries) for concrete <c>IServerExtension</c> implementations with parameterless
/// constructors, collecting an <c>ExtensionInfo</c> for each.
/// </summary>
/// <param name="dirName">Subdirectory name stored in the NLog global diagnostics context.</param>
/// <param name="parentProcessId">Parent process id stored in the NLog global diagnostics context.</param>
public void Init(string dirName, string parentProcessId)
{
    try
    {
        var exeDir = new DirectoryInfo(AppDomain.CurrentDomain.BaseDirectory).Parent.Parent.FullName;
        GlobalDiagnosticsContext.Set("ExeBaseDir", exeDir);
        GlobalDiagnosticsContext.Set("SubDirName", dirName);
        GlobalDiagnosticsContext.Set("ParentProcess", parentProcessId);
        ConfigurationItemFactory.Default.Targets.RegisterDefinition("ServiceManager", typeof(ServiceManagerTarget));
        ConfigurationItemFactory.Default.Targets.RegisterDefinition("ServiceManagerNotification", typeof(NLog.Targets.NullTarget));
        _dirName = dirName;
        _logger = LogManager.GetCurrentClassLogger();
        AppDomain.CurrentDomain.UnhandledException += OnAppDomainUnhandledException;
        TaskScheduler.UnobservedTaskException += OnTaskSchedulerUnobservedTaskException;
        var list = new List<ExtensionInfo>();
        var files = new DirectoryInfo(AppDomain.CurrentDomain.BaseDirectory).GetFiles("*.dll");
        // Optional exclusion list: one relative file name per line, resolved against
        // the base directory; only entries that actually exist are kept.
        var asmExclusionsPath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "loader.exclude.txt");
        var exclusions = new HashSet<string>();
        if (File.Exists(asmExclusionsPath))
            exclusions = new HashSet<string>(
                File.ReadAllLines(asmExclusionsPath)
                    .Select(s => Path.Combine(AppDomain.CurrentDomain.BaseDirectory, s).ToLower())
                    .Where(File.Exists));
        foreach (var file in files)
        {
            if (exclusions.Contains(file.FullName.ToLower()))
                continue;
            try
            {
                var asm = Assembly.Load(Path.GetFileNameWithoutExtension(file.Name));
                // Concrete IServerExtension implementations with a parameterless ctor
                var types = (from t in asm.GetTypes()
                             where t.GetInterfaces().Any(i => i == typeof(IServerExtension)) &&
                                   t.IsClass && !t.IsAbstract &&
                                   t.GetConstructors().Where(i => i.GetParameters().Count() == 0).Any()
                             select t);
                // Instantiate each extension once, keyed by its ID
                var typeMap = (from t in types
                               select new { Ext = (IServerExtension)Activator.CreateInstance(t) })
                    .ToDictionary(k => k.Ext.ID, v => v.Ext);
                list.AddRange(
                    typeMap.Values.Select(ext => new ExtensionInfo
                    {
                        ExtensionID = ext.ID,
                        Name = ext.Name,
                        Description = ext.Description,
                        AssemblyQualifiedName = ext.GetType().AssemblyQualifiedName
                    })
                );
            }
            catch (ReflectionTypeLoadException ex)
            {
                _logger.Warn("Unable to load: " + file.Name);
                foreach (var lx in ex.LoaderExceptions)
                    _logger.Warn(" => " + lx.Message);
                continue;
            }
            catch (BadImageFormatException)
            {
                // Not a managed assembly (e.g. native DLL) — skip quietly
                continue;
            }
        }
        _infos = list.ToArray();
        _logger.Info("Obtained info for " + _infos.Length + " available extensions");
    }
    catch (Exception ex)
    {
        // BUGFIX: message previously read "Exception while activating exception".
        _logger.FatalException("Exception while activating extensions: ", ex);
        Console.WriteLine(ex.ToString());
        throw;
    }
}
/// <summary>
/// Emits one sample message at each NLog severity (lowest to highest) and then
/// verifies that all six messages arrived in the mock channel for the given key.
/// </summary>
/// <param name="aiLogger">Logger wired to the Application Insights target under test.</param>
/// <param name="instrumentationKey">Instrumentation key the channel is validated against.</param>
private void VerifyMessagesInMockChannel(Logger aiLogger, string instrumentationKey)
{
    // One emission per severity level, in ascending order.
    var emitters = new Action[]
    {
        () => aiLogger.Trace("Sample trace message"),
        () => aiLogger.Debug("Sample debug message"),
        () => aiLogger.Info("Sample informational message"),
        () => aiLogger.Warn("Sample warning message"),
        () => aiLogger.Error("Sample error message"),
        () => aiLogger.Fatal("Sample fatal error message")
    };
    foreach (var emit in emitters)
    {
        emit();
    }
    // Six messages were sent, so exactly six must be present in the channel.
    AdapterHelper.ValidateChannel(this.adapterHelper, instrumentationKey, 6);
}
/// <summary>
/// Entry point: configures NLog, parses command-line options and key=value adapter
/// parameters, then launches the remote Metadata and Data adapter servers for the
/// Portfolio Demo.
/// </summary>
/// <param name="args">Command-line arguments: dash-prefixed switches plus free-form key=value pairs.</param>
public static void Main(string[] args)
{
    // Log to the console from Debug up and to a file from Info up.
    var config = new NLog.Config.LoggingConfiguration();
    var logfile = new NLog.Targets.FileTarget("logfile") { FileName = "TestAdapter.log" };
    var logconsole = new NLog.Targets.ConsoleTarget("logconsole");
    config.AddRule(LogLevel.Debug, LogLevel.Fatal, logconsole);
    config.AddRule(LogLevel.Info, LogLevel.Fatal, logfile);
    NLog.LogManager.Configuration = config;
    // NOTE(review): execution continues after Help() here — presumably Help() exits
    // the process; confirm.
    if (args.Length == 0)
    {
        Help();
    }
    _log.Info("Lightstreamer PortfolioDemo .NET Adapter Custom Server starting...");
    Server.SetLoggerProvider(new Log4NetLoggerProviderWrapper());
    IDictionary parameters = new Hashtable();
    string host = null;
    int rrPortMD = -1;
    int rrPortD = -1;
    int notifPortD = -1;
    string name = null;
    // Scan the arguments: prefix-marked tokens are recognized switches (each consuming
    // the following token as its value); anything else containing SEP is treated as a
    // key=value adapter parameter.
    for (int i = 0; i < args.Length; i++)
    {
        string arg = args[i];
        if (arg.StartsWith(PREFIX1) || arg.StartsWith(PREFIX2))
        {
            arg = arg.Substring(1).ToLower();
            if (arg.Equals(ARG_HELP_SHORT) || arg.Equals(ARG_HELP_LONG))
            {
                Help();
            }
            else if (arg.Equals(ARG_HOST))
            {
                i++;
                host = args[i];
                _log.Debug("Found argument: '" + ARG_HOST + "' with value: '" + host + "'");
            }
            else if (arg.Equals(ARG_METADATA_RR_PORT))
            {
                i++;
                rrPortMD = Int32.Parse(args[i]);
                _log.Debug("Found argument: '" + ARG_METADATA_RR_PORT + "' with value: '" + rrPortMD + "'");
            }
            else if (arg.Equals(ARG_DATA_RR_PORT))
            {
                i++;
                rrPortD = Int32.Parse(args[i]);
                _log.Debug("Found argument: '" + ARG_DATA_RR_PORT + "' with value: '" + rrPortD + "'");
            }
            else if (arg.Equals(ARG_DATA_NOTIF_PORT))
            {
                i++;
                notifPortD = Int32.Parse(args[i]);
                _log.Debug("Found argument: '" + ARG_DATA_NOTIF_PORT + "' with value: '" + notifPortD + "'");
            }
            else if (arg.Equals(ARG_NAME))
            {
                i++;
                name = args[i];
                _log.Debug("Found argument: '" + ARG_NAME + "' with value: '" + name + "'");
            }
        }
        else
        {
            int sep = arg.IndexOf(SEP);
            if (sep < 1)
            {
                _log.Warn("Skipping unrecognizable argument: '" + arg + "'");
            }
            else
            {
                string par = arg.Substring(0, sep).Trim();
                string val = arg.Substring(sep + 1).Trim();
                parameters[par] = val;
                _log.Debug("Found parameter: '" + par + "' with value: '" + val + "'");
            }
        }
    }
    PortfolioFeedSimulator feed = new PortfolioFeedSimulator();
    // A reference to the feed simulator will be supplied
    // to both the Data and the Metadata Adapters.
    try
    {
        {
            MetadataProviderServer server = new MetadataProviderServer();
            Lightstreamer.Adapters.PortfolioDemo.Metadata.PortfolioMetadataAdapter adapter =
                new Lightstreamer.Adapters.PortfolioDemo.Metadata.PortfolioMetadataAdapter();
            // We complete the Metadata Adapter initialization by supplying
            // a reference to the feed simulator through a custom method;
            // for this reason, the Portfolio Demo Metadata Adapter
            // does not support the basic DotNetServer.exe launcher
            // provided by LS library,
            adapter.SetFeed(feed);
            server.Adapter = adapter;
            server.AdapterParams = parameters;
            // server.AdapterConfig not needed by PortfolioMetadataAdapter
            if (name != null)
            {
                server.Name = name;
            }
            _log.Debug("Remote Metadata Adapter initialized");
            ServerStarter starter = new ServerStarter(host, rrPortMD, -1);
            starter.Launch(server);
        }
        {
            DataProviderServer server = new DataProviderServer();
            Lightstreamer.Adapters.PortfolioDemo.Data.PortfolioAdapter adapter =
                new Lightstreamer.Adapters.PortfolioDemo.Data.PortfolioAdapter();
            // We complete the Data Adapter initialization by supplying
            // a reference to the feed simulator through a custom method;
            // for this reason, the Portfolio Demo Data Adapter
            // does not support the basic DotNetServer.exe launcher
            // provided by LS library,
            adapter.SetFeed(feed);
            server.Adapter = adapter;
            // server.AdapterParams not needed by PortfolioAdapter
            // server.AdapterConfig not needed by PortfolioAdapter
            if (name != null)
            {
                server.Name = name;
            }
            _log.Debug("Remote Data Adapter initialized");
            ServerStarter starter = new ServerStarter(host, rrPortD, notifPortD);
            starter.Launch(server);
        }
    }
    catch (Exception e)
    {
        _log.Fatal("Exception caught while starting the server: " + e.Message + ", aborting...", e);
    }
    _log.Info("Lightstreamer PortfolioDemo .NET Adapter Custom Server running");
}
/// <summary>
/// LECmd entry point: parses command-line options, processes a single lnk file or a
/// directory of them, and exports results to CSV/JSON/XML/xhtml as requested.
/// </summary>
/// <param name="args">Raw command-line arguments handed to the fluent parser.</param>
private static void Main(string[] args)
{
    Licensing.RegisterLicenseFromFileIfExists(SSLicenseFile);
    LoadMACs();
    SetupNLog();
    _logger = LogManager.GetCurrentClassLogger();
    if (!CheckForDotnet46())
    {
        _logger.Warn(".net 4.6 not detected. Please install .net 4.6 and try again.");
        return;
    }
    // Command-line option definitions
    _fluentCommandLineParser = new FluentCommandLineParser<ApplicationArguments>
    {
        IsCaseSensitive = false
    };
    _fluentCommandLineParser.Setup(arg => arg.File)
        .As('f')
        .WithDescription("File to process. Either this or -d is required");
    _fluentCommandLineParser.Setup(arg => arg.Directory)
        .As('d')
        .WithDescription("Directory to recursively process. Either this or -f is required");
    _fluentCommandLineParser.Setup(arg => arg.AllFiles)
        .As("all")
        .WithDescription(
            "Process all files in directory vs. only files matching *.lnk\r\n").SetDefault(false);
    _fluentCommandLineParser.Setup(arg => arg.CsvDirectory)
        .As("csv")
        .WithDescription(
            "Directory to save CSV (tab separated) formatted results to. Be sure to include the full path in double quotes");
    _fluentCommandLineParser.Setup(arg => arg.XmlDirectory)
        .As("xml")
        .WithDescription(
            "Directory to save XML formatted results to. Be sure to include the full path in double quotes");
    _fluentCommandLineParser.Setup(arg => arg.xHtmlDirectory)
        .As("html")
        .WithDescription(
            "Directory to save xhtml formatted results to. Be sure to include the full path in double quotes");
    _fluentCommandLineParser.Setup(arg => arg.JsonDirectory)
        .As("json")
        .WithDescription(
            "Directory to save json representation to. Use --pretty for a more human readable layout");
    _fluentCommandLineParser.Setup(arg => arg.JsonPretty)
        .As("pretty")
        .WithDescription(
            "When exporting to json, use a more human readable layout\r\n").SetDefault(false);
    _fluentCommandLineParser.Setup(arg => arg.Quiet)
        .As('q')
        .WithDescription(
            "Only show the filename being processed vs all output. Useful to speed up exporting to json and/or csv\r\n")
        .SetDefault(false);
    _fluentCommandLineParser.Setup(arg => arg.NoTargetIDList)
        .As("nid")
        .WithDescription(
            "Suppress Target ID list details from being displayed. Default is false.").SetDefault(false);
    _fluentCommandLineParser.Setup(arg => arg.NoExtraBlocks)
        .As("neb")
        .WithDescription(
            "Suppress Extra blocks information from being displayed. Default is false.\r\n").SetDefault(false);
    _fluentCommandLineParser.Setup(arg => arg.DateTimeFormat)
        .As("dt")
        .WithDescription(
            "The custom date/time format to use when displaying time stamps. Default is: yyyy-MM-dd HH:mm:ss K").SetDefault("yyyy-MM-dd HH:mm:ss K");
    _fluentCommandLineParser.Setup(arg => arg.PreciseTimestamps)
        .As("mp")
        .WithDescription(
            "Display higher precision for time stamps. Default is false").SetDefault(false);
    var header = $"LECmd version {Assembly.GetExecutingAssembly().GetName().Version}" +
                 "\r\n\r\nAuthor: Eric Zimmerman ([email protected])" +
                 "\r\nhttps://github.com/EricZimmerman/LECmd";
    var footer = @"Examples: LECmd.exe -f ""C:\Temp\foobar.lnk""" + "\r\n\t " +
                 @" LECmd.exe -f ""C:\Temp\somelink.lnk"" --json ""D:\jsonOutput"" --jsonpretty" + "\r\n\t " +
                 @" LECmd.exe -d ""C:\Temp"" --csv ""c:\temp"" --html c:\temp --xml c:\temp\xml -q" + "\r\n\t " +
                 @" LECmd.exe -f ""C:\Temp\some other link.lnk"" --nid --neb " + "\r\n\t " +
                 @" LECmd.exe -d ""C:\Temp"" --all" + "\r\n\t" + "\r\n\t" +
                 " Short options (single letter) are prefixed with a single dash. Long commands are prefixed with two dashes\r\n";
    _fluentCommandLineParser.SetupHelp("?", "help")
        .WithHeader(header)
        .Callback(text => _logger.Info(text + "\r\n" + footer));
    // Validate the parsed arguments
    var result = _fluentCommandLineParser.Parse(args);
    if (result.HelpCalled)
    {
        return;
    }
    if (result.HasErrors)
    {
        _logger.Error("");
        _logger.Error(result.ErrorText);
        _fluentCommandLineParser.HelpOption.ShowHelp(_fluentCommandLineParser.Options);
        return;
    }
    if (UsefulExtension.IsNullOrEmpty(_fluentCommandLineParser.Object.File) &&
        UsefulExtension.IsNullOrEmpty(_fluentCommandLineParser.Object.Directory))
    {
        _fluentCommandLineParser.HelpOption.ShowHelp(_fluentCommandLineParser.Options);
        _logger.Warn("Either -f or -d is required. Exiting");
        return;
    }
    if (UsefulExtension.IsNullOrEmpty(_fluentCommandLineParser.Object.File) == false &&
        !File.Exists(_fluentCommandLineParser.Object.File))
    {
        _logger.Warn($"File '{_fluentCommandLineParser.Object.File}' not found. Exiting");
        return;
    }
    if (UsefulExtension.IsNullOrEmpty(_fluentCommandLineParser.Object.Directory) == false &&
        !Directory.Exists(_fluentCommandLineParser.Object.Directory))
    {
        _logger.Warn($"Directory '{_fluentCommandLineParser.Object.Directory}' not found. Exiting");
        return;
    }
    _logger.Info(header);
    _logger.Info("");
    _logger.Info($"Command line: {string.Join(" ", Environment.GetCommandLineArgs().Skip(1))}\r\n");
    if (_fluentCommandLineParser.Object.PreciseTimestamps)
    {
        _fluentCommandLineParser.Object.DateTimeFormat = _preciseTimeFormat;
    }
    _processedFiles = new List<LnkFile>();
    _failedFiles = new List<string>();
    if (_fluentCommandLineParser.Object.File?.Length > 0)
    {
        // Single-file mode
        LnkFile lnk = null;
        try
        {
            lnk = ProcessFile(_fluentCommandLineParser.Object.File);
            if (lnk != null)
            {
                _processedFiles.Add(lnk);
            }
        }
        catch (UnauthorizedAccessException ua)
        {
            _logger.Error(
                $"Unable to access '{_fluentCommandLineParser.Object.File}'. Are you running as an administrator? Error: {ua.Message}");
            return;
        }
        catch (Exception ex)
        {
            _logger.Error(
                $"Error processing file '{_fluentCommandLineParser.Object.Directory}' Please send it to [email protected]. Error: {ex.Message}");
            return;
        }
    }
    else
    {
        // Directory mode: enumerate and process every matching file
        _logger.Info($"Looking for lnk files in '{_fluentCommandLineParser.Object.Directory}'");
        _logger.Info("");
        string[] lnkFiles = null;
        try
        {
            var mask = "*.lnk";
            if (_fluentCommandLineParser.Object.AllFiles)
            {
                mask = "*";
            }
            lnkFiles = Directory.GetFiles(_fluentCommandLineParser.Object.Directory, mask,
                SearchOption.AllDirectories);
        }
        catch (UnauthorizedAccessException ua)
        {
            _logger.Error(
                $"Unable to access '{_fluentCommandLineParser.Object.Directory}'. Error message: {ua.Message}");
            return;
        }
        catch (Exception ex)
        {
            _logger.Error(
                $"Error getting lnk files in '{_fluentCommandLineParser.Object.Directory}'. Error: {ex.Message}");
            return;
        }
        _logger.Info($"Found {lnkFiles.Length:N0} files");
        _logger.Info("");
        var sw = new Stopwatch();
        sw.Start();
        foreach (var file in lnkFiles)
        {
            var lnk = ProcessFile(file);
            if (lnk != null)
            {
                _processedFiles.Add(lnk);
            }
        }
        sw.Stop();
        if (_fluentCommandLineParser.Object.Quiet)
        {
            _logger.Info("");
        }
        _logger.Info(
            $"Processed {lnkFiles.Length - _failedFiles.Count:N0} out of {lnkFiles.Length:N0} files in {sw.Elapsed.TotalSeconds:N4} seconds");
        if (_failedFiles.Count > 0)
        {
            _logger.Info("");
            _logger.Warn("Failed files");
            foreach (var failedFile in _failedFiles)
            {
                _logger.Info($" {failedFile}");
            }
        }
    }
    if (_processedFiles.Count > 0)
    {
        // Export phase
        _logger.Info("");
        try
        {
            CsvWriter csv = null;
            StreamWriter sw = null;
            if (_fluentCommandLineParser.Object.CsvDirectory?.Length > 0)
            {
                if (Directory.Exists(_fluentCommandLineParser.Object.CsvDirectory) == false)
                {
                    _logger.Warn($"'{_fluentCommandLineParser.Object.CsvDirectory} does not exist. Creating...'");
                    Directory.CreateDirectory(_fluentCommandLineParser.Object.CsvDirectory);
                }
                var outName = $"{DateTimeOffset.Now.ToString("yyyyMMddHHmmss")}_LECmd_Output.tsv";
                var outFile = Path.Combine(_fluentCommandLineParser.Object.CsvDirectory, outName);
                _fluentCommandLineParser.Object.CsvDirectory = Path.GetFullPath(outFile);
                _logger.Warn(
                    $"CSV (tab separated) output will be saved to '{Path.GetFullPath(outFile)}'");
                try
                {
                    sw = new StreamWriter(outFile);
                    csv = new CsvWriter(sw);
                    csv.Configuration.Delimiter = $"{'\t'}";
                    csv.WriteHeader(typeof(CsvOut));
                }
                catch (Exception ex)
                {
                    _logger.Error(
                        $"Unable to open '{outFile}' for writing. CSV export canceled. Error: {ex.Message}");
                }
            }
            if (_fluentCommandLineParser.Object.JsonDirectory?.Length > 0)
            {
                if (Directory.Exists(_fluentCommandLineParser.Object.JsonDirectory) == false)
                {
                    _logger.Warn($"'{_fluentCommandLineParser.Object.JsonDirectory} does not exist. Creating...'");
                    Directory.CreateDirectory(_fluentCommandLineParser.Object.JsonDirectory);
                }
                _logger.Warn($"Saving json output to '{_fluentCommandLineParser.Object.JsonDirectory}'");
            }
            if (_fluentCommandLineParser.Object.XmlDirectory?.Length > 0)
            {
                {
                    if (Directory.Exists(_fluentCommandLineParser.Object.XmlDirectory) == false)
                    {
                        _logger.Warn($"'{_fluentCommandLineParser.Object.XmlDirectory} does not exist. Creating...'");
                        Directory.CreateDirectory(_fluentCommandLineParser.Object.XmlDirectory);
                    }
                }
                _logger.Warn($"Saving XML output to '{_fluentCommandLineParser.Object.XmlDirectory}'");
            }
            XmlTextWriter xml = null;
            if (_fluentCommandLineParser.Object.xHtmlDirectory?.Length > 0)
            {
                var outDir = Path.Combine(_fluentCommandLineParser.Object.xHtmlDirectory,
                    $"{DateTimeOffset.UtcNow.ToString("yyyyMMddHHmmss")}_LECmd_Output_for_{_fluentCommandLineParser.Object.xHtmlDirectory.Replace(@":\", "_").Replace(@"\", "_")}");
                if (Directory.Exists(outDir) == false)
                {
                    Directory.CreateDirectory(outDir);
                }
                File.WriteAllText(Path.Combine(outDir, "normalize.css"), Resources.normalize);
                File.WriteAllText(Path.Combine(outDir, "style.css"), Resources.style);
                var outFile = Path.Combine(_fluentCommandLineParser.Object.xHtmlDirectory, outDir, "index.xhtml");
                _logger.Warn($"Saving HTML output to '{outFile}'");
                xml = new XmlTextWriter(outFile, Encoding.UTF8)
                {
                    Formatting = Formatting.Indented,
                    Indentation = 4
                };
                xml.WriteStartDocument();
                xml.WriteProcessingInstruction("xml-stylesheet", "href=\"normalize.css\"");
                xml.WriteProcessingInstruction("xml-stylesheet", "href=\"style.css\"");
                xml.WriteStartElement("document");
            }
            foreach (var processedFile in _processedFiles)
            {
                var o = GetCsvFormat(processedFile);
                try
                {
                    csv?.WriteRecord(o);
                }
                catch (Exception ex)
                {
                    _logger.Error(
                        $"Error writing record for '{processedFile.SourceFile}' to '{_fluentCommandLineParser.Object.CsvDirectory}'. Error: {ex.Message}");
                }
                if (_fluentCommandLineParser.Object.JsonDirectory?.Length > 0)
                {
                    SaveJson(processedFile, _fluentCommandLineParser.Object.JsonPretty,
                        _fluentCommandLineParser.Object.JsonDirectory);
                }
                //XHTML
                xml?.WriteStartElement("Container");
                xml?.WriteElementString("SourceFile", o.SourceFile);
                xml?.WriteElementString("SourceCreated", o.SourceCreated);
                xml?.WriteElementString("SourceModified", o.SourceModified);
                xml?.WriteElementString("SourceAccessed", o.SourceAccessed);
                xml?.WriteElementString("TargetCreated", o.TargetCreated);
                xml?.WriteElementString("TargetModified", o.TargetModified);
                // BUGFIX: previously wrote o.TargetModified under the "TargetAccessed" element
                xml?.WriteElementString("TargetAccessed", o.TargetAccessed);
                xml?.WriteElementString("FileSize", o.FileSize.ToString());
                xml?.WriteElementString("RelativePath", o.RelativePath);
                xml?.WriteElementString("WorkingDirectory", o.WorkingDirectory);
                xml?.WriteElementString("FileAttributes", o.FileAttributes);
                xml?.WriteElementString("HeaderFlags", o.HeaderFlags);
                xml?.WriteElementString("DriveType", o.DriveType);
                xml?.WriteElementString("DriveSerialNumber", o.DriveSerialNumber);
                xml?.WriteElementString("DriveLabel", o.DriveLabel);
                xml?.WriteElementString("LocalPath", o.LocalPath);
                xml?.WriteElementString("CommonPath", o.CommonPath);
                xml?.WriteElementString("Arguments", o.Arguments);
                xml?.WriteElementString("TargetIDAbsolutePath", o.TargetIDAbsolutePath);
                xml?.WriteElementString("TargetMFTEntryNumber", $"{o.TargetMFTEntryNumber}");
                xml?.WriteElementString("TargetMFTSequenceNumber", $"{o.TargetMFTSequenceNumber}");
                xml?.WriteElementString("MachineID", o.MachineID);
                xml?.WriteElementString("MachineMACAddress", o.MachineMACAddress);
                xml?.WriteElementString("MACVendor", o.MACVendor);
                xml?.WriteElementString("TrackerCreatedOn", o.TrackerCreatedOn);
                xml?.WriteElementString("ExtraBlocksPresent", o.ExtraBlocksPresent);
                xml?.WriteEndElement();
                if (_fluentCommandLineParser.Object.XmlDirectory?.Length > 0)
                {
                    SaveXML(o, _fluentCommandLineParser.Object.XmlDirectory);
                }
            }
            //Close CSV stuff
            sw?.Flush();
            sw?.Close();
            //Close XML
            xml?.WriteEndElement();
            xml?.WriteEndDocument();
            xml?.Flush();
        }
        catch (Exception ex)
        {
            _logger.Error(
                $"Error exporting data! Error: {ex.Message}");
        }
    }
}