/// <summary>
/// Stages the self-updater and its dependencies in the temp directory, then launches the
/// updater against the supplied archive. The updater completes the install once this
/// process exits.
/// </summary>
/// <param name="appPaths">Provides the program system/data paths.</param>
/// <param name="archive">Path to the update archive; "[archive].ver" (if present) supplies the version string.</param>
/// <param name="logger">Logger for progress messages.</param>
/// <param name="restartServiceName">Service to restart after the update, or null when not running as a service.</param>
public void UpdateApplication(IApplicationPaths appPaths, string archive, ILogger logger, string restartServiceName)
{
    // Read the optional side-car version file written next to the archive.
    var version = "Unknown";
    var versionFile = archive + ".ver";
    if (File.Exists(versionFile))
    {
        version = File.ReadAllText(versionFile);
    }

    // Use our installer passing it the specific archive.
    // The updater and its dependencies must run from a temp copy so they can
    // replace the program files they were copied from.
    var tempPath = Path.GetTempPath();
    var tempUpdater = Path.Combine(tempPath, UpdaterExe);

    logger.Info("Copying updater to temporary location");
    File.Copy(Path.Combine(appPaths.ProgramSystemPath, UpdaterExe), tempUpdater, true);

    logger.Info("Copying updater dependencies to temporary location");
    File.Copy(Path.Combine(appPaths.ProgramSystemPath, UpdaterDll), Path.Combine(tempPath, UpdaterDll), true);

    // Our updater also needs ServiceStack and SharpCompress at runtime.
    File.Copy(Path.Combine(appPaths.ProgramSystemPath, "ServiceStack.Text.dll"), Path.Combine(tempPath, "ServiceStack.Text.dll"), true);
    File.Copy(Path.Combine(appPaths.ProgramSystemPath, "SharpCompress.dll"), Path.Combine(tempPath, "SharpCompress.dll"), true);

    logger.Info("Starting updater process.");
    var product = "mbt";
    var arguments = string.Format(
        "product={0} archive=\"{1}\" caller={2} pismo=false version={3} service={4} installpath=\"{5}\"",
        product, archive, Process.GetCurrentProcess().Id, version,
        restartServiceName ?? string.Empty, appPaths.ProgramDataPath);
    Process.Start(tempUpdater, arguments);

    // That's it. The installer will do the work once we exit
}
/// <summary>
/// Authenticates a user against Adobe Connect and creates the local user record on first
/// sight. The <paramref name="userName"/> is a composite value of the form
/// "companyToken|acDomain|acLogin".
/// </summary>
/// <param name="userName">Composite identifier: companyToken|acDomain|acLogin.</param>
/// <param name="password">The Adobe Connect password for the login.</param>
/// <returns>
/// The authenticated user, or null when the user name is malformed, the AC domain is not
/// valid for the company token, or the credentials are rejected.
/// </returns>
public override Task <AdobeConnectUser> FindAsync(string userName, string password)
{
    Task <AdobeConnectUser> taskInvoke = Task.Run(async() =>
    {
        string[] parts = userName.Split(new char[] { '|' }, StringSplitOptions.RemoveEmptyEntries);

        // FIX: the original indexed parts[0..2] unconditionally and threw
        // IndexOutOfRangeException on malformed input; fail the lookup instead.
        if (parts.Length < 3)
        {
            _logger?.Warn("[UserManager.FindAsync] Malformed userName; expected 'companyToken|acDomain|acLogin'.");
            return(null);
        }

        string companyToken = parts[0];
        string acDomain = parts[1];
        string acLogin = parts[2];

        if (!_acDomainValidator.IsValid(companyToken, acDomain))
        {
            _logger?.Warn($"[UserManager.FindAsync] AC domain is not valid for companyToken. AcDomain={acDomain}");
            return(null);
        }

        string sessionToken;
        var connectionDetails = new ConnectionDetails(new Uri(acDomain));
        var provider = new AdobeConnectProvider(connectionDetails);
        UserInfo acPrincipal = TryLogin(provider, new AdobeConnectAccess(new Uri(acDomain), acLogin, password), out sessionToken);
        _logger?.Info($"[UserManager.FindAsync] ACSession={sessionToken}");
        if (acPrincipal == null)
        {
            _logger?.Warn($"[UserManager.FindAsync] Principal not found. AcDomain={acDomain}, AcLogin={acLogin}");
            return(null);
        }

        // Roles are resolved only when an authorization provider is configured.
        var roles = new List <string>();
        if (_userAuthorizationProvider != null)
        {
            roles.AddRange(_userAuthorizationProvider.GetUserPermissions(provider, acPrincipal));
        }

        var applicationUser = new AdobeConnectUser
        {
            Id = acPrincipal.UserId,
            UserName = acLogin,
            CompanyToken = companyToken,
            AcDomain = acDomain,
            AcSessionToken = sessionToken,
            Roles = roles
        };

        // Persist a local copy of the principal the first time it is seen for this company/domain.
        var store = Store as IEdugameCloudUserStore <AdobeConnectUser>;
        if (store != null)
        {
            var user = await store.FindByPrincipalIdAndCompanyTokenAndAcDomainAsync(applicationUser.Id, companyToken, acDomain);
            if (user == null)
            {
                _logger?.Warn($"[UserManager.FindAsync] UserStore.CreateAsync. PrincipalId={applicationUser.Id}");
                await store.CreateAsync(applicationUser, password);
            }
        }

        return(applicationUser);
    });

    return(taskInvoke);
}
/// <summary>
/// Retrieves kernel export table.
/// </summary>
/// <returns>Kernel export addresses.</returns>
private long[] GetExportTable()
{
    _logger?.Info("Reading kernel export table");

    // gets export table with function offsets
    // 0x3C = e_lfanew (offset of the PE header) per the PE file format;
    // 0x78 points at the export data directory for a PE32 image.
    long peBase = _xbox.Memory.ReadUInt32(Address + 0x3C);
    long dataDirectory = _xbox.Memory.ReadUInt32(Address + peBase + 0x78);
    // 0x14 = NumberOfFunctions, 0x1C = AddressOfFunctions within the export directory.
    int exportCount = _xbox.Memory.ReadInt32(Address + dataDirectory + 0x14);
    long exportAddress = Address + _xbox.Memory.ReadUInt32(Address + dataDirectory + 0x1C);
    byte[] exportBytes = _xbox.Memory.ReadBytes(exportAddress, exportCount * sizeof(uint));

    // converts them to absolute addresses
    // NOTE(review): the table is sized exportCount + 1 and filled starting at index 1 —
    // presumably so callers can index by 1-based export ordinal; confirm with callers.
    long[] exportTable = new long[exportCount + 1];
    for (int i = 0; i < exportCount; i++)
    {
        long offset = BitConverter.ToUInt32(exportBytes, i * 4);
        // Zero offsets are unexported slots and stay 0 in the result.
        if (offset != 0)
        {
            exportTable[i + 1] = Address + offset;
        }
    }

    return(exportTable);
}
/// <summary>
/// Sets up the import checkpoint, the Event Store connection and the
/// thread-pool/service-point configuration used by the importer.
/// </summary>
public PostImporter()
{
    _checkpoint = new FileCheckpoint("postsLoaded");
    _logger = new EventStore.ClientAPI.Common.Log.ConsoleLogger();

    // Log every connection lifecycle event and never give up reconnecting/retrying.
    var _connectionSettings = ConnectionSettings.Create()
        .UseConsoleLogger()
        .KeepReconnecting()
        .KeepRetrying()
        .OnConnected(_ => _logger.Info("Event Store Connected"))
        .OnDisconnected(_ => _logger.Error("Event Store Disconnected"))
        .OnReconnecting(_ => _logger.Info("Event Store Reconnecting"))
        .OnErrorOccurred((c, e) => _logger.Error(e, "Event Store Error :("));

    // NOTE(review): hard-coded Event Store endpoint — consider moving to configuration.
    _connection = EventStoreConnection.Create(_connectionSettings, new IPEndPoint(IPAddress.Parse("192.81.222.61"), 1113));
    _connection.Connect();

    // Pin the thread pool to exactly 20 worker/IO threads for the import workload.
    ThreadPool.SetMaxThreads(20, 20);
    ThreadPool.SetMinThreads(20, 20);
    //ServicePointManager.DefaultConnectionLimit = 1000;
    ServicePointManager.Expect100Continue = false;
    // Certificate validation is delegated to the Validator callback defined elsewhere.
    ServicePointManager.ServerCertificateValidationCallback = Validator;
    //ServicePointManager.EnableDnsRoundRobin = false;
    //ServicePointManager.DnsRefreshTimeout = Int32.MaxValue;
}
/// <summary>
/// Starts watching the target folder for created/deleted files.
/// </summary>
public void Start()
{
    log = LogManager.GetLogger("FileWatch.Start()");
    log.Info("准备开始监控文件夹");
    txtBox.AppendText("准备开始监控文件夹" + System.Environment.NewLine);

    // NOTE(review): the watcher is a local variable; nothing visible here keeps a
    // reference to it after Start() returns — confirm it is not garbage-collected.
    FileSystemWatcher watcher = new FileSystemWatcher();

    // emfFilePath is always empty at this point, so the "<exe dir>\test" default is always used.
    string emfFilePath = string.Empty;
    if (string.IsNullOrEmpty(emfFilePath))
    {
        emfFilePath = Path.Combine(Path.GetDirectoryName(Assembly.GetEntryAssembly().Location), "test");
        if (!Directory.Exists(emfFilePath))
        {
            Directory.CreateDirectory(emfFilePath);
        }
        else
        {
            // Clear out any leftover files from a previous run.
            Directory.GetFiles(emfFilePath).ToList().ForEach(c =>
            {
                if (c != null) File.Delete(c);
            });
        }
    }

    log.Info("监控的文件夹为:" + emfFilePath);
    txtBox.AppendText("监控的文件夹为:" + emfFilePath + System.Environment.NewLine);
    watcher.Path = emfFilePath;

    // Watch last-access, last-write, file-name, directory-name and size changes.
    watcher.NotifyFilter = NotifyFilters.LastAccess | NotifyFilters.LastWrite | NotifyFilters.FileName | NotifyFilters.DirectoryName | NotifyFilters.Size;

    // Only watch *.xml files.
    // NOTE(review): the original comment said "only watch emf files" but the filter
    // is "*.xml" — confirm which is intended.
    watcher.Filter = "*.xml";
    watcher.Created += new FileSystemEventHandler(watcher_Created);
    watcher.Deleted += new FileSystemEventHandler(watcher_Deleted);
    // Setting this to true enables the Created/Deleted events subscribed above.
    watcher.EnableRaisingEvents = true;
}
/// <summary>
/// Runs application start-up: publishes the startup state, resolves one-off jobs,
/// and registers the settings converters and view factories.
/// </summary>
/// <param name="objectProvider">Container used to resolve jobs and registries.</param>
/// <param name="logger">Logger for the start-up banner.</param>
/// <param name="applicationStatePublisher">Publishes the Startup application state.</param>
public StartupController(IObjectProvider objectProvider, ILogger logger, IApplicationStatePublisher applicationStatePublisher)
{
    applicationStatePublisher.Publish(ApplicationState.Startup);

    logger.Info($"Starting Tail Blazer version v{Assembly.GetEntryAssembly().GetName().Version}");
    logger.Info($"at {DateTime.UtcNow}");

    //run start up jobs
    // Resolving these types triggers their side effects; the instances are not kept.
    objectProvider.Get<FileHeaderNamingJob>();
    objectProvider.Get<UhandledExceptionHandler>();

    // Register each settings converter under its persistence key.
    var settingsRegister = objectProvider.Get<ISettingsRegister>();
    settingsRegister.Register(new GeneralOptionsConverter(), "GeneralOptions");
    settingsRegister.Register(new RecentFilesToStateConverter(), "RecentFiles");
    settingsRegister.Register(new StateBucketConverter(), "BucketOfState");
    settingsRegister.Register(new RecentSearchToStateConverter(), "RecentSearch");
    settingsRegister.Register(new TextAssociationToStateConverter(), "TextAssociation");
    settingsRegister.Register(new SearchMetadataToStateConverter(), "GlobalSearch");

    //TODO: Need type scanner then this code is not required
    var viewFactoryRegister = objectProvider.Get<IViewFactoryRegister>();
    viewFactoryRegister.Register<TailViewModelFactory>();

    objectProvider.Get<SystemSetterJob>();

    logger.Info("Starting complete");
}
/// <summary>
/// Wires up the core managers, starts the once-per-second process scan timer,
/// the game-state web server, the named pipe server and the background update check.
/// </summary>
public MainManager(ILogger logger, LoopManager loopManager, DeviceManager deviceManager, EffectManager effectManager, ProfileManager profileManager, PipeServer pipeServer)
{
    Logger = logger;
    LoopManager = loopManager;
    DeviceManager = deviceManager;
    EffectManager = effectManager;
    ProfileManager = profileManager;
    PipeServer = pipeServer;

    // Scan running processes once per second.
    _processTimer = new Timer(1000);
    _processTimer.Elapsed += ScanProcesses;
    _processTimer.Start();

    ProgramEnabled = false;
    Running = false;

    // Create and start the web server
    GameStateWebServer = new GameStateWebServer(logger);
    GameStateWebServer.Start();

    // Start the named pipe
    PipeServer.Start("artemis");

    // Start the update task
    var updateTask = new Task(Updater.UpdateApp);
    updateTask.Start();

    // FIX: corrected the "Intialized" typo in the log message.
    Logger.Info("Initialized MainManager");
    Logger.Info($"Artemis version {Assembly.GetExecutingAssembly().GetName().Version} is ready!");
}
/// <inheritdoc />
public void LogInfo <T>(T value)
{
    // Skip entirely when info logging is disabled; otherwise forward to the
    // underlying logger (which may itself be absent).
    if (!IsInfoLoggingEnabled)
    {
        return;
    }

    _logger?.Info(value);
}
/// <summary>
/// Scheduled-job entry point: runs InnerExecute under a lock so that an overlapping
/// schedule is skipped (with a log entry) rather than run concurrently.
/// </summary>
/// <param name="context">The job execution context supplied by the scheduler.</param>
public virtual void Execute(JobExecutionContext context)
{
    Logger = new ServiceLogger(context.JobDetail.Name);

    // Wait up to 3s for the previous run to finish; give up and log if it is still going.
    if (Monitor.TryEnter(SYNC_LOCK, 3000) == false)
    {
        Logger.Debug("上一次调度未完成,本次调度放弃运行");
        return;
    }

    try
    {
        Logger.Info("调度开始执行");
        InnerExecute(context);
        Logger.Info("调度正常结束");
    }
    catch (Exception e)
    {
        // Never let a job exception escape to the scheduler; log it instead.
        Logger.Error("调度执行时发生异常: " + e);
    }
    finally
    {
        Monitor.Exit(SYNC_LOCK);
    }
}
/// <summary>
/// Get the games full name from the zip file name. Ex: xmcota.zip will return "X-Men: Children of the Atom"
/// </summary>
/// <param name="path">The path</param>
/// <param name="logger">Logger used while lazily building the ROM-name dictionary.</param>
/// <returns>The games full name, or null when the ROM short name has no mapping.</returns>
public static string GetFullNameFromPath(string path, ILogger logger)
{
    // Lazily build the shared ROM-name dictionary on first use.
    // NOTE(review): this is double-checked locking on a field that is not visibly
    // volatile — confirm the field declaration, or accept the (benign) risk of a
    // rare duplicate build.
    if (_romNamesDictionary == null)
    {
        lock (LockObject)
        {
            // Build the dictionary if it's not already populated
            if (_romNamesDictionary == null)
            {
                logger.Info("GameBrowser: Initializing RomNamesDictionary");
                // Case-insensitive keys: ROM file names vary in casing.
                _romNamesDictionary = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
                logger.Info("GameBrowser: Building RomNamesDictionary");
                BuildRomNamesDictionary(logger);
            }
        }
    }

    var shortName = Path.GetFileNameWithoutExtension(path);
    if (shortName != null)
    {
        string value;
        if (_romNamesDictionary.TryGetValue(shortName, out value))
        {
            return value;
        }
    }

    // Unknown ROM: no mapping found.
    return null;
}
/// <summary>
/// Called when an Update message has arrived, applies the new state to the entity.
/// </summary>
/// <param name="entityId">The unique identifier for the entity.</param>
/// <param name="ownerId">The network identifier of the peer that owns the entity.</param>
/// <param name="payload">The object that will be used to apply the entity's starting state.</param>
/// <param name="isReckoning">True if this is a reckoning update.</param>
/// <param name="time">The time the message was sent, used for projecting the state to current time.</param>
private void UpdateEntity(long entityId, long ownerId, object payload, bool isReckoning, double time)
{
    INetworkEntity targetEntity = mEntities.Where(e => e.EntityId == entityId && e.OwnerId == ownerId).SingleOrDefault();

    // TODO: automatically create entity?
    // ignore if null, entity creation message may not have arrived
    mLog?.Info($"Receiving update from {ownerId} to update entity {payload?.GetType()}");
    if (targetEntity != null)
    {
        // Only apply remote state to entities we do not own, unless this is a reckoning correction.
        if (targetEntity.OwnerId != this.NetworkId || isReckoning)
        {
            targetEntity.UpdateFromState(payload, time);
        }

        BroadcastIfServer(entityId, targetEntity.OwnerId, payload,
            isReckoning ? NetworkMessageType.Reckoning : NetworkMessageType.Update
        );
    }
    else
    {
        // FIX: use the null-conditional operator as the rest of the method does;
        // the original `mLog.Debug(...)` threw NullReferenceException when mLog was null.
        mLog?.Debug("Couldn't find entity to update: " + entityId);
    }
}
// TODO: add methods for
//   delete single record
//   Delete an entity
//   Retrieve entities in pages asynchronously
/// <summary>
/// Replaces the given entities in the table, batched per partition key in chunks of at
/// most 100 rows (the Azure Table batch limit).
/// MS docs - https://docs.microsoft.com/en-us/azure/cosmos-db/table-storage-how-to-use-dotnet#retrieve-entities-in-pages-asynchronously
/// </summary>
/// <typeparam name="TEntity">The table entity type.</typeparam>
/// <param name="data">Entities to replace; may span multiple partitions.</param>
/// <param name="logger">Optional progress logger.</param>
// TODO: update that
//https://stackoverflow.com/questions/17955557/painfully-slow-azure-table-insert-and-delete-batch-operations
public void BatchUpdate <TEntity>(IList <TEntity> data, ILogger logger = null) where TEntity : ITableEntity
{
    // All entities in a single batch must share the same partition key.
    var groupedByPartition = data.GroupBy(x => x.PartitionKey).ToList();

    foreach (var group in groupedByPartition)
    {
        var entities = group.Select(x => x).ToList();
        logger?.Info("Saving for PartitionKey = " + group.Key + $" ({entities.Count}) records");

        int rowOffset = 0;
        while (rowOffset < entities.Count)
        {
            // Azure Table batch operations accept at most 100 rows per batch.
            var rows = entities.Skip(rowOffset).Take(100).ToList();
            rowOffset += rows.Count;
            string partition = "$" + rowOffset.ToString();

            var batch = new TableBatchOperation();
            foreach (var row in rows)
            {
                batch.Replace(row);
            }

            // FIX: the original fired ExecuteBatchAsync without awaiting it, so failures
            // were unobserved and the method could return before the writes completed.
            // This method is synchronous, so block until each batch finishes.
            CloudTable.ExecuteBatchAsync(batch).GetAwaiter().GetResult();
            logger?.Info("Updated batch for partition " + partition);
        }
    }
}
/// <summary>
/// Handles the verification service's HTTP callback: reads and parses the request body
/// and maps the service's status code onto a CallbackResult.
/// </summary>
/// <param name="request">The incoming callback request; its body is read to the end.</param>
/// <returns>
/// A result whose OverallStatus is Verified/NotVerified based on status_code, or Fail
/// when the payload cannot be read or parsed.
/// </returns>
public async Task <CallbackResult> OnServiceCallback(HttpRequest request)
{
    var ret = new CallbackResult()
    {
        OverallStatus = VerificationStatus.Pending,
    };

    try
    {
        var result = new Dictionary <string, string>();
        var raw = "";
        using (var reader = new StreamReader(request.Body))
        {
            raw = await reader.ReadToEndAsync();
        }

        if (!Json.ParseInto(raw, result))
        {
            throw new Exception("Failed to parse response");
        }

        // "1" is the service's success code; anything else counts as not verified.
        ret.OverallStatus = result["status_code"] == "1" ? VerificationStatus.Verified : VerificationStatus.NotVerified;
        ret.TicketId = result["reference"];
        ret.ServiceStatus = result["status_code"];
        ret.ServiceMessage = result["message"];
        _logger?.Info($"Callback code={ result["status_code"] } for ref { result["reference"] }: { result["message"] }");
    }
    catch (Exception e)
    {
        // Any read/parse failure (including missing keys above) downgrades the result to Fail.
        ret.OverallStatus = VerificationStatus.Fail;
        _logger?.Info(e, "Callback failure");
    }

    return(ret);
}
/// <summary>
/// Runs <paramref name="runTxFunc"/> until it succeeds, retrying retriable errors with
/// exponentially growing, jittered delays, until the configured maximum retry time is spent.
/// </summary>
/// <typeparam name="T">The transaction result type.</typeparam>
/// <param name="runTxFunc">The transaction body to execute.</param>
/// <returns>The result of the first successful invocation.</returns>
/// <exception cref="ServiceUnavailableException">
/// Thrown when the retry budget is exhausted; wraps all collected failures in an AggregateException.
/// </exception>
public T Retry <T>(Func <T> runTxFunc)
{
    var exceptions = new List <Exception>();
    var timer = new Stopwatch();
    timer.Start();

    var delayMs = _initialRetryDelayMs;
    var counter = 0;
    do
    {
        counter++;
        try
        {
            return(runTxFunc());
        }
        // Only errors classified as retriable are swallowed; anything else propagates.
        catch (Exception e) when(e.IsRetriableError())
        {
            exceptions.Add(e);
            var delay = TimeSpan.FromMilliseconds(ComputeDelayWithJitter(delayMs));
            _logger?.Info("Transaction failed and will be retried in " + delay + "ms.", e);
            Thread.Sleep(delay);
            // Exponential backoff: grow the base delay for the next attempt.
            delayMs = delayMs * _multiplier;
        }
    } while (timer.Elapsed.TotalMilliseconds < _maxRetryTimeMs);

    timer.Stop();
    throw new ServiceUnavailableException(
        $"Failed after retried for {counter} times in {_maxRetryTimeMs} ms. " +
        "Make sure that your database is online and retry again.",
        new AggregateException(exceptions));
}
/// <summary>
/// Locates the solution root by walking four directories up from the application base
/// directory, then normalizes the listed file types within it.
/// </summary>
/// <param name="logger">Optional logger for progress and warnings.</param>
internal static void Fix(ILogger?logger)
{
    // Four GetDirectoryName calls — presumably the binary lives four levels below the
    // solution root (e.g. bin/<config>/<tfm>); TODO confirm the expected layout.
    var path = new PathString(AppDomain.CurrentDomain.BaseDirectory ?? Environment.CurrentDirectory)
        .GetDirectoryName()?.GetDirectoryName()?.GetDirectoryName()?.GetDirectoryName();
    if (path is null)
    {
        logger?.Warn("Could not load the solution path.");
        return;
    }

    logger?.Info($"The solution path is: {path}");
    logger?.Info($"Ready to run.");
    // Wait for the user before touching any files.
    ConsoleUtil.Pause();

    LoadAndConvertFiles(logger, path,
        // Git Configurations
        ".gitignore", ".gitattributes", ".gitmodules",
        // Visual Studio Files
        ".sln", ".csproj", ".editorconfig",
        // C# Code Files and ResX Files
        ".cs", ".resx",
        // Document Files
        ".txt", ".md",
        // Batch Scripts
        ".cmd",
        // Data Description Language Files
        ".xsd", ".json", ".yml",
        // Others
        ".partial"
    );
}
/// <summary>
/// Adds a nuget reference to a component: the first argument selects the nuget,
/// the last argument selects the component that will reference it.
/// </summary>
/// <returns>Always true (the command was handled, successfully or not).</returns>
public bool Process(ILogger logger, IEnumerable<string> args, MetaProjectPersistence metaProject, ComponentsList components, string packagesOutputDirectory)
{
    // A usable pattern is present, is not an option switch, and has no trailing quote.
    Func<string, bool> isBadPattern = pattern =>
        pattern == null || pattern.StartsWith("-") || pattern.EndsWith("\"");

    var nugetNamePattern = args.FirstOrDefault();
    if (isBadPattern(nugetNamePattern))
    {
        logger.Error("No nuget pattern specified");
        return true;
    }

    var nugetComponent = components.FindComponent<INugetSpec>(nugetNamePattern);
    if (nugetComponent == null)
    {
        return true;
    }
    logger.Info("== Nuget to add: {0}", nugetComponent);

    var componentNamePattern = args.LastOrDefault();
    if (isBadPattern(componentNamePattern))
    {
        logger.Error("No component pattern specified");
        return true;
    }

    var specificComponent = components.FindComponent<IProject>(componentNamePattern);
    if (specificComponent == null)
    {
        return true;
    }
    logger.Info("== Component to reference nuget: {0}", specificComponent);

    // A nuget cannot depend on itself.
    if (specificComponent == nugetComponent)
    {
        logger.Error("Nuget can't be added to itself");
        return true;
    }

    specificComponent.AddNuget(logger, nugetComponent, components, packagesOutputDirectory);
    return true;
}
/// <summary>
/// Disconnects from the xbox and releases the connection's reader, writer and stream.
/// Safe to call multiple times; the disposed flag is always set, even on failure.
/// </summary>
/// <param name="disposing">True when called from Dispose(); false when called from a finalizer.</param>
protected override void Dispose(bool disposing)
{
    try
    {
        // Idempotent: a second call is a no-op.
        if (_isDisposed)
        {
            return;
        }

        _logger?.Info("Disconnecting");

        if (!Options.HasFlag(XboxConnectionOptions.NotificationSession))
        {
            // avoid port exhaustion by attempting to gracefully inform the xbox we're leaving
            SendCommandText("bye");
        }

        // Managed resources are only released on an explicit Dispose().
        if (disposing)
        {
            Reader?.Dispose();
            Writer?.Dispose();
            Stream?.Dispose();
        }

        // dispose any unmanaged resources
    }
    finally
    {
        _isDisposed = true;
        base.Dispose(disposing);
    }
}
/// <summary>
/// Names the UI thread, creates the application logger and writes the start-up banner.
/// </summary>
public Bootstrapper()
{
    Thread.CurrentThread.Name = "UI";

    _loggerFactory = new LoggerFactory();
    _logger = _loggerFactory.CreateLogger(GetType());

    // A separator row makes a fresh run easy to spot in the log.
    const string banner = "-------------------------------------------------------------------------------";
    _logger.Info(banner);
    _logger.Info("Starting application");
}
/// <summary>
/// Windows-service start handler: creates the file watcher and starts monitoring.
/// </summary>
/// <param name="args">Service start arguments (unused here).</param>
protected override void OnStart(string[] args)
{
    logger = LogManager.GetLogger("文件监控服务启动");
    logger.Info("文件监控服务准备启动");

    // Create and start the folder watcher; it keeps running for the service lifetime.
    g_FilePathWatch = new FileWatch();
    g_FilePathWatch.Start();

    logger.Info("文件监控服务启动完毕");
    // TODO: Add code here to start your service.
}
/// <summary>
/// Static initializer: sets up logging, writes the start-up banner and boots the container.
/// </summary>
static Startup()
{
    _logger = new Log4NetLogger(typeof(Startup));

    // Three separator rows make a fresh run easy to spot in a rolling log file.
    const string separator = "--------------------------------------------------------------------------------";
    for (var i = 0; i < 3; i++)
    {
        _logger.Info(separator);
    }

    _logger.Info("Startup");
    _container = Bootstrapper.Initialise();
}
/// <summary>
/// Replaces every registered DbProviderFactory with a Glimpse proxy factory so that
/// ADO.NET activity can be inspected.
/// </summary>
/// <param name="logger">Logger for progress, skips and failures.</param>
public void Execute(ILogger logger)
{
    logger.Info("AdoInspector: Starting to replace DbProviderFactory");

    //This forces the creation
    try
    {
        DbProviderFactories.GetFactory("Anything");
    }
    catch (ArgumentException)
    {
        // Expected: "Anything" is not a registered provider; the call is made only for
        // its side effect of forcing the provider table to be created.
        // FIX: removed the unused exception variable the original declared here.
    }

    //Find the registered providers
    var table = Support.FindDbProviderFactoryTable();

    //Run through and replace providers
    // ToList() snapshots the rows so the table can be mutated inside the loop.
    foreach (var row in table.Rows.Cast<DataRow>().ToList())
    {
        DbProviderFactory factory;
        try
        {
            factory = DbProviderFactories.GetFactory(row);
            logger.Info("AdoInspector: Successfully retrieved factory - {0}", row["Name"]);
        }
        catch (Exception)
        {
            logger.Error("AdoInspector: Failed to retrieve factory - {0}", row["Name"]);
            continue;
        }

        //Check that we haven't already wrapped things up
        if (factory is GlimpseDbProviderFactory)
        {
            logger.Error("AdoInspector: Factory is already wrapped - {0}", row["Name"]);
            continue;
        }

        var proxyType = typeof(GlimpseDbProviderFactory<>).MakeGenericType(factory.GetType());

        // Re-register the provider row, pointing it at the proxy factory type.
        var newRow = table.NewRow();
        newRow["Name"] = row["Name"];
        newRow["Description"] = row["Description"];
        newRow["InvariantName"] = row["InvariantName"];
        newRow["AssemblyQualifiedName"] = proxyType.AssemblyQualifiedName;

        table.Rows.Remove(row);
        table.Rows.Add(newRow);
        logger.Info("AdoInspector: Successfully replaced - {0}", newRow["Name"]);
    }

    logger.Info("AdoInspector: Finished replacing DbProviderFactory");
}
/// <summary>
/// Reads pending data from the configured readers, forwards each message to the
/// connected clients, and logs per-source send counts and total elapsed time.
/// A call made while a previous run is still in progress is skipped.
/// </summary>
internal void ProcessingData()
{
    // FIX: the guard check must happen OUTSIDE the try/finally. In the original, a
    // skipped re-entrant call still executed `finally { _lock = false; }` and cleared
    // the flag owned by the in-progress run, defeating the guard.
    // NOTE(review): _lock is a plain bool check-then-set, not atomic — confirm this is
    // only ever invoked from a single thread (e.g. one timer), or use Interlocked.
    if (_lock)
    {
        return;
    }

    _lock = true;
    try
    {
        var method = UtilHelper.GetMethodName(MethodBase.GetCurrentMethod());
        _logger?.Info($"{method} => start");
        var watch = System.Diagnostics.Stopwatch.StartNew();

        var data = DataReaderHelper.GetData(
            _dataReaders,
            _filtersValidator,
            _filtersValidatorRepository,
            _aggregatorsValidator,
            _aggregatorsValidatorRepository);

        if (!data.Any())
        {
            _logger?.Debug("No data found...");
            return;
        }

        // Tally how many messages were sent per (storage, reader) pair for the summary log.
        var sendedMessages = new Dictionary <SendedMessages, int>();
        foreach (var message in data)
        {
            // Send() returning false means no clients are registered — abort the run.
            if (!_clientsManager.Send(message))
            {
                _logger?.Debug("No clients defined to send...");
                return;
            }

            var sendedMessage = new SendedMessages
            {
                Name = message.LogicalStorage,
                Id = message.ReaderId
            };
            sendedMessages.AddOrIncrement(sendedMessage, 1);
        }

        foreach (var message in sendedMessages)
        {
            _logger?.Debug($"Sended: {message.Value} messages from {message.Key}");
        }

        watch.Stop();
        var elapsedMs = watch.ElapsedMilliseconds;
        _logger?.Info($"{method} => end, Time taken: {elapsedMs}ms");
    }
    catch (Exception ex)
    {
        _logger?.Error(ex.Message);
    }
    finally
    {
        // Always release the re-entrancy guard this run set.
        _lock = false;
    }
}
/// <summary>
/// Entry point: loads the chaos configuration from Chaos.json, applies the repeat and
/// delay settings, constructs Bedlam and runs it the configured number of times.
/// </summary>
private static void Main()
{
    Logger = LogManager.GetLogger(nameof(Program));

    //This is for one log file per run (based on start time...)
    // NOTE(review): the timestamp contains ':' characters, which are invalid in Windows
    // file names — confirm the NLog layout escapes or replaces them.
    NLog.GlobalDiagnosticsContext.Set("StartTime", DateTime.Now.ToString("yyyy-MM-dd-HH:mm:ss"));

    var configPath = Path.Combine(Environment.CurrentDirectory, "Chaos.json");
    if (File.Exists(configPath))
    {
        _chaosConfiguration = GetChaosConfiguration(configPath);
    }

    if (_chaosConfiguration == null)
    {
        // if GetChaosConfiguration fails (which it logs...), then exit...
        // FIX: corrected the "configruration" typo in the log message.
        Logger?.Info("No configuration supplied... Exiting...");
        Environment.Exit(-1);
    }

    //Repeat setting
    if (_chaosConfiguration.Repeat > 0)
    {
        _repeat = _chaosConfiguration.Repeat;
    }

    //Start delay setting
    if (_chaosConfiguration.RunDelay > 0)
    {
        _runDelay = _chaosConfiguration.RunDelay;
    }

    try
    {
        //Must initialize Bedlam with a ChaosConfiguration...
        _bedlam = new Bedlam(_chaosConfiguration);
    }
    catch (Exception e)
    {
        Logger?.Info("Error initializing Bedlam: " + e.Message + "\n Exiting...");
        Logger?.Error(e);
        Environment.Exit(-1);
    }

    // One initial run plus _repeat additional runs, each preceded by the optional delay.
    for (int i = 0; i < _repeat + 1; i++)
    {
        if (_runDelay > 0)
        {
            Thread.Sleep(_runDelay * 1000);
        }

        _bedlam.Run();
    }
}
/// <summary>
/// Creates Database if not exist and seeds data if needed
/// </summary>
public void Initialize()
{
    _context.Database.CreateIfNotExists();

    // Seed only into an empty database; existing users mean seeding already happened.
    if (_context.Users.Any())
    {
        return;
    }

    _logger?.Info("Seeding data...");
    Seed();
    _context.SaveChanges();
    _logger?.Info("Finished seeding data.");
}
/// <summary>
/// Logs a warning message, attaching the causing exception when one is supplied.
/// NOTE(review): this forwards to _logger?.Info(...), so the entry is emitted at Info
/// severity despite the method name — confirm whether the underlying logger lacks a
/// Warn method or whether this is a bug.
/// </summary>
/// <param name="cause">Optional exception that triggered the warning.</param>
/// <param name="message">Composite-format message (passed to string.Format).</param>
/// <param name="args">Format arguments for <paramref name="message"/>.</param>
public void Warn(Exception cause, string message = "", params object[] args)
{
    if (cause != null)
    {
        _logger?.Info(string.Format(message, args), cause);
    }
    else
    {
        _logger?.Info(string.Format(message, args));
    }
}
/// <summary>
/// Safely add new feature : AddFeatureIfNotAlreadyAdded
/// </summary>
/// <param name="logger">ILogger logger</param>
/// <param name="collection">SPFeatureCollection collection</param>
/// <param name="featureGuid">Guid featureGuid</param>
private void AddFeatureIfNotAlreadyAdded(ILogger logger, SPFeatureCollection collection, Guid featureGuid)
{
    // A non-null indexer result means the feature is already activated.
    var alreadyActivated = collection[featureGuid] != null;
    if (alreadyActivated)
    {
        logger.Info("Skipping feature with Guid " + featureGuid.ToString());
        return;
    }

    logger.Info("Activating feature with Guid " + featureGuid.ToString());
    collection.Add(featureGuid);
}
/// <summary>
/// Opens the nuspec inside the source nupkg, rebuilds its dependency groups for the
/// given package name, writes the document back into the package, then copies the
/// package to the target path.
/// </summary>
/// <param name="logger">Optional progress logger.</param>
/// <param name="sourcePath">Directory containing the original nupkg.</param>
/// <param name="targetPath">Directory to copy the rewritten nupkg into.</param>
/// <param name="ver">Package version, used both in file names and dependency versions.</param>
/// <param name="name">Package id; selects which dependency set is written.</param>
private static void RewriteAndCopyNuspecFile(ILogger?logger, PathString sourcePath, PathString targetPath, string ver, string name)
{
    var src = sourcePath + $"{name}.{ver}.nupkg";
    var dst = targetPath + $"{name}.{ver}.nupkg";
    logger?.Info($"rewriting: {src}");

    using (var nupkgf = OpenNupkgFile(src))
    using (var stream = OpenNuspecFile(nupkgf, $"{name}.nuspec"))
    {
        var xd = new XmlDocument();
        xd.Load(stream);

        // Drop all existing dependency entries; they are rebuilt per package below.
        var dep = xd["package"]["metadata"]["dependencies"];
        dep.RemoveAll();

        // One dependency group per target framework.
        var group1 = CreateGroupElement(xd, dep, ".NETFramework4.8");
        var group2 = CreateGroupElement(xd, dep, ".NETCoreApp3.1");
        switch (name)
        {
        case "ExapisSOP":
            // The root package has no dependencies.
            break;

        case "ExapisSOP.DemoApp":
            CreateDependencyElement(xd, group1, group2, ver, "ExapisSOP");
            CreateDependencyElement(xd, group1, group2, ver, "ExapisSOP.NativeWrapper.Windows");
            CreateDependencyElement(xd, group1, group2, ver, "ExapisSOP.Tools.EncodingFixer");
            CreateDependencyElement(xd, group1, group2, ver, "ExapisSOP.Tools.PackageDependencyFixer");
            CreateDependencyElement(xd, group1, group2, ver, "ExapisSOP.Utils");
            break;

        case "ExapisSOP.NativeWrapper.Windows":
            CreateDependencyElement(xd, group1, group2, ver, "ExapisSOP");
            break;

        case "ExapisSOP.Tools.EncodingFixer":
            CreateDependencyElement(xd, group1, group2, ver, "ExapisSOP");
            break;

        case "ExapisSOP.Tools.PackageDependencyFixer":
            // System.IO.Compression is added to the .NET Core group only, pinned at 4.3.0.
            CreateDependencyElement(xd, null, group2, "4.3.0", "System.IO.Compression");
            CreateDependencyElement(xd, group1, group2, ver, "ExapisSOP");
            CreateDependencyElement(xd, group1, group2, ver, "ExapisSOP.NativeWrapper.Windows");
            break;

        case "ExapisSOP.Utils":
            CreateDependencyElement(xd, group1, group2, ver, "ExapisSOP");
            break;
        }

        // Truncate the entry and overwrite it with the rewritten document.
        stream.Position = 0;
        stream.SetLength(0);
        xd.Save(stream);
    }

    logger?.Info($"copying from {src} to {dst}...");
    File.Copy(src, dst, true);
}
/// <summary>
/// Gets raw HTTP response
/// </summary>
/// <param name="url">Url</param>
/// <param name="method">HTTP Method</param>
/// <param name="request">HTTP request</param>
/// <param name="noOutput">Output will not be proceed when true, method return default(T)</param>
/// <param name="token">Cancellation token</param>
/// <returns>Task</returns>
/// <exception cref="TaskCanceledException">When operation cancelled</exception>
/// <exception cref="ConnectionException">When response from server does not indicate success</exception>
private async Task <HttpResponseMessage> GetRawResponse([NotNull] string url, [NotNull] HttpMethod method, [CanBeNull] object request, CancellationToken token, bool noOutput = false)
{
    // NOTE(review): noOutput is not used anywhere in this body — confirm whether it is
    // consumed by a caller-side contract or is dead.
    await OnBeforeRequest(url, token).ConfigureAwait(false);

    HttpResponseMessage data = null;
    HttpStringContent requestcontent = null;
    string requestBody = null;

    // A string request is sent verbatim; any other object is serialized to JSON.
    var content = request as string;
    if (content != null)
    {
        requestcontent = new HttpStringContent(content);
    }
    else if (request != null)
    {
        requestBody = JsonConvert.SerializeObject(request, CreateJsonSerializerSettings());
        requestcontent = new HttpStringContent(requestBody, UnicodeEncoding.Utf8, "application/json");
    }

    try
    {
        // Absolute URLs are used as-is; relative ones are resolved against the base URL.
        var fullUrl = (new[] { "http://", "https://" }).Any(url.StartsWith) ? url : GetBaseUrl() + url;
        var client = CreateHttpClient(fullUrl);
        var requestMessage = new HttpRequestMessage
        {
            Method = method,
            RequestUri = new Uri(fullUrl),
            Content = requestcontent,
        };
        Logger?.Info($"{method} {fullUrl}" + (requestBody != null ? "\r\n" + requestBody : ""));

        // Only read headers eagerly; the body is streamed by the caller.
        data = token == CancellationToken.None
            ? await client.SendRequestAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead)
            : await client.SendRequestAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead).AsTask(token);
        return(data);
    }
    catch (TaskCanceledException)
    {
        // FIX: the original string was not interpolated (missing '$'), so the log
        // literally printed "Requesting {url} cancelled".
        Logger?.Error($"Requesting {url} cancelled");
        throw;
    }
    catch (Exception ex)
    {
        Logger?.Error($"Error communicating with the server for {url}", ex);
        throw new ConnectionException("Error communicating with the server. See the inner exception for details.", ex, data?.StatusCode ?? HttpStatusCode.ExpectationFailed, null);
    }
}
/// <summary>
/// Lists the directories the metaproject will and will not scan, then rescans.
/// </summary>
/// <returns>Always true (the command was handled).</returns>
public bool Process(ILogger logger, IEnumerable<string> args, MetaProjectPersistence metaProject, ComponentsList components, string packagesOutputDirectory)
{
    logger.Info("Directories that will be scanned:");
    // logger.Block presumably returns a disposable that indents output for the
    // duration of the using — TODO confirm against the ILogger implementation.
    using (logger.Block)
        foreach (var dir in metaProject.ListOfDirectories)
            logger.Info(dir);

    logger.Info("Directories that won't be scanned:");
    using (logger.Block)
        foreach (var dir in metaProject.ListOfExcludedDirectories)
            logger.Info(dir);

    Rescan(logger, metaProject, components);
    return true;
}
/// <summary>
/// Activates the server and spawns one background listen task per configured listener.
/// </summary>
public void Start()
{
    _isActive = true;

    foreach (var listener in _listeners)
    {
        // Copy the loop variable so each task closes over its own listener.
        var current = listener;
        Task.Factory.StartNew(() => Listen(current));
    }

    _logger?.Info("Embedded uhttpserver started.");
}
/// <summary>
/// Handles Ctrl+C: announces shutdown, requests cancellation (best effort) and
/// cancels the key press so the process is not terminated immediately.
/// </summary>
private static void OnCancelKeyPress(object sender, ConsoleCancelEventArgs e)
{
    const string notice = "SIGINT received. Exiting.";
    logger?.Info(() => notice);
    Console.WriteLine(notice);

    try
    {
        cts?.Cancel();
    }
    catch
    {
        // Best effort — the token source may already be disposed.
    }

    // Suppress the default immediate termination.
    e.Cancel = true;
}
/// <summary>
/// Service bootstrap: configures the container, creates the logger and settings,
/// then initializes the backing store.
/// </summary>
public Service()
{
    // Container must come first; Log.For(this) depends on the configured container.
    InitializeContainer();

    m_logger = Log.For(this);
    m_settings = new ServiceSettings();

    m_logger.Info("Starting Service");
    this.InitStore();
    m_logger.Info("Service Started Successfully");
}
/// <summary>
/// OAuth refresh-token grant: refreshes the Adobe Connect session for the ticket's
/// user, replaces the ac_session and role claims, and re-validates the ticket.
/// </summary>
/// <param name="context">The refresh-token grant context carrying the existing ticket.</param>
public override async Task GrantRefreshToken(OAuthGrantRefreshTokenContext context)
{
    var identity = context.Ticket.Identity;
    var id = identity.GetUserId();
    // NOTE(review): FindFirst returns null when the claim is absent; the .Value reads
    // below would then throw — confirm these claims are always present on tickets
    // reaching this grant.
    var domain = identity.FindFirst("ac_domain");
    var companyToken = identity.FindFirst("c_token");

    Identity.AdobeConnectUser user = null;
    using (Identity.AdobeConnectUserManager userManager = _userManagerFactory())
    {
        try
        {
            user = await userManager.RefreshSession(id, companyToken.Value, domain.Value, identity.Name);
            _logger?.Info($"[GrantRefreshToken.AfterLogin] ACSession={user.AcSessionToken}");
        }
        catch (Exception ex)
        {
            // TODO: production-ready exceptions
            context.SetError("server_error", ex.Message);
            return;
        }
    }

    if (user == null)
    {
        context.SetError("token_refresh_error", "User session has not been updated successfully.");
        return;
    }

    // check for existing claim and remove it
    // FIX: snapshot with ToList() — the original removed claims while lazily
    // enumerating identity.Claims, i.e. modifying the collection being enumerated.
    var existingClaims = identity.Claims.Where(x => x.Type == "ac_session" || x.Type == ClaimTypes.Role).ToList();
    foreach (var existingClaim in existingClaims)
    {
        identity.RemoveClaim(existingClaim);
    }

    identity.AddClaim(new Claim("ac_session", user.AcSessionToken));
    if (user.Roles != null)
    {
        foreach (var role in user.Roles)
        {
            var roleClaim = new Claim(ClaimTypes.Role, role);
            identity.AddClaim(roleClaim);
        }
    }

    context.Validated(context.Ticket);
    _logger?.Info($"[GrantRefreshToken.Success] PrincipalId={user.Id}, ACSession={context.Ticket.Identity.FindFirst("ac_session")}");
    // return Task.FromResult<object>(null);
}
/// <summary>
/// Queues an incremental search-index update, in every configured index, for each
/// changed item. The updates run asynchronously after being queued.
/// </summary>
/// <param name="items">The changed items.</param>
/// <param name="logger">Optional logger for progress output.</param>
protected virtual void UpdateSearchIndexes(Item[] items, ILogger logger)
{
    logger?.Info("");
    logger?.Info("[I] Updating search indexes for changed items.");

    foreach (var index in ContentSearchManager.Indexes)
    {
        // Project each changed item to the unique id the index custodian expects.
        var changes = items.Select(change => new SitecoreItemUniqueId(change.Uri));
        IndexCustodian.IncrementalUpdate(index, changes);
    }

    logger?.Debug($"> Queued updates for {items.Length} items in the search indexes. Will run async.");
}
/// <summary>
/// Entry point: loads the chaos configuration from Chaos.config, applies the repeat
/// and delay settings, constructs Bedlam and runs it the configured number of times.
/// </summary>
private static void Main()
{
    Logger = LogManager.GetLogger(nameof(Program));

    var configPath = Path.Combine(Environment.CurrentDirectory, "Chaos.config");
    if (File.Exists(configPath))
    {
        _chaosConfiguration = GetChaosConfiguration(configPath);
    }

    if (_chaosConfiguration == null)
    {
        // if GetChaosConfiguration fails (which it logs...), then exit...
        // FIX: corrected the "configruration" typo in the log message.
        Logger?.Info("No configuration supplied... Exiting...");
        Environment.Exit(-1);
    }

    //Repeat setting
    if (_chaosConfiguration.Repeat > 0)
    {
        _repeat = _chaosConfiguration.Repeat;
    }

    //Start delay setting
    if (_chaosConfiguration.RunDelay > 0)
    {
        _runDelay = _chaosConfiguration.RunDelay;
    }

    try
    {
        //Must initialize Bedlam instance with ChaosConfiguration instance...
        _bedlam = new Bedlam(_chaosConfiguration);
    }
    catch (Exception e)
    {
        Logger?.Info("Error initializing Bedlam: " + e.Message + "\n Exiting...");
        // FIX: also log the full exception (stack trace included) before exiting,
        // matching the JSON-config variant of this entry point.
        Logger?.Error(e);
        Environment.Exit(-1);
    }

    // One initial run plus _repeat additional runs, each preceded by the optional delay.
    for (int i = 0; i < _repeat + 1; i++)
    {
        if (_runDelay > 0)
        {
            Thread.Sleep(_runDelay * 1000);
        }

        _bedlam.Run();
    }
}
/// <summary>
/// Configures camelCase JSON serialization and maps the SignalR hubs onto the app,
/// resolving hub dependencies through the supplied Unity container.
/// </summary>
/// <param name="app">The OWIN app builder.</param>
/// <param name="container">Unity container used as the SignalR dependency resolver.</param>
/// <param name="logger">Logger for registration progress.</param>
public static void RegisterHubs(IAppBuilder app, IUnityContainer container, ILogger logger)
{
    logger.Info("Overriding default JSON serialization settings (PascalCase->camelCase)");
    SetJsonSerializerSettings(container);

    logger.Info("SignalR Hubs registration starting ...");
    app.MapSignalR(new HubConfiguration
    {
        Resolver = new UnitySignalRDependencyResolver(container)
    });
    logger.Info("SignalR Hubs registered.");
}
/// <summary>
/// Validates a suggested block: its ommers, every transaction's form, the ommers hash,
/// and finally the header. Logs the reason for the first failed check and stops there.
/// </summary>
/// <param name="suggestedBlock">The block to validate.</param>
/// <returns>True when all checks pass.</returns>
public bool ValidateSuggestedBlock(Block suggestedBlock)
{
    if (!_ommersValidator.Validate(suggestedBlock.Header, suggestedBlock.Ommers))
    {
        _logger?.Info($"Invalid block ({suggestedBlock.Hash}) - invalid ommers");
        return(false);
    }

    // Every transaction must be well-formed under the spec active at this block number.
    foreach (Transaction transaction in suggestedBlock.Transactions)
    {
        if (!_transactionValidator.IsWellFormed(transaction, _specProvider.GetSpec(suggestedBlock.Number)))
        {
            _logger?.Info($"Invalid block ({suggestedBlock.Hash}) - invalid transaction ({transaction.Hash})");
            return(false);
        }
    }

    // TODO it may not be needed here (computing twice?)
    // The header's ommers hash must equal the Keccak hash of the RLP-encoded ommers list.
    if (suggestedBlock.Header.OmmersHash != Keccak.Compute(Rlp.Encode(suggestedBlock.Ommers)))
    {
        _logger?.Info($"Invalid block ({suggestedBlock.Hash}) - invalid ommers hash");
        return(false);
    }

    bool blockHeaderValid = _headerValidator.Validate(suggestedBlock.Header);
    if (!blockHeaderValid)
    {
        _logger?.Info($"Invalid block ({suggestedBlock.Hash}) - invalid header");
        return(false);
    }

    return(true);
}
/// <summary>
/// Handles Ctrl+C: announces shutdown, requests cancellation and gives background
/// work a short grace period to observe the token before the process exits.
/// </summary>
private static void OnCancelKeyPress(object sender, ConsoleCancelEventArgs e)
{
    logger?.Info(() => "SIGINT received. Exiting.");
    Console.WriteLine("SIGINT received. Exiting.");

    try
    {
        // FIX: use the null-conditional operator, matching the `logger?.` usage above.
        // The original `cts.Cancel()` threw NullReferenceException when cts was null,
        // which the catch block then misreported as a shutdown failure.
        cts?.Cancel();
        // Grace period for background work to observe the cancellation.
        Thread.Sleep(3000);
    }
    catch
    {
        Console.WriteLine("Not able to shut down. Please kill the processes manually.");
    }
}
/// <summary>
/// Reacts to an appended block: when the block's bloom filter may contain a chain
/// creation event, scans its transaction results and sends a sidechain deployment
/// request for each interested event found, recording each successful deployment.
/// </summary>
/// <param name="block">The block that was appended.</param>
public async Task OnBlockAppended(IBlock block)
{
    // TODO: OnBlockIrreversible instead
    // Cheap bloom pre-check: skip blocks that cannot contain the events of interest.
    if (!_bloom.IsIn(new Bloom(block.Header.Bloom.ToByteArray())))
    {
        return;
    }

    var chainId = block.Header.ChainId;
    var infos = new List <SideChainInfo>();
    foreach (var txId in block.Body.Transactions)
    {
        var res = await TransactionResultManager.GetTransactionResultAsync(txId);
        infos.AddRange(GetInterestedEvent(res));
    }

    foreach (var info in infos)
    {
        _logger?.Info($"Chain creation event: {info}");
        try
        {
            var response = await SendChainDeploymentRequestFor(info.ChainId, chainId);
            if (response.StatusCode != HttpStatusCode.OK)
            {
                _logger?.Error(
                    $"Sending sidechain deployment request for {info.ChainId} failed. " +
                    $"StatusCode: {response.StatusCode}."
                );
            }
            else
            {
                _logger?.Info(
                    $"Successfully sent sidechain deployment request for {info.ChainId}. " +
                    $"Management API return message: {await response.Content.ReadAsStringAsync()}."
                );

                // insert
                // The sidechain id is recorded only after a successful deployment request.
                await _chainManagerBasic.AddSideChainId(info.ChainId);
            }
        }
        catch (Exception e)
        {
            // One failed request must not stop deployment of the remaining sidechains.
            _logger?.Error(e, $"Sending sidechain deployment request for {info.ChainId} failed due to exception.");
        }
    }
}
/// <summary>
/// Validates the ommer (uncle) headers attached to a block: at most two,
/// no duplicates, each header well-formed, each a genuine kin of the block,
/// and none already included by a recent ancestor.
/// </summary>
public bool Validate(BlockHeader header, BlockHeader[] ommers)
{
    if (ommers.Length > 2)
    {
        _logger?.Info($"Invalid block ({header.ToString(BlockHeader.Format.Full)}) - too many ommers");
        return false;
    }

    if (ommers.Length == 2 && ommers[0].Hash == ommers[1].Hash)
    {
        _logger?.Info($"Invalid block ({header.ToString(BlockHeader.Format.Full)}) - duplicated ommer");
        return false;
    }

    foreach (BlockHeader ommer in ommers)
    {
        if (!_headerValidator.Validate(ommer, true))
        {
            _logger?.Info($"Invalid block ({header.ToString(BlockHeader.Format.Full)}) - ommer's header invalid");
            return false;
        }

        if (!IsKin(header, ommer, 6))
        {
            _logger?.Info($"Invalid block ({header.ToString(BlockHeader.Format.Full)}) - ommer just pretending to be ommer");
            return false;
        }

        // Walk up to five ancestors; reject an ommer any of them already includes.
        Block ancestor = _blockTree.FindBlock(header.ParentHash, false);
        for (int depth = 0; depth < 5 && ancestor != null; depth++)
        {
            if (ancestor.Ommers.Any(o => o.Hash == ommer.Hash))
            {
                _logger?.Info($"Invalid block ({header.ToString(BlockHeader.Format.Full)}) - ommers has already been included by an ancestor");
                return false;
            }

            ancestor = _blockTree.FindBlock(ancestor.Header.ParentHash, false);
        }
    }

    return true;
}
/// <summary>
/// Queues an incremental (asynchronous) search-index update for the given
/// items across every configured content search index.
/// </summary>
protected virtual void UpdateSearchIndexes(Item[] items, ILogger logger)
{
    logger?.Info("");
    logger?.Info("[I] Updating search indexes for changed items.");

    foreach (var index in ContentSearchManager.Indexes)
    {
        var uniqueIds = items.Select(item => new SitecoreItemUniqueId(item.Uri));

        // NOTE(review): IncrementalUpdate is invoked via reflection instead of a
        // direct call — presumably to bind across differing Sitecore API versions;
        // confirm before simplifying.
        ReflectionUtil.CallMethod(typeof(IndexCustodian), "IncrementalUpdate", true, true, true, new object[] { index, uniqueIds });
        //IndexCustodian.IncrementalUpdate(index, changes);
    }

    logger?.Debug($"> Queued updates for {items.Length} items in the search indexes. Will run async.");
}
/// <summary>
/// Entry point: configures logging, reads command-line arguments, and boots
/// the game server.
/// </summary>
static void Main(string[] args)
{
    LogManager.ThrowExceptions = true;

    // NOTE(review): the value is unused, but accessing Configuration may be
    // what triggers the NLog config load here — confirm before removing.
    var config = LogManager.Configuration;

    Logger = LogManager.GetLogger("MiniServer");

    Logger.Info("Read arguments.");
    ReadArgiments(args);

    Logger.Info("Initialises server.");
    GameServer.Instance.Initialise();

    Logger.Info("Start server.");
    GameServer.Instance.StartServer();
}
/// <summary>
/// Attempt to start the RS232 polling loop
/// </summary>
/// <returns>True when loop starts</returns>
public bool StartPollingLoop()
{
    // Guard against double-start and a failed serial-port open while holding the lock.
    lock (_mutex)
    {
        if (_isRunning)
        {
            Logger?.Error("{0} Already polling, ignoring start request", GetType().Name);
            return (false);
        }

        if (!SerialProvider.TryOpen())
        {
            Logger?.Error("{0} Failed to open serial provider", GetType().Name);
            return (false);
        }

        _isRunning = true;
    }

    // Spin up the worker that runs the polling loop.
    _rs232Worker = new Thread(MainLoop)
    {
        // Terminate if our parent thread dies
        IsBackground = true
    };

    _rs232Worker.Start();

    if (Config.DisableLivenessCheck)
    {
        Logger?.Info("{0} Polling thread started (no liveness check): {1}", GetType().Name, _rs232Worker.ManagedThreadId);
    }
    else
    {
        // RS-232 does not have a "ping" concept so instead we wait for a
        // number of healthy messages before telling the caller that the
        // message loop has "started successfully".
        if (!_deviceIsReady.WaitOne(Config.PollingPeriod._Multiply(5)))
        {
            // NOTE(review): on timeout we report failure, yet _isRunning stays
            // true and the worker thread keeps running, so a subsequent call
            // will be rejected as "Already polling" — confirm this is intended.
            Logger?.Info("{0} timed out waiting for a valid polling response", GetType().Name);
            return (false);
        }

        Logger?.Info("{0} Polling thread started: {1}", GetType().Name, _rs232Worker.ManagedThreadId);
    }

    return (true);
}
/// <summary>
/// Boots the ENode framework: registers the sample assemblies, configures
/// DI/logging/serialization, and starts the EQueue messaging infrastructure.
/// </summary>
static void InitializeENodeFramework()
{
    // Assemblies that contain the domain model, commands, and handlers.
    var assemblies = new[]
    {
        Assembly.Load("NoteSample.Domain"),
        Assembly.Load("NoteSample.Commands"),
        Assembly.Load("NoteSample.CommandHandlers"),
        Assembly.GetExecutingAssembly()
    };

    _configuration = Configuration
        .Create()
        .UseAutofac()
        .RegisterCommonComponents()
        .UseLog4Net()
        .UseJsonNet()
        .RegisterUnhandledExceptionHandler()
        .CreateENode()
        .RegisterENodeComponents()
        .RegisterBusinessComponents(assemblies)
        .UseEQueue()
        .InitializeBusinessAssemblies(assemblies)
        .StartEQueue();

    Console.WriteLine(string.Empty);

    _logger = ObjectContainer.Resolve<ILoggerFactory>().Create(typeof(Program).Name);
    _logger.Info("ENode started...");
}
/// <summary>
/// Creates the manager, wrapping every supplied system in a runtime instance
/// and logging the resulting set of system names.
/// </summary>
public RuntimeSystemManager(IEnumerable<ISystem> systems, ILogger logger)
{
    _logger = logger;
    _systems = systems.Select(system => new RuntimeSystemInstance(system)).ToList();

    var systemNames = string.Join(", ", _systems.Select(instance => instance.System.Name));
    _logger.Info("Initialised with systems: {0}", systemNames);
}
/// <summary>
/// Wires up the engine: job registry, MEF composition of job plugins,
/// Autofac application container, jobs-folder watcher, logging, and the API host.
/// </summary>
private void Bootstrap()
{
    _jobModelRegistry = new ConcurrentDictionary<string, JobModel>();

    // Compose job plugins from this assembly plus the jobs folder.
    _compositionContainer = new CatalogConfigurator()
        .AddAssembly(Assembly.GetExecutingAssembly())
        .AddNestedDirectory(Config.JobsFolderName)
        .BuildContainer();
    _compositionContainer.ComposeParts(this);

    InitTasksRegistry();

    // Application-level (Autofac) container.
    _appContainerBuilder = new ContainerBuilder();
    _appContainerBuilder.RegisterModule<WorkerModule>();
    _appContainerBuilder.RegisterModule<HostingModule>();
    _appContainer = _appContainerBuilder.Build();

    //TODO: make onchanged to an event
    _fileSystemWatcher = new JobsWatcher { OnChanged = OnChanged };
    _fileSystemWatcher.Watch(TasksFolderPath);

    _logger = _appContainer.Resolve<ILogger>();
    _logger.Info("[START] PanteonEngine");

    // Fire-and-forget: host the API without blocking bootstrap.
    Task.Run(() => MountApi());
}
/// <summary>
/// Builds a test sandbox rooted at <paramref name="testFolder"/> with fresh
/// Source/Target directories, a composed view model, and a dedicated logger.
/// </summary>
/// <exception cref="ArgumentNullException">
/// When <paramref name="composer"/> is null or <paramref name="testFolder"/> is blank.
/// </exception>
public TestContext(Composer composer, TestFilesRepository testFilesRepository, string testFolder)
{
    if (composer == null)
        throw new ArgumentNullException(nameof(composer));
    if (string.IsNullOrWhiteSpace(testFolder))
        throw new ArgumentNullException(nameof(testFolder));

    _composer = composer;
    _viewModel = composer.Compose();
    _testFilesRepository = testFilesRepository;

    _rootFolder = testFolder;
    _sourceDirectory = Path.Combine(_rootFolder, "Source");
    _targetDirectory = Path.Combine(_rootFolder, "Target");

    // CreateDirectory is a documented no-op when the directory already exists,
    // so no Exists pre-check is needed.
    Directory.CreateDirectory(SourceDirectory);
    Directory.CreateDirectory(TargetDirectory);

    _logger = _composer.Resolve<Func<string, ILogger>>()("TestContext");
    _logger.Info("Root folder is " + _rootFolder);
    _logger.Info("Test context hashcode is " + GetHashCode());
}
/// <summary>
/// Runs <paramref name="action"/> until it succeeds, up to
/// <paramref name="maxAttempts"/> times, sleeping between attempts with a
/// linearly growing backoff (attempt * 100ms) unless an explicit
/// <paramref name="retryInterval"/> is supplied.
/// </summary>
/// <returns>The result of the first successful attempt.</returns>
/// <exception cref="ArgumentNullException">When <paramref name="action"/> is null.</exception>
public static async Task<T> WithRetriesAsync<T>(Func<Task<T>> action, int maxAttempts = 5, TimeSpan? retryInterval = null, CancellationToken cancellationToken = default(CancellationToken), ILogger logger = null)
{
    if (action == null)
    {
        throw new ArgumentNullException(nameof(action));
    }

    var startTime = SystemClock.UtcNow;
    int attempts = 1;

    do
    {
        if (attempts > 1)
        {
            logger?.Info($"Retrying {attempts.ToOrdinal()} attempt after {SystemClock.UtcNow.Subtract(startTime).TotalMilliseconds}ms...");
        }

        try
        {
            return await action().AnyContext();
        }
        catch (Exception ex)
        {
            // Out of attempts: let the original exception propagate.
            if (attempts >= maxAttempts)
            {
                throw;
            }

            logger?.Error(ex, $"Retry error: {ex.Message}");
            await SystemClock.SleepAsync(retryInterval ?? TimeSpan.FromMilliseconds(attempts * 100), cancellationToken).AnyContext();
        }

        attempts++;
    } while (attempts <= maxAttempts && !cancellationToken.IsCancellationRequested);

    throw new TaskCanceledException("Should not get here.");
}
/// <summary>
/// Starts the HTTP listener on the configured port and begins accepting
/// requests asynchronously.
/// </summary>
/// <exception cref="InvalidOperationException">When the server is already running.</exception>
public void Start()
{
    logger = LoggerFactory.GetLogger("SimpleHttpServer.Server");
    logger.Info("Server starting on port {0}", port);

    // A non-null listener means Start() was already called.
    if (listener != null)
    {
        logger.Fatal("Server already started");
        throw new InvalidOperationException("Already started");
    }

    listener = new HttpListener();
    listener.Prefixes.Add(string.Format("http://*:{0}/", port));

    try
    {
        listener.Start();
    }
    catch (Exception ex)
    {
        logger.Fatal("Error starting server", ex);
        throw;
    }

    logger.Info("Server started");
    logger.Debug("Waiting for first request");

    // Begin accepting the first request asynchronously.
    listener.BeginGetContext(ProcessRequest, null);
}
/// <summary>
/// Configures log4net (per-day log file name, watched config file), builds and
/// launches the framework, then starts the dedicated framework update thread.
/// </summary>
protected override void Setup()
{
    // Route framework logging through log4net; log file name includes today's date.
    LogManager.SetLoggerFactory(Log4NetLoggerFactory.Instance);
    GlobalContext.Properties["LogFileName"] = ApplicationName + System.DateTime.Now.ToString("yyyy-MM-dd");
    XmlConfigurator.ConfigureAndWatch(new FileInfo(Path.Combine(BinaryPath, "log4net.config")));

    Logger = LogManager.GetLogger(this.GetType().FullName);
    Logger.Info("Setup begin.");

    _Framework = _Setup();
    _Framework.Launch();

    // Pump the framework from a dedicated, normal-priority thread.
    _FrameworkThread = new System.Threading.Thread(_FrameworkUpdate)
    {
        Priority = System.Threading.ThreadPriority.Normal
    };
    _FrameworkThread.Start();

    Logger.Info("Setup end.");
}
/// <summary>
/// Creates the GDI-based image encoder and records its dependencies.
/// </summary>
public GDIImageEncoder(IFileSystem fileSystem, ILogger logger)
{
    _fileSystem = fileSystem;
    _logger = logger;

    logger.Info("GDI image processor initialized");
}
/// <summary>
/// Pushes an attachment stream into the Documents module. On 404/403 the
/// EMailIN folder is treated as unreachable: it is unlinked from the mailbox,
/// a failure alert is created, and the exception is rethrown. Other API
/// failures are logged and swallowed.
/// </summary>
private void UploadToDocuments(AttachmentStream file, string contentType, MailBox mailbox, ILogger log)
{
    try
    {
        var uploadedFileId = ApiHelper.UploadToDocuments(file.FileStream, file.FileName, contentType, mailbox.EMailInFolder, true);
        log.Debug("ApiHelper.UploadToDocuments() -> uploadedFileId = {0}", uploadedFileId);
    }
    catch (ApiHelperException ex)
    {
        var folderUnreachable = ex.StatusCode == HttpStatusCode.NotFound || ex.StatusCode == HttpStatusCode.Forbidden;
        if (!folderUnreachable)
        {
            // Other API errors are logged but deliberately not rethrown.
            log.Error("SaveEmailInData->ApiHelper.UploadToDocuments(fileName: '{0}', folderId: {1}) Exception:\r\n{2}\r\n", file.FileName, mailbox.EMailInFolder, ex.ToString());
            return;
        }

        log.Info("ApiHelper.UploadToDocuments() EMailIN folder '{0}' is unreachable. Try to unlink EMailIN...", mailbox.EMailInFolder);

        // Unlink the dead folder from the mailbox, then alert the user.
        SetMailboxEmailInFolder(mailbox.TenantId, mailbox.UserId, mailbox.MailBoxId, null);
        mailbox.EMailInFolder = null;

        CreateUploadToDocumentsFailureAlert(mailbox.TenantId, mailbox.UserId, mailbox.MailBoxId,
            ex.StatusCode == HttpStatusCode.NotFound
                ? UploadToDocumentsErrorType.FolderNotFound
                : UploadToDocumentsErrorType.AccessDenied);

        throw;
    }
}
/// <summary>
/// Initializes the Windows service, boots EQueue, and resolves a logger
/// for this service type.
/// </summary>
public Service1()
{
    InitializeComponent();
    InitializeEQueue();

    var loggerFactory = ObjectContainer.Resolve<ILoggerFactory>();
    _logger = loggerFactory.Create(GetType().FullName);
    _logger.Info("Service initialized.");
}
/// <summary>
/// Adds an authenticated peer to the peer list — rejecting null or
/// unauthenticated peers and disposing duplicates — then subscribes to its
/// messages and raises the Added event.
/// </summary>
internal void AddAuthentifiedPeer(IPeer peer)
{
    if (peer == null)
    {
        _logger?.Warn("Peer is null, cannot add.");
        return;
    }

    if (!peer.IsAuthentified)
    {
        _logger?.Warn($"Peer not authentified, cannot add {peer}");
        return;
    }

    lock (_peerListLock)
    {
        _authentifyingPeer.Remove(peer);

        // Duplicate: dispose the incoming connection, keep the existing one.
        if (GetPeer(peer) != null)
        {
            peer.Dispose(); // todo
            return;
        }

        _peers.Add(peer);
    }

    _logger?.Info($"Peer authentified and added : {{ addr: {peer}, key: {peer.DistantNodeAddress.ToHex()}, bp: {peer.IsBp} }}");

    peer.MessageReceived += OnPeerMessageReceived;
    PeerEvent?.Invoke(this, new PeerEventArgs(peer, PeerEventType.Added));
}
/// <summary>
/// Builds the scheduler host: creates the trace source and the job scheduler,
/// resolving job activities through the supplied container delegate.
/// Construction failures are traced and rethrown.
/// </summary>
private SchedulerHost(bool isPrimary, Func<Type, object> containerResolve, Func<IAppConfigRepository> repositoryResolve)
{
    _traceSource = new VirtoCommerceTraceSource("VirtoCommerce.ScheduleService.Trace");
    Trace.TraceInformation("SchedulerHost constructor started");

    try
    {
        // reuse host container
        _jobScheduler = new JobScheduler(
            isPrimary,
            jobType =>
            {
                object activity;
                try
                {
                    activity = containerResolve(jobType);
                }
                catch (Exception ex)
                {
                    _traceSource.Error(ex.ToString());
                    throw;
                }
                // Cast outside the inner try: a cast failure is handled (and
                // logged once) by the surrounding catch below.
                return (IJobActivity)activity;
            },
            repositoryResolve,
            _traceSource);
    }
    catch (Exception ex)
    {
        _traceSource.Error(ex.ToString());
        throw;
    }

    _traceSource.Info("SchedulerHost constructor finished");
}
/// <summary>
/// Walks every known project, fixing its assembly and project references
/// against the full package list, then saves each project file.
/// </summary>
private void FixProjectFiles()
{
    _logger?.Info($"Trying to fix the Assembly and Project of the known projects");

    // Any() short-circuits on the first element instead of enumerating the
    // whole sequence as Count() == 0 did (CA1827).
    if (!Packages.Any())
    {
        _logger?.Info($"No NuGet package found. If this is not correct, it might be because this method was called before installing the NuGet packages.");
    }

    foreach (var csProj in Projects)
    {
        _logger?.Info($"Trying to fix the Assembly and Project reference of {csProj.Name}");
        csProj.TryFixProjectFileAndGatherReferences(AllPackages);
        csProj.SaveCsProjectToFile();
    }
}
/// <summary>
/// Wires up the trade pipeline: a keyed source cache, read-only views for all
/// and live trades, a simulated trade feed, automatic expiry of closed trades,
/// and change logging. All subscriptions are tied together for disposal.
/// </summary>
public TradeService(ILogger logger, TradeGenerator tradeGenerator, ISchedulerProvider schedulerProvider)
{
    _logger = logger;
    _tradeGenerator = tradeGenerator;
    _schedulerProvider = schedulerProvider;

    // Cache keyed by the trade's primary key (Trade.Id).
    _tradesSource = new SourceCache<Trade, long>(trade => trade.Id);

    // AsObservableCache() hides the update methods from consumers.
    _all = _tradesSource.AsObservableCache();

    // Derived cache containing only live trades.
    _live = _tradesSource.Connect(trade => trade.Status == TradeStatus.Live).AsObservableCache();

    // Emulates an external trade provider feeding the cache.
    var tradeLoader = GenerateTradesAndMaintainCache();

    // Expire closed trades after a minute to keep the cache bounded.
    var expirer = _tradesSource
        .ExpireAfter(
            t => t.Status == TradeStatus.Closed ? TimeSpan.FromMinutes(1) : (TimeSpan?)null,
            TimeSpan.FromMinutes(1),
            schedulerProvider.TaskPool)
        .Subscribe(removed => _logger.Info("{0} filled trades have been removed from memory", removed.Count()));

    // Log cache changes.
    var loggerWriter = LogChanges();

    _cleanup = new CompositeDisposable(_all, _tradesSource, tradeLoader, loggerWriter, expirer);
}
/// <summary>
/// Starts a background loop that wakes every <c>Interval</c> (or as soon as
/// the watch completes), builds a result via <c>GetResult()</c>, and pushes it
/// to both the <c>Report</c> callback and the logger until <c>_running</c>
/// completes. Exceptions inside an iteration are logged and the loop continues.
/// </summary>
/// <param name="subName">Optional secondary label for the watch.</param>
/// <param name="total">Initial value stored in <c>Total.Value</c>.</param>
/// <param name="name">Defaults to the calling member's name.</param>
public AtomicWatch(string subName = null, long total = 0, [CallerMemberName] string name = null)
{
    Logger = Loggers.All;
    Name = name;
    SubName = subName;
    Total.Value = total;

    Task.Run(async () =>
    {
        while (!_running.Completed)
        {
            try
            {
                var count = _count.Value;

                // Wake after one interval, or immediately when the watch stops.
                await Task.WhenAny(Task.Delay(Interval), _running.Source.AsTask());

                // NOTE(review): this per-interval delta is computed but never
                // used afterwards — confirm whether GetResult() was meant to
                // receive it, or whether the line can be removed.
                count = _count.Value - count;

                var result = GetResult();
                Report?.Invoke(result);
                Logger?.Info(result);
            }
            catch (Exception e)
            {
                // Prefer the strongly-typed logger when one is available.
                if (Logger is ILogger l)
                {
                    l.Error(e);
                }
                else
                {
                    Loggers.Error(e);
                }
            }
        }
    });
}
/// <summary>
/// Logs the response.
/// </summary>
/// <param name="logger">The logger.</param>
/// <param name="statusCode">The status code.</param>
/// <param name="url">The URL.</param>
/// <param name="endPoint">The end point.</param>
/// <param name="duration">The duration.</param>
public static void LogResponse(ILogger logger, int statusCode, string url, string endPoint, TimeSpan duration)
{
    var elapsedMs = duration.TotalMilliseconds;

    // Flag responses taking a second or longer as slow.
    var suffix = elapsedMs >= 1000 ? "ms (slow)" : "ms";
    var elapsedText = Convert.ToInt32(elapsedMs).ToString(CultureInfo.InvariantCulture);

    logger.Info("HTTP Response {0} to {1}. Time: {2}{3}. {4}", statusCode, endPoint, elapsedText, suffix, url);
}
/// <summary>
/// Daemon entry: sets up logging and configuration, registers every component
/// in the DI container, and summons the "Charlie" persona.
/// </summary>
void Run()
{
    #region init
    ILoggerFactory fac = new NLoggerFactory();
    logger = fac.GetLogger(this.GetType());

    // Log a farewell message when the process is asked to exit.
    ExitCatcher catcher = new ExitCatcher();
    catcher.Subscribe(() => { logger.Info("Daemon back to limbo\n"); });

    logger.Info("Daemon wakes up");

    IConfig config = new JsonConfig(fac);
    config.Load(Properties.Settings.Default.LOCALES);
    #endregion

    #region registeration
    logger.Info("Daemon assembles");

    Container container = new Container();
    container.Register<ILoggerFactory, NLoggerFactory>();
    container.Register<IEar>(() => new ConsoleEar(fac, config.Get<float>("minaccuracy")));
    container.Register<IMouth, SimpleMouth>();
    container.Register<INerve>(() => new DebugNerve(fac, config.Get<string>("server")));
    container.Register<ICortex, SimpleCortex>();
    container.Register<ISimulacre, Simulacre>();
    #endregion

    #region startup
    logger.Info("Daemon summons Charlie\n");

    var charlie = new Personae
    {
        Name = "Charlie",
        Grammar = "charlieGrammar.xml",
        Voice = "Microsoft Hortense Desktop"
    };

    container.GetInstance<ISimulacre>().Summon(charlie);
    #endregion
}