public ElasticCasFactory(string name) { // Init Log Logger = LogManager.GetLogger(name); if (Settings.Exists()) { Logger.InfoFormat("Using ElasticSearch Host : {0}", Settings.ElasticSearchServer); } else { Logger.InfoFormat("No ElasticSearch Host specified, using default : {0}", Settings.ElasticSearchServer); } client = new ElasticClientWrapper(); Logger.InfoFormat("New ElasticSearch Connection from {0}", name); }
private static void RunService(CommandLineParser parser, Action<StringDictionary> environment, ILog logger) { var service = new ServiceController(parser.Target); if (service.Status != ServiceControllerStatus.Stopped) { logger.ErrorFormat("The service '{0}' is already running. The profiler cannot attach to an already running service.", parser.Target); return; } // now to set the environment variables var profilerEnvironment = new StringDictionary(); environment(profilerEnvironment); var serviceEnvironment = new ServiceEnvironmentManagement(); try { serviceEnvironment.PrepareServiceEnvironment(parser.Target, (from string key in profilerEnvironment.Keys select string.Format("{0}={1}", key, profilerEnvironment[key])).ToArray()); // now start the service service = new ServiceController(parser.Target); service.Start(); logger.InfoFormat("Service starting '{0}'", parser.Target); service.WaitForStatus(ServiceControllerStatus.Running, new TimeSpan(0, 0, 30)); logger.InfoFormat("Service started '{0}'", parser.Target); } finally { // once the service has started set the environment variables back - just in case serviceEnvironment.ResetServiceEnvironment(); } // and wait for it to stop service.WaitForStatus(ServiceControllerStatus.Stopped); logger.InfoFormat("Service stopped '{0}'", parser.Target); }
private async void Start() { var splash = new SplashWindow(); splash.Show(); var bootstrapper = new Bootstrapper(); var container = bootstrapper.Build(); Log.Info("Initializing reactive trader API..."); var sw = Stopwatch.StartNew(); var reactiveTraderApi = container.Resolve <IReactiveTrader>(); var username = container.Resolve <IUserProvider>().Username; reactiveTraderApi.Initialize(username, container.Resolve <IConfigurationProvider>().Servers, container.Resolve <ILoggerFactory>()); Log.InfoFormat("Reactive trader API initialized in {0}ms", sw.ElapsedMilliseconds); MainWindow = new MainWindow(); var shellViewModel = container.Resolve <IShellViewModel>(); MainWindow.Content = new ShellView(shellViewModel); await Task.Delay(TimeSpan.FromSeconds(1.5)); splash.Close(); MainWindow.Show(); Log.InfoFormat("Main UI displayed {0:N0}ms after process start.", (DateTime.Now - Process.GetCurrentProcess().StartTime).TotalMilliseconds); }
private void ServiceHost_Closed(object sender, EventArgs e) { ServiceHost host = sender as ServiceHost; Debug.Assert(host != null, "host is null"); logger.InfoFormat("host {0} closed", host.Description); }
protected override void OnStart(string[] args) { log.Info("Start service"); try { InitializeComponent(); cancelTokenSource = new CancellationTokenSource(); string rabbitMQServerName = ConfigurationManager.AppSettings["rabbitMQ31ServerName"]; workers = new Worker[workerCount]; rabbitMQServer = RabbitMQManager.GetRabbitMQServer(rabbitMQServerName); QT.Moduls.LogCassandra.LogCrawler logCass = new QT.Moduls.LogCassandra.LogCrawler(); var db = new QT.Entities.Data.SqlDb(this.connectionString); for (int i = 0; i < workerCount; i++) { log.InfoFormat("Start worker {i}", i.ToString()); var worker = new Worker(CrawlerProductLog, false, rabbitMQServer); workers[i] = worker; var token = this.cancelTokenSource.Token; Task workerTask = new Task(() => { worker.JobHandler = (updateDatafeedJob) => { try { token.ThrowIfCancellationRequested(); QT.Entities.CrawlerProduct.RabbitMQ.MssLogCassandra mss = QT.Entities.CrawlerProduct.RabbitMQ.MssLogCassandra.GetDataFromMessage(updateDatafeedJob.Data); logCass.SaveLogToCassandra(mss.log, (QT.Moduls.LogCassandra.LogCode)mss.logCode, (QT.Moduls.LogCassandra.TypeLog)mss.typeLog, mss.data_id, mss.data_second_id, null, mss.session); log.InfoFormat("Log crawler company {0} : {1} ", mss.data_id, mss.data_second_id); return(true); } catch (OperationCanceledException opc) { log.Info("End worker"); return(true); } catch (Exception ex01) { log.Info(ex01); return(true); } }; worker.Start(); }, token); workerTask.Start(); log.InfoFormat("Worker {0} started", i); } } catch (Exception ex) { log.Error("Start error", ex); throw; } }
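// Note (added illustrative sketch, not from the original source): log4net's InfoFormat and the other
// *Format methods take a composite format string, so placeholders are positional ({0}, {1}, ...),
// exactly as in string.Format; name-style tokens such as {name} are not supported and throw a
// FormatException at runtime. A minimal, self-contained illustration of the convention:
using log4net;

static class PlaceholderConvention
{
    static readonly ILog Log = LogManager.GetLogger(typeof(PlaceholderConvention));

    static void Main()
    {
        for (int i = 0; i < 3; i++)
        {
            // Positional placeholder; the int is passed as a format argument, no ToString() needed.
            Log.InfoFormat("Start worker {0}", i);
        }
    }
}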
/// <summary> /// Connects to ZooKeeper server /// </summary> /// <param name="watcher"> /// The watcher to be installed in ZooKeeper. /// </param> public void Connect(IWatcher watcher) { if (this.disposed) { throw new ObjectDisposedException(this.GetType().Name); } lock (this.syncLock) { if (this._zkclient != null) { throw new InvalidOperationException("ZooKeeper client has already been started"); } try { Logger.InfoFormat("Starting ZK client .. with connect handler.. {0}...", watcher.ToString()); this._zkclient = new ZooKeeper(this.Servers, new TimeSpan(0, 0, 0, 0, this.SessionTimeout), watcher);//new ZkClientState(this.Servers, new TimeSpan(0, 0, 0, 0, this.SessionTimeout), watcher); Logger.InfoFormat("Finish start ZK client .. with connect handler.. {0}...", watcher.ToString()); } catch (IOException exc) { throw new ZooKeeperException("Unable to connect to " + this.Servers, exc); } } }
protected NetworkHelperStatus LoginMaster() { NetworkHelperStatus rv = networkHelper.LoginMaster(loginSettings); log.InfoFormat("Login return: {0}", rv); switch (rv) { case NetworkHelperStatus.Success: break; case NetworkHelperStatus.LoginFailure: StatusMessage = ""; ErrorMessage = "Invalid username or password"; break; case NetworkHelperStatus.MasterTcpConnectFailure: StatusMessage = ""; ErrorMessage = "Unable to connect to master tcp server"; break; default: StatusMessage = ""; ErrorMessage = "Unable to login"; break; } return(rv); }
public void Info(string message, params object[] formatting) { if (_logger.IsInfoEnabled) { _logger.InfoFormat(decorate_message_with_audit_information(message), formatting); } }
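// Note (added illustrative sketch, not from the original source): the IsInfoEnabled guard used in the
// wrapper above pays off when building the format arguments is itself expensive; InfoFormat already
// checks the level internally, so the guard's real saving is skipping argument evaluation.
using System.Linq;
using log4net;

static class GuardedLogging
{
    static readonly ILog Log = LogManager.GetLogger(typeof(GuardedLogging));

    static void LogSummary(int[] samples)
    {
        // Without the guard, Average() would still run even when INFO logging is disabled.
        if (Log.IsInfoEnabled)
        {
            Log.InfoFormat("Processed {0} samples, mean {1:F2}",
                samples.Length, samples.Length > 0 ? samples.Average() : 0.0);
        }
    }
}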
private static void PrintInputParameters() { foreach (KeyValuePair <string, string> kv in InputParameters) { Log.InfoFormat("{0}={1}", kv.Key, kv.Value); } }
public FetchResponse Fetch(FetchRequest request) { short tryCounter = 1; while (tryCounter <= this.config.NumberOfTries) { try { Logger.Debug("Fetch is waiting for send lock"); lock (this) { Logger.Debug("Fetch acquired send lock. Begin send"); return(connection.Send(request)); } } catch (Exception ex) { //// if maximum number of tries reached if (tryCounter == this.config.NumberOfTries) { throw; } tryCounter++; Logger.InfoFormat("Fetch reconnect due to {0}", ex.FormatException()); } } return(null); }
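// Note (added illustrative sketch, not from the original source): the Fetch method above retries a
// failed send and logs each reconnect at INFO before giving up. The same retry-and-log shape, reduced
// to a hypothetical reusable helper (the names here are illustrative, not part of the original client):
using System;
using log4net;

static class Retry
{
    // Runs an operation up to maxTries times; failures before the final attempt are logged and retried.
    public static T Run<T>(Func<T> operation, int maxTries, ILog logger)
    {
        for (int attempt = 1; ; attempt++)
        {
            try
            {
                return operation();
            }
            catch (Exception ex) when (attempt < maxTries)
            {
                logger.InfoFormat("Attempt {0} of {1} failed, retrying: {2}", attempt, maxTries, ex.Message);
            }
        }
    }
}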
//[zk: localhost(CONNECTED) 12] get /brokers/topics/mvlogs //{"version":1,"partitions":{"1":[3,2],"0":[2,3]}} public static Dictionary <int, int[]> GetTopicMetadataInzookeeper(ZooKeeperClient zkClient, string topic) { Dictionary <int, int[]> treturn = new Dictionary <int, int[]>(); try { string data = zkClient.ReadData <string>(string.Format("/brokers/topics/{0}", topic), true); Dictionary <string, object> ctx = new JavaScriptSerializer().Deserialize <Dictionary <string, object> >(data); Type ty = ctx["partitions"].GetType(); //Logger.InfoFormat("The type for partitions :{0}", ty.FullName); Dictionary <string, object> tpartitons = (Dictionary <string, object>)ctx["partitions"]; foreach (KeyValuePair <string, object> kv in tpartitons) { int partitionID = Convert.ToInt32(kv.Key); //Logger.InfoFormat("The type for partitions value :{0}", kv.Value.GetType().FullName); ArrayList rep = (ArrayList)kv.Value; int[] partitionReplicas = new int[rep.Count]; for (int i = 0; i < rep.Count; i++) { partitionReplicas[i] = Convert.ToInt32(rep[i]); } treturn.Add(partitionID, partitionReplicas); } Logger.InfoFormat("Get topic data directly from zookeeper Topic:{0} Data:{1} Partition count:{2}", topic, data, treturn.Count); } catch (Exception ex) { Logger.Error("Failed to get topic " + topic + " data directly from zookeeper: " + ex.FormatException()); } return(treturn); }
private void AssignToReviewer(Models.JobApplication applicant) { if (applicant == null || applicant.ReviewStatus != ReviewStatus.New) { return; } var applicantService = new JobApplicationProvider(); var positionTypeService = new JobPositionTypeProvider(); var reviewerService = new ReviewerProvider(); var ghDataStore = GreenHouseDataStore.Instance; var job = ghDataStore.GetJobById(applicant.JobId); if (job == null || job.Status != JobStates.Open) { applicant.ReviewStatus = ReviewStatus.JobClosed; applicantService.Update(applicant); _logger.InfoFormat("Applicant {0} is closed because its job ({1}) is either deleted or not open", applicant.Id, job?.Id); return; } if (applicant.Source == ApplicantSources.Referral && AppConfigsProvider.ReferralsHandlingConfigs.IsEnabled) { applicant.ReviewStatus = ReviewStatus.HandledAsReferral; applicantService.Update(applicant); SendEmailForReferral(applicant, job); _logger.InfoFormat("Applicant {0} of job {1} has been handled as a referral", applicant.Id, job.Id); return; } var positionTypeStr = job.GetCustomFieldValue(JobCustomFields.PositionType); var positionType = positionTypeService.GetList(p => p.Name == positionTypeStr).FirstOrDefault(); if (positionType == null) { _logger.WarnFormat("Position Type '{0}' is not available.", positionTypeStr); return; } var selectedReviewer = GetNextReviewer(positionType); if (selectedReviewer == null) { _logger.WarnFormat("Cannot get any reviewers for applicant {0}: either no reviewers are configured for this position type or none of them are in working hours.", applicant.Id); return; } selectedReviewer.RecentAssignedAt = DateTime.UtcNow; selectedReviewer.AssignedCount++; applicant.ReviewerId = selectedReviewer.Id; applicant.ReviewStatus = ReviewStatus.Assigned; applicant.AssignedToReviewerAt = DateTime.UtcNow; applicantService.Update(applicant); reviewerService.Update(selectedReviewer); _logger.InfoFormat("Applicant {0} has been assigned to reviewer {1}", applicant.Id, selectedReviewer.Id); SendAssignmentEmail(selectedReviewer, applicant, job); }
protected override void OnStart(string[] args) { log.Info("Start service"); try { InitializeComponent(); cancelTokenSource = new CancellationTokenSource(); string rabbitMQServerName = ConfigurationManager.AppSettings["rabbitMQServerName"]; workers = new Worker[workerCount]; rabbitMQServer = RabbitMQManager.GetRabbitMQServer(rabbitMQServerName); Encoding enc = new UTF8Encoding(true, true); for (int i = 0; i < workerCount; i++) { log.InfoFormat("Start worker {i}", i.ToString()); var worker = new Worker(ChangePriceToRedisJobName, false, rabbitMQServer); workers[i] = worker; var token = this.cancelTokenSource.Token; Task workerTask = new Task(() => { worker.JobHandler = (downloadImageJob) => { try { token.ThrowIfCancellationRequested(); JobRabbitChangePrice job = JsonConvert.DeserializeObject <JobRabbitChangePrice>(enc.GetString(downloadImageJob.Data)); WebRequest client = WebRequest.Create(string.Format(@"http://172.22.1.108:8983/api/PriceDetector/price_detect.htm?pn={0}", Uri.EscapeDataString(job.Name))); string strData = (new StreamReader(client.GetResponse().GetResponseStream()).ReadToEnd()); MsCheckPrice msCheckPrice = WSS.Service.CheckPriceProduct.MsCheckPrice.FromJSON(strData); bool failPrice = false; if (job.NewPrice > 0) { if (msCheckPrice.price > 100000) { if (job.NewPrice > msCheckPrice.price * (decimal)2 || job.NewPrice < msCheckPrice.price / (decimal)2) { failPrice = true; } log.Info(string.Format("CompanyID: {0} ProductID: {1} OldPrice: {2} OldPrice: {3} FailProduct: {4} SuggestPrice: {5}", job.CompanyID, job.ProductID, job.OldPrice, job.NewPrice, failPrice, msCheckPrice.price)); } } return(true); } catch (OperationCanceledException opc) { log.Info("End worker"); return(false); } }; worker.Start(); }, token); workerTask.Start(); log.InfoFormat("Worker {0} started", i); } } catch (Exception ex) { log.Error("Start error", ex); throw; } }
/// <summary> /// Build the geometry. /// </summary> /// <remarks> /// Based on all the entities which have been added, and the batching /// options which have been set, this method constructs the batched /// geometry structures required. The batches are added to the scene /// and will be rendered unless you specifically hide them. /// </remarks> /// <note> /// Once you have called this method, you can no longer add any more /// entities. /// </note> public void Build() { log.InfoFormat("Building new static geometry {0}", name); buildCount++; // Make sure there's nothing from previous builds Destroy(); // Firstly allocate meshes to regions foreach (QueuedSubMesh qsm in queuedSubMeshes) { Region region = GetRegion(qsm.worldBounds, true); region.Assign(qsm); } bool stencilShadows = false; if (castShadows && owner.IsShadowTechniqueStencilBased) { stencilShadows = true; } // Now tell each region to build itself geometryBucketCount = 0; foreach (Region region in regionMap.Values) { geometryBucketCount += region.Build(stencilShadows, logDetails); } log.InfoFormat("Finished building new static geometry {0}", name); Dump(); }
protected override void OnStart(string[] args) { log.Info("Start service"); try { InitializeComponent(); cancelTokenSource = new CancellationTokenSource(); string rabbitMQServerName = ConfigurationManager.AppSettings["rabbitMQServerName"]; workers = new Worker[workerCount]; rabbitMQServer = RabbitMQManager.GetRabbitMQServer(rabbitMQServerName); string connectToSQL = @"Data Source=172.22.30.86,1455;Initial Catalog=QT_2;Persist Security Info=True;User ID=qt_vn;Password=@F4sJ=l9/ryJt9MT;connection timeout=200"; string connectToConnection = @"Data Source=42.112.28.93;Initial Catalog=QT_2;Persist Security Info=True;User ID=wss_price;Password=HzlRt4$$axzG-*UlpuL2gYDu;connection timeout=200"; CrawlerProductAdapter crawlerProductAdapter = new CrawlerProductAdapter(new SqlDb(connectToSQL)); ProductAdapter productAdapter = new ProductAdapter(new SqlDb(connectToConnection)); Encoding enc = new UTF8Encoding(true, true); for (int i = 0; i < workerCount; i++) { log.InfoFormat("Start worker {i}", i.ToString()); var worker = new Worker(AddProductToSqlJobName, false, rabbitMQServer); workers[i] = worker; var token = this.cancelTokenSource.Token; Task workerTask = new Task(() => { worker.JobHandler = (downloadImageJob) => { try { token.ThrowIfCancellationRequested(); string strData = enc.GetString(downloadImageJob.Data); JobRabbitAddProduct job = JsonConvert.DeserializeObject <JobRabbitAddProduct>(strData); if (job.DateAdd == DateTime.MinValue) { job.DateAdd = productAdapter.GetLastChangeOfProduct(job.ProductID); } crawlerProductAdapter.SaveLogAddProduct(job.ProductID, job.DetailUrl, job.IDCompnay, job.Name, job.DateAdd); log.Info(string.Format("Log for {0}", strData)); return(true); } catch (OperationCanceledException opc) { log.Info("End worker"); return(false); } }; worker.Start(); }, token); workerTask.Start(); log.InfoFormat("Worker {0} started", i); } } catch (Exception ex) { log.Error("Start error", ex); throw; } }
protected override void OnStart(string[] args) { log.Info("Start service"); try { int numberQUeue = this.jobQueues.Count(); InitializeComponent(); cancelTokenSource = new CancellationTokenSource(); Server.LogConnectionString = ConfigurationManager.AppSettings["LogConnectionString"]; QT.Entities.Server.ConnectionString = connectionString; string rabbitMQServerName = ConfigurationManager.AppSettings["rabbitMQServerName"]; workers = new Worker[numberQUeue]; rabbitMQServer = RabbitMQManager.GetRabbitMQServer(rabbitMQServerName); for (int i = 0; i < numberQUeue; i++) { int indexQueue = i; log.InfoFormat("Start worker {i}", i.ToString()); var worker = new Worker(this.jobQueues[indexQueue], false, rabbitMQServer); workers[i] = worker; var token = this.cancelTokenSource.Token; Task workerTask = new Task(() => { string queueName = this.jobQueues[indexQueue]; QT.Moduls.Notifycation.NotifycationAdapter adapter = new QT.Moduls.Notifycation.NotifycationAdapter(); worker.JobHandler = (updateDatafeedJob) => { try { string strMessage = System.Text.Encoding.UTF8.GetString(updateDatafeedJob.Data, 0, updateDatafeedJob.Data.Length); adapter.InsertMessage(queueName, strMessage, 0); log.Info(string.Format("MSS:{0}. Queue:{1}", strMessage, queueName)); return(true); } catch (OperationCanceledException opc) { log.Info("End worker"); return(true); } catch (Exception ex01) { log.Error(ex01); return(true); } }; worker.Start(); }, token); workerTask.Start(); log.InfoFormat("Worker {0} started", i); } } catch (Exception ex) { log.Error("Start error", ex); throw; } }
/// <summary> /// Reads data from the network into the specified buffer. /// It will wait up to the specified number of milliseconds for data to arrive, /// if no data has arrived after the specified number of milliseconds then the function returns 0 /// </summary> /// <param name="buffer">The buffer.</param> /// <param name="timeoutMilliseconds">The timeout milliseconds.</param> /// <returns>The number of bytes read into the buffer</returns> /// <exception cref="System.Net.Sockets.SocketException">On connection reset</exception> protected virtual int ReadSome(byte[] buffer, int timeoutMilliseconds) { // NOTE: THIS FUNCTION IS EXACTLY THE SAME AS THE ONE IN SocketReader any changes here should // also be performed there try { // Begin read if it is not already started if (currentReadRequest_ == null) { currentReadRequest_ = stream_.BeginRead(buffer, 0, buffer.Length, callback: null, state: null); } // Wait for it to complete (given timeout) currentReadRequest_.AsyncWaitHandle.WaitOne(timeoutMilliseconds); if (currentReadRequest_.IsCompleted) { _log.InfoFormat("socketreader: receiving, thread id: {0}", Thread.CurrentThread.ManagedThreadId); // Make sure to set currentReadRequest_ to before retreiving result // so a new read can be started next time even if an exception is thrown var request = currentReadRequest_; currentReadRequest_ = null; int bytesRead = stream_.EndRead(request); if (0 == bytesRead) { throw new SocketException(System.Convert.ToInt32(SocketError.ConnectionReset)); } return(bytesRead); } else { return(0); } } catch (System.IO.IOException ex) // Timeout { var inner = ex.InnerException as SocketException; if (inner != null && inner.SocketErrorCode == SocketError.TimedOut) { // Nothing read return(0); } else if (inner != null) { throw inner; //rethrow SocketException part (which we have exception logic for) } else { throw; //rethrow original exception } } }
static void Main( ) { log.InfoFormat("\n"); log.InfoFormat("SCJMapper_V2 - Started"); Application.EnableVisualStyles( ); Application.SetCompatibleTextRenderingDefault(false); Application.Run(new MainForm( )); log.InfoFormat("SCJMapper_V2 - Ended\n"); }
protected override void OnStart(string[] args) { InitializeComponent(); log.Info("Start service"); Article art = new Article(); try { string rabbitMQServerName = ConfigurationManager.AppSettings["rabbitMQServerName"]; workers = new Worker[workerCount]; rabbitMQServer = RabbitMQManager.GetRabbitMQServer(rabbitMQServerName); articleConnectionString = ConfigurationManager.AppSettings["ConnectionString"]; for (int i = 0; i < workerCount; i++) { worker = new Worker(jobNameUpdateArticle, false, rabbitMQServer); workers[i] = worker; var localWorker = worker; Task workerTask = new Task(() => { localWorker.JobHandler = (Job) => { try { long articleID = BitConverter.ToInt64(Job.Data, 0); art = ArticleRespository.GetArticleFromDb(articleID, articleConnectionString); if (art != null) { ArticleRespository.InsertArticleIntoCache(art); ArticleShortInfoRespository.InsertArticleShortInfoIntoCache(art); log.InfoFormat("insert complete:{0}", articleID); return(true); } else { log.InfoFormat("{0}: null", articleID); return(true); } } catch (Exception ex01) { log.Error(ex01); return(true); } }; localWorker.Start(); }); workerTask.Start(); } } catch (Exception ex) { log.Error("Start error", ex); throw; } }
protected override void OnStart(string[] args) { log.Info("Start service"); try { InitializeComponent(); cancelTokenSource = new CancellationTokenSource(); Server.LogConnectionString = ConfigurationManager.AppSettings["LogConnectionString"]; QT.Entities.Server.ConnectionString = connectionString; string rabbitMQServerName = ConfigurationManager.AppSettings["rabbitMQServerName"]; workers = new Worker[workerCount]; rabbitMQServer = RabbitMQManager.GetRabbitMQServer(rabbitMQServerName); for (int i = 0; i < workerCount; i++) { log.InfoFormat("Start worker {i}", i.ToString()); var worker = new Worker(refreshCacheProductInfoJobName, false, rabbitMQServer); workers[i] = worker; var token = this.cancelTokenSource.Token; Task workerTask = new Task(() => { var db = new QT.Entities.Data.SqlDb(this.connectionString); QT.Moduls.CrawlerProduct.Cache.CacheProductInfo cacheProductInfo = new QT.Moduls.CrawlerProduct.Cache.CacheProductInfo(db); worker.JobHandler = (jobMss) => { try { token.ThrowIfCancellationRequested(); QT.Entities.CrawlerProduct.RabbitMQ.MssRefreshCacheProductInfo mss = QT.Entities.CrawlerProduct.RabbitMQ.MssRefreshCacheProductInfo.FromJSON(QT.Entities.Common.ByteToString(jobMss.Data)); log.InfoFormat("Start run refresh company {0} : {1}", mss.CompanyID, mss.Domain); RedisCacheProductInfoAdapter rediscacheProductForCompany = RedisCacheProductInfoAdapter.Instance(); int numberProduct = cacheProductInfo.ReloadCacheForCompany(mss.CompanyID, mss.Domain); log.InfoFormat("End refresh company {0} : {1} {2} products", mss.CompanyID, mss.Domain, numberProduct); return(true); } catch (OperationCanceledException opc) { log.Info("End worker"); return(true); } }; worker.Start(); }, token); workerTask.Start(); log.InfoFormat("Worker {0} started", i); } } catch (Exception ex) { log.Error("Start error", ex); throw; } }
protected override void OnEnable() { // _server.Start(IPAddress.Loopback, 19137); //_server.WaitForIsListening(); //foreach (var m in _server.Methods) { //Log.InfoFormat("Exposed {0}", m.Key); } Log.InfoFormat("Dev Tools Started"); }
private void BUT_clearcustommaps_Click(object sender, EventArgs e) { var MainMap = new GMapControl(); MainMap.MapProvider = GoogleSatelliteMapProvider.Instance; var removed = MainMap.Manager.PrimaryCache.DeleteOlderThan(DateTime.Now, Custom.Instance.DbId); CustomMessageBox.Show("Removed " + removed + " images"); log.InfoFormat("Removed {0} images", removed); }
public virtual void OnMessage(QuickFix.FIX44.ApplicationMessageRequest message, SessionID session) { try { string sessionID = session.ToString(); logger.InfoFormat("SessionID[{0}]: ApplicationMessageRequest ApplReqID[{1}] msg[{2}]", sessionID, message.ApplReqID.ToString(), message.ToString()); } catch (Exception ex) { logger.Error("onMessage(ApplicationMessageRequest): " + ex.Message, ex); } }
protected override void OnStart(string[] args) { log.Info("Start service"); try { InitializeComponent(); cancelTokenSource = new CancellationTokenSource(); string rabbitMQServerName = ConfigurationManager.AppSettings["rabbitMQServerName"]; workers = new Worker[workerCount]; rabbitMQServer = RabbitMQManager.GetRabbitMQServer(rabbitMQServerName); QT.Moduls.LogCassandra.LogCrawler logCass = new QT.Moduls.LogCassandra.LogCrawler(); for (int i = 0; i < workerCount; i++) { log.InfoFormat("Start worker {i}", i.ToString()); var worker = new Worker(CrawlerProductLog, false, rabbitMQServer); workers[i] = worker; var token = this.cancelTokenSource.Token; Task workerTask = new Task(() => { worker.JobHandler = (updateDatafeedJob) => { try { token.ThrowIfCancellationRequested(); QT.Entities.CrawlerProduct.RabbitMQ.MssLogFindNewProduct mss = QT.Entities.CrawlerProduct.RabbitMQ.MssLogFindNewProduct.GetDataFromMessage(updateDatafeedJob.Data); logCass.LogFindNewProduct(mss.CRC, mss.Date_Log, mss.is_OK, mss.Product_ID, mss.Session, mss.Detail_Url); return(true); } catch (OperationCanceledException opc) { log.Info("End worker"); return(true); } catch (Exception ex01) { log.Info(ex01); return(true); } }; worker.Start(); }, token); workerTask.Start(); log.InfoFormat("Worker {0} started", i); } } catch (Exception ex) { log.Error("Start error", ex); throw; } }
public IHttpActionResult AddInvite(InviteDTO request) { logger.InfoFormat("data AddInvite controller is {0}", Newtonsoft.Json.JsonConvert.SerializeObject(request, Newtonsoft.Json.Formatting.Indented)); try { var result = _Service.AddInvite(request); return(Ok(result.ToJsonResult(result.Data))); } catch (Exception ex) { logger.Error("AddInvite exception", ex); return(null); } }
public ViewBuilder() { _logger = log4net.LogManager.GetLogger(this.GetType()); Type genericMessageHandlerType = typeof (IHandleMessages<>); var eventStore = new PersistenceWireup( Wireup.Init() .UsingRavenPersistence("MembershipEventStore") ) .InitializeStorageEngine() .Build(); var container = BootstrapMessageHandlers(); var commits = eventStore.Advanced.GetFrom(new DateTime(1900, 1, 1)); foreach (var commit in commits) { foreach (var @event in commit.Events.Select(x => x.Body)) { Type eventType = @event.GetType(); _logger.InfoFormat("Handling event '{0}'", @eventType.Name); Type specificEventHandlerType = genericMessageHandlerType.MakeGenericType(eventType); Type enumerableOfEventHandlerType = typeof (IEnumerable<>).MakeGenericType(specificEventHandlerType); var handlers = container.Resolve(enumerableOfEventHandlerType) as IEnumerable; bool hadHandlers = false; if (handlers != null) { foreach (var handler in handlers) { hadHandlers = true; _logger.InfoFormat(" Processing event with handler {0}", handler.GetType()); var genericHandleMethod = handler.GetType().GetMethods().Where( m => m.Name == "Handle" && m.GetParameters()[0].ParameterType == eventType). SingleOrDefault(); if (genericHandleMethod != null) { genericHandleMethod.Invoke(handler, new object[]{ @event }); } } } if (!hadHandlers) { _logger.InfoFormat("No handlers found for event type {0}", eventType.Name); } } } }
public void LoadImpl() { HttpWebResponse response = null; try { WebRequest webRequest = WebRequest.Create(url); if (authUser != null) { webRequest.Credentials = new NetworkCredential(authUser, authPW, authDomain); } response = webRequest.GetResponse() as HttpWebResponse; if (response.StatusCode == HttpStatusCode.OK) { if (contentTypeMap.ContainsKey(response.ContentType)) { log.InfoFormat("TextureFetcher: content-type: {0}, content-length: {1}", response.ContentType, response.ContentLength); Stream stream = response.GetResponseStream(); BinaryReader binReader = new BinaryReader(stream); sourceBuffer = binReader.ReadBytes((int)response.ContentLength); ilImageType = contentTypeMap[response.ContentType]; binReader.Close(); log.InfoFormat("TextureFetcher: bytes read: {0}", sourceBuffer.Length); } else { log.ErrorFormat("TextureFetcher: invalid content type: {0} : {1}", response.ContentType, url); } } else { log.ErrorFormat("TextureFetcher: url not found: {0}", url); } } catch (Exception ex) { LogUtil.LogUtil.ExceptionLog.ErrorFormat("TextureFetcher: exception while loading image: {0}", ex); } finally { if (response != null) { response.Close(); } Done = true; } }
/// <summary> /// Returns an ArrayList of ActiveDirectory users for matching mailboxes /// </summary> /// <param name="attribute">ActiveDirectory attr to search for</param> /// <param name="terms">Array of seach terms</param> /// <returns>An active directoru result set</returns> public SearchResultCollection SearchDirectoryByAttribute( string attribute, params string[] terms) { if (log.IsInfoEnabled) { log.InfoFormat( "Searching Active Directory [Server={0}, User={1}]", this.LDAPServerUrl, ConfigCache.DomainUserLogin); } StringBuilder sb = new StringBuilder("("); // Build the LDAP query if (terms.Length > 0) { if (terms.Length > 1) { sb.Append("|"); foreach (string searchTerm in terms) { sb.AppendFormat("({0}={1})", attribute, searchTerm); } } else { sb.AppendFormat("{0}={1}", attribute, terms[0]); } } else { sb.AppendFormat("{0}=*", attribute); } sb.Append(")"); try { return(SearchDirectory(sb.ToString())); } catch (Exception ex) { throw new GCalExchangeException( GCalExchangeErrorCode.ActiveDirectoryError, "Error while querying Active Directory for exchange users.", ex); } }
public static void info(string format, params object[] args) { if (loginfo.IsInfoEnabled) { loginfo.InfoFormat(format, args); } }
public LogScope(ILog log, string format, params object[] args) { this.log = log; message = String.Format(format, args); log.InfoFormat("Begin: {0}", message); stopwatch = Stopwatch.StartNew(); }
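// Note (added illustrative sketch, not from the original source): the LogScope constructor above logs
// "Begin" and starts a stopwatch; the matching "End"/elapsed-time logging is assumed to live in a
// Dispose method that is not shown here. A hypothetical caller, under that assumption:
using System.Threading;
using log4net;

static class LogScopeUsage
{
    static readonly ILog Log = LogManager.GetLogger(typeof(LogScopeUsage));

    static void Main()
    {
        // Assumes LogScope implements IDisposable and logs the elapsed time when disposed.
        using (new LogScope(Log, "Rebuilding cache for tenant {0}", 42))
        {
            Thread.Sleep(100); // stand-in for the real work being timed
        }
    }
}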
private static HttpResponse remoteCall( int bufmax, ILog log, HttpWebRequest request ) { HttpWebResponse response; try { response = request.GetResponse() as HttpWebResponse; } catch (WebException e) { response = e.Response as HttpWebResponse; } if (response == null) { throw new Exception("No HTTP response was received"); } Stream receiveStream = response.GetResponseStream(); Encoding encode = System.Text.Encoding.GetEncoding("utf-8"); StreamReader readStream = new StreamReader(receiveStream, encode); Char[] read = new Char[bufmax]; int count = readStream.Read(read, 0, bufmax); if (count == bufmax) throw new Exception("Response buffer is too small"); HttpResponse callRespon = new HttpResponse(); callRespon.body = new String(read, 0, count); callRespon.status = response.StatusCode.ToString(); log.InfoFormat("[GET] status {0} body:{1}", callRespon.status, callRespon.body); response.Close(); readStream.Close(); return callRespon; }
public static IGroupDataProvider GetProviderFromConfigName(ILog log, IConfig groupsConfig, string configName) { switch (configName) { case "XmlRpc": string ServiceURL = groupsConfig.GetString("XmlRpcServiceURL"); bool DisableKeepAlive = groupsConfig.GetBoolean("XmlRpcDisableKeepAlive", false); string ServiceReadKey = groupsConfig.GetString("XmlRpcServiceReadKey", String.Empty); string ServiceWriteKey = groupsConfig.GetString("XmlRpcServiceWriteKey", String.Empty); log.InfoFormat("[GROUPS]: XmlRpc Service URL set to: {0}", ServiceURL); return new XmlRpcGroupDataProvider(ServiceURL, DisableKeepAlive, ServiceReadKey, ServiceWriteKey); case "Native": string dbType = groupsConfig.GetString("NativeProviderDBType"); string connStr = groupsConfig.GetString("NativeProviderConnString"); ConnectionFactory connFactory = new ConnectionFactory(dbType, connStr); return new NativeGroupDataProvider(connFactory); } return null; }
static void PrintVersion(ILog log) { var assembly = Assembly.GetExecutingAssembly(); var attr = assembly.GetCustomAttribute<AssemblyVersionAttribute>(); var version = attr == null ? "FIXME" : attr.Version; log.InfoFormat("nsq_rand {0}", version); }
protected void RunProcess(string executablePath, string executableArgs, ILog logger) { logger.InfoFormat("{0} {1}", executablePath, executableArgs); var msDeploy = new Process { StartInfo = { UseShellExecute = false, RedirectStandardError = true, RedirectStandardOutput = true, FileName = executablePath, Arguments = executableArgs } }; msDeploy.Start(); while (!msDeploy.HasExited) { var output = msDeploy.StandardOutput.ReadToEnd(); var error = msDeploy.StandardError.ReadToEnd(); logger.Info(output); if (error.Length > 0) { logger.Error(error); throw new ApplicationException("An error occurred running process '"+Path.GetFileName(executablePath)+"'", new Exception(error)); } msDeploy.WaitForExit(2000); } }
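// Note (added illustrative sketch, not from the original source): calling ReadToEnd on both redirected
// streams from one thread, as the method above does, can deadlock if the child process fills the other
// stream's buffer first. A common alternative is event-driven capture, sketched here with hypothetical
// helper names:
using System.Diagnostics;
using log4net;

static class ProcessRunner
{
    public static int Run(string fileName, string arguments, ILog logger)
    {
        var process = new Process
        {
            StartInfo =
            {
                FileName = fileName,
                Arguments = arguments,
                UseShellExecute = false,
                RedirectStandardOutput = true,
                RedirectStandardError = true
            }
        };
        // Stream output asynchronously so neither pipe can back up and block the child process.
        process.OutputDataReceived += (s, e) => { if (e.Data != null) logger.Info(e.Data); };
        process.ErrorDataReceived += (s, e) => { if (e.Data != null) logger.Error(e.Data); };

        process.Start();
        process.BeginOutputReadLine();
        process.BeginErrorReadLine();
        process.WaitForExit();
        return process.ExitCode;
    }
}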
protected AssemblyConfiguration(Dictionary<string, object> defaultValues, ILog logger) { NameValueCollection nameValues = ConfigurationManager.AppSettings; CreateConfiguration(defaultValues, nameValues); foreach (KeyValuePair<string, string> pair in usedValues) logger.InfoFormat("Using {0}={1}", pair.Key, pair.Value); }
public void InfoFormat(string format, params object[] args) { if (IsInfoEnabled) { log.InfoFormat(format, args); } }
/// <summary> /// 消息 /// </summary> public static void InfoFormat(string message, params object[] args) { if (log.IsInfoEnabled) { log.InfoFormat(message, args); } }
/// <summary> /// 输出普通日志 /// </summary> /// <param name="level"></param> /// <param name="format"></param> /// <param name="args"></param> private void Log(LoggerLevel level, string format, params object[] args) { switch (level) { case LoggerLevel.Debug: _Logger4net.DebugFormat(format, args); break; case LoggerLevel.Info: _Logger4net.InfoFormat(format, args); break; case LoggerLevel.Warn: _Logger4net.WarnFormat(format, args); break; case LoggerLevel.Error: _Logger4net.ErrorFormat(format, args); break; case LoggerLevel.Fatal: _Logger4net.FatalFormat(format, args); break; } }
//--- Class Methods --- public static bool Convert(string confluenceXMLRPCUrl, string confluenceAPIUrl, string confluenceUserName, string confluenceUserPassword, string dreamAPI, string dekiUserName, string dekiUserPassword, bool compatibleConvertUserPermissions, List <string> spacesToConvert, bool processNewsPages, bool processPersonalSpaces, string fallbackSpacePrefix) { using (ACConverter converter = new ACConverter()) { try { Log.Info("Connecting to MindTouch API"); converter.ConnectToDeki(dreamAPI, dekiUserName, dekiUserPassword); } catch (DreamResponseException dre) { Log.Fatal("Can not connect to MindTouch API server.", dre); return(false); } if (converter._connectedToDeki) { Log.Info("Successfully connected to MindTouch"); } else { Log.Fatal("Can not connect to MindTouch server."); return(false); } try { Log.Info("Connecting to Confluence API"); converter.ConnectToConfluence(confluenceAPIUrl, confluenceUserName, confluenceUserPassword); } catch (System.Net.WebException e) { Log.ErrorExceptionFormat(e, "Can not connect to Confluence"); return(false); } catch (System.Web.Services.Protocols.SoapException e) { if ((e.Detail != null) && (e.Detail.OuterXml != null)) { Log.Fatal("Can not connect to Confluence: " + e.Detail.OuterXml, e); } else { Log.Fatal("Can not connect to Confluence", e); } return(false); } // The base URL needs to be set globally in a static variable so that macros can access it. // For example so the "include" macro knows if a given link is for a page on the current confluence site or not. ConfluenceBaseURL = converter._confluenceService.GetServerInfo().baseUrl; Log.Info("Connecting to Confluence XMLRPC API"); if (!converter.ConnectToConfluenceRPC(confluenceXMLRPCUrl, confluenceUserName, confluenceUserPassword)) { Log.Fatal("Can not connect to Confluence XML RPC server."); } Log.Info("Successfully connected to Confluence"); RemoteServerInfo confluenceServerInfo = converter._confluenceService.GetServerInfo(); Log.InfoFormat("Confluence version: {0}.{1}.{2}", confluenceServerInfo.majorVersion.ToString(), confluenceServerInfo.minorVersion.ToString(), confluenceServerInfo.patchLevel.ToString()); converter.Convert(new XUri(confluenceServerInfo.baseUrl), compatibleConvertUserPermissions, processNewsPages, spacesToConvert, processPersonalSpaces, fallbackSpacePrefix); return(true); } }
internal static void LogMessages(ILog log) { log.Info("This is an info"); log.InfoFormat("Base called at {0}", DateTime.Now); log.Debug("This is a debug"); log.Warn("This is a warning"); log.Error("This is an error"); }
public static void Info(ILog log, string format, params object[] @params) { if (log == null) { return; } log.InfoFormat(format, @params); }
public static ActionBlock<StatsdMessage> CreateBlock(ITargetBlock<Bucket> target, string rootNamespace, bool removeZeroGauges, IIntervalService intervalService, ILog log) { var gauges = new ConcurrentDictionary<string, double>(); var root = rootNamespace; var ns = String.IsNullOrEmpty(rootNamespace) ? "" : rootNamespace + "."; var incoming = new ActionBlock<StatsdMessage>(p => { var gauge = p as Gauge; gauges.AddOrUpdate(gauge.Name, gauge.Value, (key, oldValue) => gauge.Value); }, Utility.UnboundedExecution()); intervalService.Elapsed += (sender, e) => { if (gauges.Count == 0) { return; } var items = gauges.ToArray(); var bucket = new GaugesBucket(items, e.Epoch, ns); if (removeZeroGauges) { // Get all zero-value gauges double placeholder; var zeroGauges = 0; for (int index = 0; index < items.Length; index++) { if (items[index].Value == 0) { gauges.TryRemove(items[index].Key, out placeholder); zeroGauges += 1; } } if (zeroGauges > 0) { log.InfoFormat("Removed {0} empty gauges.", zeroGauges); } } gauges.Clear(); target.Post(bucket); }; incoming.Completion.ContinueWith(p => { // Tell the upstream block that we're done target.Complete(); }); return incoming; }
public void AddInfoLog(ILog logger) { object[] args = new object[4]; if (!string.IsNullOrEmpty(Data)) { args[0] = LogProcess; args[1] = User; args[2] = Data; args[3] = Message; logger.InfoFormat("[{0}] [{1}] [{2}] [{3}]", args); } else { args[0] = LogProcess; args[1] = User; args[2] = Message; logger.InfoFormat("[{0}] [{1}] [{2}] ", args); } }
// Test-ClassInitialize public ServerHostTestFixture() { this.className = GetType().Name; // Set up the log4net configuration. BasicConfigurator.Configure(); this.log = LogManager.GetLogger(className); log.InfoFormat("{0} - Initialize", className); }
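// Note (added illustrative sketch, not from the original source): several snippets here rely on log4net
// being configured before first use (BasicConfigurator in the fixture above, XmlConfigurator elsewhere).
// A minimal programmatic setup for a console entry point:
using log4net;
using log4net.Config;

static class LoggingBootstrap
{
    static void Main()
    {
        // Console-appender configuration; XmlConfigurator.Configure() would read App.config instead.
        BasicConfigurator.Configure();
        ILog log = LogManager.GetLogger(typeof(LoggingBootstrap));
        log.InfoFormat("Logging configured at {0:u}", System.DateTime.UtcNow);
    }
}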
protected AssemblyConfiguration(string sectionGroup, string sectionName, Dictionary<string, object> defaultValues, ILog logger) { var nameValues = (NameValueCollection)ConfigurationManager.GetSection(sectionGroup + "/" + sectionName); if (logger == null) logger = LogManager.GetLogger(typeof (AssemblyConfiguration)); if (nameValues == null) { nameValues = new NameValueCollection(); } CreateConfiguration(defaultValues, nameValues); foreach (KeyValuePair<string, string> pair in usedValues) logger.InfoFormat("Using {0}={1} for {2}/{3}", pair.Key, pair.Value, sectionGroup, sectionName); }
public static IEnumerable<string> GetDeployments(this IOctopusSession session, Release release, IEnumerable<DeploymentEnvironment> environments, bool force, ILog log) { var linksToDeploymentTasks = new List<string>(); foreach (var environment in environments) { var deployment = session.DeployRelease(release, environment, force); var linkToTask = deployment.Link("Task"); linksToDeploymentTasks.Add(linkToTask); log.InfoFormat("Successfully scheduled release {0} for deployment to environment {1}", release.Version, environment.Name); } return linksToDeploymentTasks; }
public static Action ReleaseModifiers(this IKeyStateService keyStateService, ILog log) { var lastLeftShiftValue = keyStateService.KeyDownStates[KeyValues.LeftShiftKey].Value; var lastLeftCtrlValue = keyStateService.KeyDownStates[KeyValues.LeftCtrlKey].Value; var lastLeftWinValue = keyStateService.KeyDownStates[KeyValues.LeftWinKey].Value; var lastLeftAltValue = keyStateService.KeyDownStates[KeyValues.LeftAltKey].Value; log.InfoFormat("Releasing modifiers (shift:{0}, ctrl:{1}, win:{2}, alt:{3})", lastLeftShiftValue, lastLeftCtrlValue, lastLeftWinValue, lastLeftAltValue); keyStateService.KeyDownStates[KeyValues.LeftShiftKey].Value = KeyDownStates.Up; keyStateService.KeyDownStates[KeyValues.LeftCtrlKey].Value = KeyDownStates.Up; keyStateService.KeyDownStates[KeyValues.LeftWinKey].Value = KeyDownStates.Up; keyStateService.KeyDownStates[KeyValues.LeftAltKey].Value = KeyDownStates.Up; return () => { log.InfoFormat("Restoring modifiers (shift:{0}, ctrl:{1}, win:{2}, alt:{3})", lastLeftShiftValue, lastLeftCtrlValue, lastLeftWinValue, lastLeftAltValue); keyStateService.KeyDownStates[KeyValues.LeftShiftKey].Value = lastLeftShiftValue; keyStateService.KeyDownStates[KeyValues.LeftCtrlKey].Value = lastLeftCtrlValue; keyStateService.KeyDownStates[KeyValues.LeftWinKey].Value = lastLeftWinValue; keyStateService.KeyDownStates[KeyValues.LeftAltKey].Value = lastLeftAltValue; }; }
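// Note (added illustrative sketch, not from the original source): ReleaseModifiers above captures the
// current key-down states, clears them, and returns an Action that restores the captured values. The
// same capture-and-restore shape, reduced to a self-contained example with hypothetical state:
using System;

static class CaptureRestoreDemo
{
    static string _mode = "normal";

    // Mutates the state and hands back an Action that undoes the change.
    static Action EnterMaintenanceMode()
    {
        var previous = _mode;
        _mode = "maintenance";
        return () => _mode = previous;
    }

    static void Main()
    {
        var restore = EnterMaintenanceMode();
        try
        {
            Console.WriteLine(_mode); // "maintenance" while the temporary state is in effect
        }
        finally
        {
            restore(); // put the captured value back, mirroring the modifier-key restore above
        }
    }
}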
protected override void OnStart(string[] args) { _log = LogManager.GetLogger(this.GetType()); _isStopping = false; try { _log.Info("Starting minecraft"); MinecraftConfig config = ConfigurationManager.GetSection( typeof(MinecraftConfig).Name) as MinecraftConfig; Process startCmd = new Process(); startCmd.StartInfo.FileName = config.JavaExecutable; startCmd.StartInfo.Arguments = string.Format("-Xmx{0}M -Xms{1}M -jar {2}\\minecraft_server.jar nogui", config.MaxHeapInMegabytes, config.InitialHeapInMegabytes, config.MinecraftJarDirectory); startCmd.StartInfo.WorkingDirectory = config.MinecraftJarDirectory; startCmd.StartInfo.UseShellExecute = false; startCmd.StartInfo.RedirectStandardInput = true; startCmd.StartInfo.RedirectStandardOutput = true; startCmd.StartInfo.RedirectStandardError = true; startCmd.EnableRaisingEvents = true; startCmd.OutputDataReceived += new DataReceivedEventHandler(startCmd_OutputDataReceived); startCmd.ErrorDataReceived += new DataReceivedEventHandler(startCmd_ErrorDataReceived); startCmd.Exited += new EventHandler(startCmd_Exited); _log.InfoFormat("Executing '{0} {1}'", startCmd.StartInfo.FileName, startCmd.StartInfo.Arguments); startCmd.Start(); startCmd.BeginOutputReadLine(); startCmd.BeginErrorReadLine(); _standardInput = startCmd.StandardInput; } catch (Exception ex) { _log.Fatal("Failed to start minecraft", ex); } }
void RenderToConsole(ActivityElement element, ILog log, string indent) { if (!IsPrintable(element)) return; if (element.Status == ActivityStatus.Success) { Console.ForegroundColor = ConsoleColor.Green; } else if (element.Status == ActivityStatus.SuccessWithWarning) { Console.ForegroundColor = ConsoleColor.Yellow; } else if (element.Status == ActivityStatus.Failed) { Console.ForegroundColor = ConsoleColor.Red; } Console.WriteLine("{0} {1}: {2}", indent, element.Status, element.Name); Console.ResetColor(); foreach (var logEntry in element.LogElements) { if (logEntry.Category == "Error" || logEntry.Category == "Fatal") { Console.ForegroundColor = ConsoleColor.Red; } else if (logEntry.Category == "Warning") { Console.ForegroundColor = ConsoleColor.Yellow; } log.InfoFormat("{0}{1,-8} {2}", indent, logEntry.Category, LineSplitter.Split(indent + new string(' ', 11), logEntry.MessageText)); Console.ResetColor(); } foreach (var child in element.Children) { RenderToConsole(child, log, indent + " "); } }
public MessagesEndpoint(IBus bus, ILog log) : base("/messages") { _bus = bus; _log = log; Post["/"] = parameters => { string typeName = Request.Query.type; if (typeName == null) { throw new Exception("type required"); } var type = typeof (MessagesEndpoint).Assembly.GetTypes() .Concat(typeof (IEntity<>).Assembly.GetTypes()) .FirstOrDefault(t => t.Name.EqualsIgnoreCase(typeName)); if (type == null) { throw new Exception("Could not find type {0}".FormatFrom(typeName)); } var message = Activator.CreateInstance(type); if (message == null) { throw new Exception("could not deserialize message"); } var bindToMethod = typeof (ModuleExtensions).GetMethods(BindingFlags.Public | BindingFlags.Static) .Single(m => m.Name == "BindTo" && m.GetParameters().Count() == 2); bindToMethod.MakeGenericMethod(type).Invoke(this, new[] {this, message}); _log.InfoFormat("Publishing inbound message: ({0}) {1}", type.Name, JsonConvert.SerializeObject(message)); _bus.Publish(message); return message; }; }
public BooleanResult get(Dictionary<string,string> settings, string username, string password) { m_logger = LogManager.GetLogger(String.Format("pgSMB2[Roaming:{0}]", username)); if (!UserCanBeUsed(username)) { // user exists and was not created by pgina m_logger.InfoFormat("user {0} does already exist on this system and does not contain a comment of \"pGina created pgSMB2\"", username); return new BooleanResult() { Success = true }; } if (!Connect2share(settings["SMBshare"], username, password, Convert.ToUInt32(settings["ConnectRetry"]), false)) { return new BooleanResult() { Success = false, Message = string.Format("Unable to connect to {0}", settings["RoamingSource"]) }; } try { if (!Directory.Exists(settings["RoamingSource"])) { try { Directory.CreateDirectory(settings["RoamingSource"]); } catch (Exception ex) { m_logger.DebugFormat("CreateDirectory({0}) failed {1}", settings["RoamingSource"], ex.Message); } } string remote_file = settings["RoamingSource"] + "\\" + settings["Filename"]; if (File.Exists(remote_file) || File.Exists(remote_file + ".bak")) { // there is a remote file Boolean loadprofile = true; // what file to use ? if (File.Exists(remote_file)) { settings.Add("Filename_real", settings["Filename"]); } else { settings.Add("Filename_real", settings["Filename"] + ".bak"); remote_file += ".bak"; } // is there a local roaming profile (there shouldnt be any) string ProfDir = GetExistingUserProfile(username, password); // is this a temp profile if (!String.IsNullOrEmpty(ProfDir)) { Abstractions.WindowsApi.pInvokes.structenums.USER_INFO_4 userinfo4 = new Abstractions.WindowsApi.pInvokes.structenums.USER_INFO_4(); if (Abstractions.WindowsApi.pInvokes.UserGet(username, ref userinfo4)) { if (userinfo4.comment.EndsWith(" tmp")) { m_logger.InfoFormat("delete temp profile {0}", ProfDir); DirectoryDel(ProfDir, 3); } } } if (File.Exists(ProfDir + "\\ntuser.dat")) //worst case "\\ntuser.dat" { // there is a local profile of this user // we need to compare the write date between the profile and the compressed remote roaming profile // to be sure that we dont overwrite a newer profile with an old one // possibly reason is a BSOD/hard reset ... 
m_logger.Debug("User " + username + " still own a lokal profile UTCdate:" + File.GetLastWriteTimeUtc(ProfDir + "\\ntuser.dat")); m_logger.Debug("User " + username + " compressed remote profile UTCdate:" + File.GetLastWriteTimeUtc(remote_file)); if (DateTime.Compare(File.GetLastWriteTimeUtc(ProfDir + "\\ntuser.dat"), File.GetLastWriteTimeUtc(remote_file)) >= 0) { m_logger.DebugFormat("the local profile ('{0}') is newer/equal than the remote one, im not downloading the remote one", ProfDir); loadprofile = false; } else { m_logger.Debug("the local profile is older than the remote one"); } } if (!userAdd(settings, username, password, "pGina created pgSMB2")) { userDel(settings, username, password); return new BooleanResult() { Success = false, Message = string.Format("Unable to add user {0}", username) }; } if (loadprofile) { if (!GetProfile(ref settings, username, password)) { return new BooleanResult() { Success = false, Message = string.Format("Unable to get the Profile {0} from {1}", settings["Filename"], settings["RoamingSource"]) }; } if (!Connect2share(settings["SMBshare"], null, null, 0, true)) { m_logger.WarnFormat("unable to disconnect from {0}", settings["RoamingSource"]); } if (!SetACL(settings["UserProfilePath"], username, password, Convert.ToUInt32(settings["MaxStore"]), Convert.ToUInt32(settings["ConnectRetry"]))) { userDel(settings, username, password); return new BooleanResult() { Success = false, Message = string.Format("Unable to set ACL for user {0}", username) }; } } } else { m_logger.DebugFormat("there is no {0}\\{1} or {2}\\{3}{4}", settings["RoamingSource"], settings["Filename"], settings["RoamingSource"], settings["Filename"], ".bak"); if (!userAdd(settings, username, password, "pGina created pgSMB2")) { userDel(settings, username, password); return new BooleanResult() { Success = false, Message = string.Format("Unable to add user {0}", username) }; } } } catch (Exception ex) { return new BooleanResult() { Success = false, Message = string.Format("Unable to get the Roaming Profile from {0}\nError: {1}", settings["RoamingSource"], ex.Message) }; } finally { if (!Connect2share(settings["SMBshare"], null, null, 0, true)) { m_logger.WarnFormat("unable to disconnect from {0}", settings["RoamingSource"]); } } return new BooleanResult() { Success = true }; }
private void RestartApplication(DeploymentContext context, ILog logger) { string virtualDirectoryPath = null; string[] websitePath = context.Package.Title.Split(new[] {'/'}, StringSplitOptions.RemoveEmptyEntries); if (websitePath.Length > 1) { virtualDirectoryPath = string.Join("/", websitePath.Skip(1).ToArray()); } using (var website = FindVirtualDirectory("localhost", websitePath[0], virtualDirectoryPath)) { if (website == null) { logger.WarnFormat("No such IIS website found: '{0}'", context.Package.Id); return; } var appPoolId = website.Properties["AppPoolId"].Value; using (var applicationPool = new DirectoryEntry("IIS://localhost/W3SVC/AppPools/" + appPoolId)) { logger.InfoFormat("Stopping AppPool {0}...", appPoolId); applicationPool.Invoke("Stop"); logger.InfoFormat("Starting AppPool {0}...", appPoolId); applicationPool.Invoke("Start"); } } }
private static void Main() { XmlConfigurator.Configure(); _log = LogManager.GetLogger(ServiceName); _log.InfoFormat("Starting service ..."); _appName = System.Diagnostics.Process.GetCurrentProcess().ProcessName.Replace(".vshost", string.Empty); _displayName = _appName; _parameters = ParseCommandLine(Environment.CommandLine); // check for the service-removal command var isNeedRemoveService = _parameters.ContainsKey(ParamServiceNo); if (isNeedRemoveService) { RemoveService(); return; } // determine the run mode (service or console) var runAsService = _parameters.ContainsKey(ParamService); // run the application as a Windows service if (runAsService) { // if the service is not installed yet - install and start it var sysService = GetInstalledService(); if (sysService == null) { sysService = CreateService(); sysService.Start(); return; } // if the service is already installed - run the service logic var svc = new SchedulerHost(new ServiceContext(_parameters)); ServiceBase.Run(svc); } // run as a console application else { var app = new SchedulerHost(new ServiceContext(_parameters)); try { app.Start(null); _log.Info("Press escape to exit"); ConsoleKeyInfo keyInfo; do keyInfo = Console.ReadKey(); while (keyInfo.Key != ConsoleKey.Escape); } catch (Exception ex) { _log.ErrorFormat("Fatal error: {0}", ex); Console.WriteLine("Press a key to exit"); Console.ReadKey(); } finally { app.Stop(); } } }
private static void DisplayResults(IPersistance persistance, ICommandLine parser, ILog logger) { if (!logger.IsInfoEnabled) return; var CoverageSession = persistance.CoverageSession; var totalClasses = 0; var visitedClasses = 0; var altTotalClasses = 0; var altVisitedClasses = 0; var totalMethods = 0; var visitedMethods = 0; var altTotalMethods = 0; var altVisitedMethods = 0; var unvisitedClasses = new List<string>(); var unvisitedMethods = new List<string>(); if (CoverageSession.Modules != null) { foreach (var @class in from module in CoverageSession.Modules.Where(x=>x.Classes != null) from @class in module.Classes.Where(c => !c.ShouldSerializeSkippedDueTo()) select @class) { if (@class.Methods == null) continue; if ((@class.Methods.Any(x => !x.ShouldSerializeSkippedDueTo() && x.SequencePoints.Any(y => y.VisitCount > 0)))) { visitedClasses += 1; totalClasses += 1; } else if ((@class.Methods.Any(x => x.FileRef != null))) { totalClasses += 1; unvisitedClasses.Add(@class.FullName); } if (@class.Methods.Any(x => x.Visited)) { altVisitedClasses += 1; altTotalClasses += 1; } else if (@class.Methods.Any()) { altTotalClasses += 1; } foreach (var method in @class.Methods.Where(x=> !x.ShouldSerializeSkippedDueTo())) { if ((method.SequencePoints.Any(x => x.VisitCount > 0))) { visitedMethods += 1; totalMethods += 1; } else if (method.FileRef != null) { totalMethods += 1; unvisitedMethods.Add(string.Format("{0}", method.Name)); } altTotalMethods += 1; if (method.Visited) { altVisitedMethods += 1; } } } } if (totalClasses > 0) { logger.InfoFormat("Visited Classes {0} of {1} ({2})", visitedClasses, totalClasses, Math.Round(visitedClasses * 100.0 / totalClasses, 2)); logger.InfoFormat("Visited Methods {0} of {1} ({2})", visitedMethods, totalMethods, Math.Round(visitedMethods * 100.0 / totalMethods, 2)); logger.InfoFormat("Visited Points {0} of {1} ({2})", CoverageSession.Summary.VisitedSequencePoints, CoverageSession.Summary.NumSequencePoints, CoverageSession.Summary.SequenceCoverage); logger.InfoFormat("Visited Branches {0} of {1} ({2})", CoverageSession.Summary.VisitedBranchPoints, CoverageSession.Summary.NumBranchPoints, CoverageSession.Summary.BranchCoverage); logger.InfoFormat(""); logger.InfoFormat( "==== Alternative Results (includes all methods including those without corresponding source) ===="); logger.InfoFormat("Alternative Visited Classes {0} of {1} ({2})", altVisitedClasses, altTotalClasses, Math.Round(altVisitedClasses * 100.0 / altTotalClasses, 2)); logger.InfoFormat("Alternative Visited Methods {0} of {1} ({2})", altVisitedMethods, altTotalMethods, Math.Round(altVisitedMethods * 100.0 / altTotalMethods, 2)); if (parser.ShowUnvisited) { logger.InfoFormat(""); logger.InfoFormat("====Unvisited Classes===="); foreach (var unvisitedClass in unvisitedClasses) { logger.InfoFormat(unvisitedClass); } logger.InfoFormat(""); logger.InfoFormat("====Unvisited Methods===="); foreach (var unvisitedMethod in unvisitedMethods) { logger.InfoFormat(unvisitedMethod); } } } else { logger.InfoFormat("No results - no assemblies that matched the supplied filter were instrumented"); logger.InfoFormat(" this could be due to missing PDBs for the assemblies that match the filter"); logger.InfoFormat(" please review the output file and refer to the Usage guide (Usage.rtf)"); } }
public virtual void ToLog(ILog log) { // Remove password from logging var safeConnectionString = new SqlConnectionStringBuilder(ConnectionString); if (!string.IsNullOrEmpty(safeConnectionString.Password)) { safeConnectionString.Password = "******"; } log.InfoFormat(" {0}: {1}", Name, safeConnectionString); // Validate that the connection string does not provide both Integrated Security AND user/password credentials bool hasUserCreds = !string.IsNullOrEmpty(safeConnectionString.UserID) || !string.IsNullOrEmpty(safeConnectionString.Password); if (safeConnectionString.IntegratedSecurity == hasUserCreds) { log.Error("=================================================="); log.ErrorFormat("Connection string for '{0}' may not contain both Integrated Security and User ID/Password credentials. " + "Review the readme.md and update the config file.", safeConnectionString.DataSource); log.Error("=================================================="); } }
public override void Run() { XmlConfigurator.Configure(); _logger = LogManager.GetLogger(GetType()); try { _logger.Info("Starting Worker Role"); //wire up services new AzureBootstrap().Init(); Trace.TraceInformation("Distributr.Azure.CommandWorkerRole entry point called", "Information"); CloudQueueMessage msg = null; IBusSubscriber busSubscriber = ObjectFactory.GetInstance<IBusSubscriber>(); bool validSettings = CheckValidSettingsAtStartup(); while (true) { //TODO add retry limit strategy try { bool messageFound = false; // If OnStop has been called, return to do a graceful shutdown. if (onStopCalled == true) { Trace.TraceInformation("onStopCalled WorkerRoleB"); returnedFromRunMethod = true; return; } msg = _incomingCommandQueue.GetMessage(); bool processed = false; if (msg != null) { int retrycount = msg.DequeueCount; BusMessage busMessage = msg.FromMessage(); _logger.InfoFormat("Message Id : {0} - Type : {1} Retry count : {2}", busMessage.MessageId, busMessage.CommandType, retrycount); using (IContainer nested = ObjectFactory.Container.GetNestedContainer()) { ICommandProcessingAuditRepository _processingAudit = nested.GetInstance<ICommandProcessingAuditRepository>(); var validDequeProcess = CheckValidDequeueProcess(msg, _processingAudit, busMessage); if(!validDequeProcess) processed = true; if (validDequeProcess) { _processingAudit.SetCommandStatus(busMessage.MessageId, CommandProcessingStatus.SubscriberProcessBegin); IBusSubscriber subscriber = nested.GetInstance<IBusSubscriber>(); subscriber.Handle(busMessage); Thread.Sleep(50); CommandProcessingAudit auditItem = _processingAudit.GetByCommandId(busMessage.MessageId); if (auditItem.Status == CommandProcessingStatus.Complete) { _logger.Info("Complete processing .. removing from queue"); _incomingCommandQueue.DeleteMessage(msg); processed = true; } else { _logger.InfoFormat("Did not process message {0} {1} ..... Will be retried ", busMessage.MessageId, busMessage.CommandType); //default queue message invisibility is 30 seconds if (busMessage.CommandType.StartsWith("Add")) { _logger.InfoFormat( "Add message {0} - {1} requeued for 50 seconds-------------------------", busMessage.MessageId, busMessage.CommandType); _incomingCommandQueue.UpdateMessage(msg, TimeSpan.FromSeconds(50), MessageUpdateFields.Visibility); } if (busMessage.CommandType.StartsWith("Confirm")) { _logger.InfoFormat( "Confirm message {0} - {1} requeued for 100 seconds--------------------------", busMessage.MessageId, busMessage.CommandType); _incomingCommandQueue.UpdateMessage(msg, TimeSpan.FromSeconds(100), MessageUpdateFields.Visibility); } } } } //introduce a delay for retries over 20 to reduce azure transaction count if (!processed && retrycount > 20) { _logger.Info("s5000"); Thread.Sleep(5000); } _logger.Info("<<................................................. Command Processing Complete ......................................................................................>>"); _logger.Info(" "); } else { _logger.Info("s10000"); Thread.Sleep(10000); } Trace.TraceInformation("Working", "Information"); } catch (Exception ex) { _logger.Error("Run while Error", ex); string err = ex.Message; if (ex.InnerException != null) { err += " Inner Exception: " + ex.InnerException.Message; } Trace.TraceError(err); } } } catch (Exception ex) { _logger.Error("Global Error",ex); } }
public override void ToLog(ILog log)
{
    base.ToLog(log);

    // Attempt to connect to the server and get basic details about the server and the databases.
    Dictionary<string, DatabaseDetails> databaseDetailsByName;
    try
    {
        var queryLocator = new QueryLocator(new DapperWrapper());
        SqlQuery serverDetailsQuery = queryLocator.PrepareQueries(new[] {typeof (SqlServerDetails),}, false).Single();
        SqlQuery databasesDetailsQuery = queryLocator.PrepareQueries(new[] {typeof (DatabaseDetails),}, false).Single();

        using (var conn = new SqlConnection(ConnectionString))
        {
            // Log the server details
            SqlServerDetails serverDetails = serverDetailsQuery.Query<SqlServerDetails>(conn, this).Single();
            LogVerboseSqlResults(serverDetailsQuery, new[] {serverDetails});
            log.InfoFormat(" {0} {1} {2} ({3})", serverDetails.SQLTitle, serverDetails.Edition, serverDetails.ProductLevel, serverDetails.ProductVersion);

            // Store these for reporting below
            DatabaseDetails[] databasesDetails = databasesDetailsQuery.DatabaseMetricQuery<DatabaseDetails>(conn, this).ToArray();
            LogVerboseSqlResults(databasesDetailsQuery, databasesDetails);
            databaseDetailsByName = databasesDetails.ToDictionary(d => d.DatabaseName);
        }
    }
    catch (Exception e)
    {
        // Just log some details here. The subsequent queries for metrics yield more error details.
        log.ErrorFormat(" Unable to connect: {0}", e.Message);
        databaseDetailsByName = null;
    }

    bool hasExplicitIncludedDatabases = IncludedDatabaseNames.Any();
    if (hasExplicitIncludedDatabases)
    {
        // Show the user the databases we'll be working from
        foreach (Database database in IncludedDatabases)
        {
            string message = " Including DB: " + database.Name;

            // When the details are reachable, show them
            if (databaseDetailsByName != null)
            {
                DatabaseDetails details;
                if (databaseDetailsByName.TryGetValue(database.Name, out details))
                {
                    message += string.Format(" [CompatibilityLevel={0};State={1}({2});CreateDate={3:yyyy-MM-dd};UserAccess={4}({5})]",
                                             details.compatibility_level, details.state_desc, details.state, details.create_date,
                                             details.user_access_desc, details.user_access);
                }
                else
                {
                    // More error details are reported with metric queries
                    message += " [Unable to find database information]";
                }
            }

            log.Info(message);
        }
    }
    else if (databaseDetailsByName != null)
    {
        // The user didn't specifically include any databases.
        // Report details for all of the DBs we expect to gather metrics against.
        foreach (DatabaseDetails details in databaseDetailsByName.Values)
        {
            log.InfoFormat(" Including DB: {0} [CompatibilityLevel={1};State={2}({3});CreateDate={4:yyyy-MM-dd};UserAccess={5}({6})]",
                           details.DatabaseName, details.compatibility_level, details.state_desc, details.state, details.create_date,
                           details.user_access_desc, details.user_access);
        }
    }

    // If there are explicitly included DBs, log the excluded DBs as DEBUG info.
    Action<string> logger = hasExplicitIncludedDatabases ? (Action<string>) log.Debug : log.Info;
    foreach (string database in ExcludedDatabaseNames)
    {
        logger(" Excluding DB: " + database);
    }
}
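// The delegate assignment at the end (Action<string> logger = ... ? log.Debug : log.Info)
// is a handy way to pick a log level at runtime without duplicating the loop. A small
// sketch of the same trick, assuming a log4net-style ILog; the class and method names
// are made up for illustration.
using System;
using log4net;

static class LevelSelection
{
    // Writes each name at DEBUG when verboseOnly is set, otherwise at INFO,
    // by selecting the method group once instead of branching inside the loop.
    public static void LogNames(ILog log, string[] names, bool verboseOnly)
    {
        Action<string> write = verboseOnly ? (Action<string>)log.Debug : log.Info;
        foreach (var name in names)
        {
            write(" Excluding DB: " + name);
        }
    }
}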
/// <summary>
///     Initializes library.
/// </summary>
static XInputDll()
{
    Log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
    Log.InfoFormat("Library loaded by process {0} [{1}]",
        Process.GetCurrentProcess().ProcessName,
        Process.GetCurrentProcess().MainWindowTitle);

    var myself = Assembly.GetExecutingAssembly().GetName();
    var myPath = Assembly.GetExecutingAssembly().Location;
    var myName = Path.GetFileName(myPath);
    Log.InfoFormat("Initializing library {0} [{1}]", myName, myself.Version);

    try
    {
        var basePath = BasePath;
        Log.DebugFormat("ScpToolkit bin path: {0}", basePath);

        var controlPath = ScpControlPath;
        Log.DebugFormat("ScpControl bin path: {0}", controlPath);

        // resolve assembly dependencies
        AppDomain.CurrentDomain.AssemblyResolve += (sender, args) =>
        {
            var asmName = new AssemblyName(args.Name).Name;
            var asmPath = Path.Combine(basePath, string.Format("{0}.dll", asmName));

            Log.DebugFormat("Loading assembly {0} from {1}", asmName, asmPath);

            return Assembly.LoadFrom(asmPath);
        };

        var scpControl = Assembly.LoadFrom(controlPath);
        var scpProxyType = scpControl.GetType("ScpControl.ScpProxy");

        Proxy = Activator.CreateInstance(scpProxyType);
        Proxy.Start();
    }
    catch (Exception ex)
    {
        Log.FatalFormat("Error during library initialization: {0}", ex);
        return;
    }

    // if no custom path specified by user, use DLL in system32 dir
    var xinputPath = !string.IsNullOrEmpty(XInputDllPath) && File.Exists(XInputDllPath)
        ? XInputDllPath
        : Path.Combine(Environment.SystemDirectory, myName);

    Log.DebugFormat("Original XInput DLL path: {0}", xinputPath);

    NativeDllHandle = Kernel32Natives.LoadLibrary(xinputPath);

    if (NativeDllHandle == IntPtr.Zero)
    {
        Log.FatalFormat("Couldn't load native DLL: {0}", new Win32Exception(Marshal.GetLastWin32Error()));
        return;
    }

    Log.Info("Library initialized");
}
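// The AssemblyResolve hook above is the standard way to load dependencies from a directory
// that is not on the probing path of the host process (here, a DLL injected into a foreign
// process). A generic sketch of that hook; the class name and path parameter are made up
// for illustration.
using System;
using System.IO;
using System.Reflection;

static class PrivateBinResolver
{
    // Hooks AssemblyResolve so missing assemblies are probed in a private directory.
    // Returning null lets the default resolution (and its failure handling) continue
    // for anything not found there.
    public static void Install(string privateBinPath)
    {
        AppDomain.CurrentDomain.AssemblyResolve += (sender, args) =>
        {
            var name = new AssemblyName(args.Name).Name;
            var candidate = Path.Combine(privateBinPath, name + ".dll");
            return File.Exists(candidate) ? Assembly.LoadFrom(candidate) : null;
        };
    }
}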
public void ToLog(ILog log)
{
    // Pending review by New Relic before adding this information
    // log.Info(" New Relic Key: " + LicenseKey);
    log.Info(" Version: " + Version);
    log.Info(" Test Mode: " + (TestMode ? "Yes" : "No"));
    log.Info(" Windows Service: " + (Environment.UserInteractive ? "No" : "Yes"));
    log.InfoFormat(@" User: {0}\{1}", Environment.UserDomainName, Environment.UserName);
    log.Info(" Run as Administrator: " + (IsProcessElevated ? "Yes" : "No"));
    log.Info(" Total Endpoints: " + Endpoints.Length);
    log.Info(" Poll Interval Seconds: " + PollIntervalSeconds);

    var sqlServerEndpoints = Endpoints.OfType<SqlServerEndpoint>().ToArray();
    if (sqlServerEndpoints.Any())
    {
        log.InfoFormat(" SqlServerEndpoints: {0}", sqlServerEndpoints.Count());
        log.InfoFormat(" PluginGUID: {0}", Constants.SqlServerComponentGuid);
        foreach (ISqlEndpoint endpoint in sqlServerEndpoints)
        {
            endpoint.ToLog(log);
        }
        log.Info(string.Empty);
    }
    else
    {
        log.Debug("No SQL Server endpoints configured.");
    }

    var azureEndpoints = Endpoints.OfType<AzureSqlEndpoint>().ToArray();
    if (azureEndpoints.Any())
    {
        log.InfoFormat(" AzureEndpoints: {0}", azureEndpoints.Count());
        log.InfoFormat(" PluginGUID: {0}", Constants.SqlAzureComponentGuid);
        foreach (ISqlEndpoint endpoint in azureEndpoints)
        {
            endpoint.ToLog(log);
        }
        log.Info(string.Empty);
    }
    else
    {
        log.Debug("No Azure SQL endpoints configured.");
    }
}
private static void DisplayResults(IPersistance persistance, ICommandLine parser, ILog logger)
{
    if (!logger.IsInfoEnabled)
        return;

    var CoverageSession = persistance.CoverageSession;

    var totalClasses = 0;
    var visitedClasses = 0;

    var altTotalClasses = 0;
    var altVisitedClasses = 0;

    var totalSeqPoint = 0;
    var visitedSeqPoint = 0;
    var totalMethods = 0;
    var visitedMethods = 0;

    var altTotalMethods = 0;
    var altVisitedMethods = 0;

    var totalBrPoint = 0;
    var visitedBrPoint = 0;

    var unvisitedClasses = new List<string>();
    var unvisitedMethods = new List<string>();

    if (CoverageSession.Modules != null)
    {
        foreach (var @class in from module in CoverageSession.Modules.Where(x => x.Classes != null)
                               from @class in module.Classes.Where(c => !c.ShouldSerializeSkippedDueTo())
                               select @class)
        {
            if (@class.Methods == null)
                continue;

            if ((@class.Methods.Any(x => !x.ShouldSerializeSkippedDueTo() && x.SequencePoints.Any(y => y.VisitCount > 0))))
            {
                visitedClasses += 1;
                totalClasses += 1;
            }
            else if ((@class.Methods.Any(x => x.FileRef != null)))
            {
                totalClasses += 1;
                unvisitedClasses.Add(@class.FullName);
            }

            if (@class.Methods.Any(x => x.Visited))
            {
                altVisitedClasses += 1;
                altTotalClasses += 1;
            }
            else if (@class.Methods.Any())
            {
                altTotalClasses += 1;
            }

            foreach (var method in @class.Methods.Where(x => !x.ShouldSerializeSkippedDueTo()))
            {
                if ((method.SequencePoints.Any(x => x.VisitCount > 0)))
                {
                    visitedMethods += 1;
                    totalMethods += 1;
                }
                else if (method.FileRef != null)
                {
                    totalMethods += 1;
                    unvisitedMethods.Add(string.Format("{0}", method.Name));
                }

                altTotalMethods += 1;
                if (method.Visited)
                {
                    altVisitedMethods += 1;
                }

                totalSeqPoint += method.SequencePoints.Count();
                visitedSeqPoint += method.SequencePoints.Count(pt => pt.VisitCount != 0);

                totalBrPoint += method.BranchPoints.Count();
                visitedBrPoint += method.BranchPoints.Count(pt => pt.VisitCount != 0);
            }
        }
    }

    if (totalClasses > 0)
    {
        logger.InfoFormat("Visited Classes {0} of {1} ({2})", visitedClasses, totalClasses, (double)visitedClasses * 100.0 / (double)totalClasses);
        logger.InfoFormat("Visited Methods {0} of {1} ({2})", visitedMethods, totalMethods, (double)visitedMethods * 100.0 / (double)totalMethods);
        logger.InfoFormat("Visited Points {0} of {1} ({2})", visitedSeqPoint, totalSeqPoint, (double)visitedSeqPoint * 100.0 / (double)totalSeqPoint);
        logger.InfoFormat("Visited Branches {0} of {1} ({2})", visitedBrPoint, totalBrPoint, (double)visitedBrPoint * 100.0 / (double)totalBrPoint);

        logger.InfoFormat("");
        logger.InfoFormat("==== Alternative Results (includes all methods including those without corresponding source) ====");
        logger.InfoFormat("Alternative Visited Classes {0} of {1} ({2})", altVisitedClasses, altTotalClasses, (double)altVisitedClasses * 100.0 / (double)altTotalClasses);
        logger.InfoFormat("Alternative Visited Methods {0} of {1} ({2})", altVisitedMethods, altTotalMethods, (double)altVisitedMethods * 100.0 / (double)altTotalMethods);

        if (parser.ShowUnvisited)
        {
            logger.InfoFormat("");
            logger.InfoFormat("====Unvisited Classes====");
            foreach (var unvisitedClass in unvisitedClasses)
            {
                logger.InfoFormat(unvisitedClass);
            }

            logger.InfoFormat("");
            logger.InfoFormat("====Unvisited Methods====");
            foreach (var unvisitedMethod in unvisitedMethods)
            {
                logger.InfoFormat(unvisitedMethod);
            }
        }
    }
    else
    {
        logger.InfoFormat("No results - no assemblies that matched the supplied filter were instrumented (missing PDBs?)");
    }
}
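// One caveat in the summary block above: only totalClasses is guarded against zero, so a run
// whose instrumented methods contain no branch (or sequence) points would print NaN for those
// percentage lines. A hypothetical guard helper, for illustration only; it is not part of the
// code above.
using System;

static class CoverageMath
{
    // Hypothetical helper: avoids NaN/Infinity in the report when a category has nothing instrumented.
    public static double Percentage(int visited, int total)
    {
        return total == 0 ? 0.0 : visited * 100.0 / total;
    }

    static void Main()
    {
        Console.WriteLine(Percentage(3, 4)); // 75
        Console.WriteLine(Percentage(0, 0)); // 0 instead of NaN
    }
}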