public void SafeKillAllRunningJobInstances(IJobLogger logger)
{
    try
    {
        Process[] processes = Process.GetProcesses();

        foreach (Process process in processes)
        {
            StringDictionary processEnvironment;
            bool success = ProcessEnvironment.TryGetEnvironmentVariables(process, out processEnvironment);
            if (success && processEnvironment.ContainsKey(GetJobEnvironmentKey()))
            {
                try
                {
                    process.Kill(true, TraceFactory.GetTracer());
                }
                catch (Exception ex)
                {
                    if (!process.HasExited)
                    {
                        logger.LogWarning("Failed to kill process - {0} for job - {1}\n{2}".FormatInvariant(process.ProcessName, JobName, ex));
                    }
                }
            }
        }
    }
    catch (Exception ex)
    {
        logger.LogWarning(ex.ToString());
    }
}
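For context, a minimal sketch of how a job process could be stamped so the scan above can find it: the runner sets a marker environment variable on the child process, and SafeKillAllRunningJobInstances matches any process whose environment contains the key returned by GetJobEnvironmentKey(). The marker name and helper class below are hypothetical, not the original implementation; reading another process's environment (as ProcessEnvironment.TryGetEnvironmentVariables does) requires native APIs and is not shown.

using System.Diagnostics;

public static class JobProcessMarkerSketch
{
    // Hypothetical marker key; the real key comes from GetJobEnvironmentKey().
    private const string JobMarkerKey = "WEBJOB_INSTANCE_MARKER";

    // Launch a job executable with the marker set so the runner can later
    // identify (and kill) every process belonging to this job.
    public static Process StartMarkedJobProcess(string exePath, string arguments, string jobName)
    {
        var startInfo = new ProcessStartInfo(exePath, arguments)
        {
            UseShellExecute = false // required when setting environment variables
        };

        // Child processes inherit this environment block, which is what lets
        // the kill scan match them back to the job by name.
        startInfo.EnvironmentVariables[JobMarkerKey] = jobName;

        return Process.Start(startInfo);
    }
}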
/// <summary>
/// This can be called from a different thread, so it must run under the lock.
/// </summary>
/// <param name="sender">The message consumer reporting the disconnect.</param>
private void MessageBrokerDisconnected(IMessageConsumer sender)
{
    Logger.LogWarning("Message broker disconnected... stopping workers");
    StopJobWorkers(true, true);
    Logger.LogWarning("Message broker disconnected... stopped workers");
}
private void CacheJobBinaries(IJobLogger logger)
{
    if (WorkingDirectory != null)
    {
        try
        {
            int currentHash = CalculateHashForJob(JobBinariesPath);
            int lastHash = CalculateHashForJob(WorkingDirectory);

            if (lastHash == currentHash)
            {
                return;
            }
        }
        catch (Exception ex)
        {
            // Log error and ignore it as it's not critical to cache job binaries
            logger.LogError("Failed to calculate hash for WebJob: " + ex);
            _analytics.UnexpectedException(ex);
        }
    }

    SafeKillAllRunningJobInstances(logger);

    if (FileSystemHelpers.DirectoryExists(JobTempPath))
    {
        FileSystemHelpers.DeleteDirectorySafe(JobTempPath, ignoreErrors: true);
    }

    if (FileSystemHelpers.DirectoryExists(JobTempPath))
    {
        logger.LogWarning("Failed to delete temporary directory");
    }

    try
    {
        OperationManager.Attempt(() =>
        {
            var tempJobInstancePath = Path.Combine(JobTempPath, Path.GetRandomFileName());

            FileSystemHelpers.CopyDirectoryRecursive(JobBinariesPath, tempJobInstancePath);
            UpdateAppConfigs(tempJobInstancePath);

            WorkingDirectory = tempJobInstancePath;
        });
    }
    catch (Exception ex)
    {
        //Status = "Worker is not running due to an error";
        //TraceError("Failed to copy bin directory: " + ex);
        logger.LogError("Failed to copy job files: " + ex);
        _analytics.UnexpectedException(ex);

        // job disabled
        WorkingDirectory = null;
    }
}
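A rough sketch of what a CalculateHashForJob-style check might look like: fold every file's relative path, size, and last-write time into one value so that two directory trees with the same contents hash the same, and any edit, addition, or deletion triggers a fresh copy. The helper below is an assumption for illustration only; the real implementation may differ.

using System;
using System.IO;
using System.Linq;

public static class JobDirectoryHashSketch
{
    public static int CalculateDirectoryHash(string root)
    {
        unchecked
        {
            int hash = 17;

            // Sort for a deterministic result regardless of enumeration order.
            foreach (string file in Directory.EnumerateFiles(root, "*", SearchOption.AllDirectories)
                                             .OrderBy(f => f, StringComparer.OrdinalIgnoreCase))
            {
                var info = new FileInfo(file);
                string relativePath = file.Substring(root.Length).TrimStart(Path.DirectorySeparatorChar);

                // Combine name, size and timestamp; any change to the tree
                // changes the aggregate hash and forces a re-copy above.
                hash = (hash * 31) + StringComparer.OrdinalIgnoreCase.GetHashCode(relativePath);
                hash = (hash * 31) + info.Length.GetHashCode();
                hash = (hash * 31) + info.LastWriteTimeUtc.GetHashCode();
            }

            return hash;
        }
    }
}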
public void LogTextFileWarning_TestOk(string message)
{
    try
    {
        loggerText.LogWarning(message);
        Assert.IsTrue(true);
    }
    catch (Exception ex)
    {
        Assert.IsTrue(false, ex.ToString());
    }
}
protected void CloseConnection()
{
    lock (_connectionLock)
    {
        if (Connection == null)
        {
            return;
        }

        ModelPool.SafeDispose(Logger);
        ModelPool = null;

        Connection.CallbackException -= ConnectionOnCallbackException;
        Connection.ConnectionBlocked -= ConnectionOnConnectionBlocked;
        Connection.ConnectionUnblocked -= ConnectionOnConnectionUnblocked;
        Connection.ConnectionShutdown -= ConnectionOnConnectionShutdown;

        Connection.SafeClose(Logger);
        Connection = null;

        Logger.LogWarning("Connection closed and set to null");
    }
}
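For symmetry, a hedged sketch of the setup side implied by CloseConnection: the connection is created under the same lock and the same four events are wired up so teardown can detach them cleanly. This assumes the RabbitMQ .NET client, which exposes these events on IConnection; the ModelPool placeholder and empty handler bodies are stand-ins, not the original code.

using System;
using RabbitMQ.Client;
using RabbitMQ.Client.Events;

public class ConnectionHolderSketch
{
    private readonly object _connectionLock = new object();

    protected IConnection Connection { get; private set; }
    protected object ModelPool { get; private set; } // placeholder for the real channel pool type

    protected void OpenConnection(IConnectionFactory connectionFactory)
    {
        lock (_connectionLock)
        {
            if (Connection != null)
            {
                return; // already connected
            }

            Connection = connectionFactory.CreateConnection();

            // Mirror image of the handlers removed in CloseConnection.
            Connection.CallbackException += ConnectionOnCallbackException;
            Connection.ConnectionBlocked += ConnectionOnConnectionBlocked;
            Connection.ConnectionUnblocked += ConnectionOnConnectionUnblocked;
            Connection.ConnectionShutdown += ConnectionOnConnectionShutdown;

            ModelPool = new object(); // stand-in for creating the channel pool
        }
    }

    private void ConnectionOnCallbackException(object sender, CallbackExceptionEventArgs e) { }
    private void ConnectionOnConnectionBlocked(object sender, ConnectionBlockedEventArgs e) { }
    private void ConnectionOnConnectionUnblocked(object sender, EventArgs e) { }
    private void ConnectionOnConnectionShutdown(object sender, ShutdownEventArgs e) { }
}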
private void CacheJobBinaries(JobBase job, IJobLogger logger)
{
    bool isInPlaceDefault = job.ScriptHost.GetType() == typeof(NodeScriptHost);
    if (JobSettings.GetIsInPlace(isInPlaceDefault))
    {
        _inPlaceWorkingDirectory = JobBinariesPath;
        SafeKillAllRunningJobInstances(logger);
        UpdateAppConfigs(WorkingDirectory);
        return;
    }

    _inPlaceWorkingDirectory = null;

    if (WorkingDirectory != null)
    {
        try
        {
            int currentHash = CalculateHashForJob(JobBinariesPath);
            int lastHash = CalculateHashForJob(WorkingDirectory);

            if (lastHash == currentHash)
            {
                return;
            }
        }
        catch (Exception ex)
        {
            // Log error and ignore it as it's not critical to cache job binaries
            logger.LogWarning("Failed to calculate hash for WebJob, continue to copy WebJob binaries (this will not affect WebJob run)\n" + ex);
            _analytics.UnexpectedException(ex);
        }
    }

    SafeKillAllRunningJobInstances(logger);

    if (FileSystemHelpers.DirectoryExists(JobTempPath))
    {
        FileSystemHelpers.DeleteDirectorySafe(JobTempPath, ignoreErrors: true);
    }

    if (FileSystemHelpers.DirectoryExists(JobTempPath))
    {
        logger.LogWarning("Failed to delete temporary directory");
    }

    try
    {
        OperationManager.Attempt(() =>
        {
            var tempJobInstancePath = Path.Combine(JobTempPath, Path.GetRandomFileName());

            FileSystemHelpers.CopyDirectoryRecursive(JobBinariesPath, tempJobInstancePath);
            UpdateAppConfigs(tempJobInstancePath);

            _workingDirectory = tempJobInstancePath;
        });
    }
    catch (Exception ex)
    {
        //Status = "Worker is not running due to an error";
        //TraceError("Failed to copy bin directory: " + ex);
        logger.LogError("Failed to copy job files: " + ex);
        _analytics.UnexpectedException(ex);

        // job disabled
        _workingDirectory = null;
    }
}
public void SafeKillAllRunningJobInstances(IJobLogger logger)
{
    try
    {
        Process[] processes = Process.GetProcesses();

        foreach (Process process in processes)
        {
            Dictionary<string, string> processEnvironment;
            bool success = process.TryGetEnvironmentVariables(out processEnvironment);
            if (success && processEnvironment.ContainsKey(GetJobEnvironmentKey()))
            {
                try
                {
                    process.Kill(true, TraceFactory.GetTracer());
                }
                catch (Exception ex)
                {
                    if (!process.HasExited)
                    {
                        logger.LogWarning("Failed to kill process - {0} for job - {1}\n{2}".FormatInvariant(process.ProcessName, JobName, ex));
                    }
                }
            }
        }
    }
    catch (Exception ex)
    {
        logger.LogWarning(ex.ToString());
    }
}
private void CacheJobBinaries(JobBase job, IJobLogger logger)
{
    bool isInPlaceDefault = job.ScriptHost.GetType() == typeof(NodeScriptHost);
    if (JobSettings.GetIsInPlace(isInPlaceDefault))
    {
        _inPlaceWorkingDirectory = JobBinariesPath;
        SafeKillAllRunningJobInstances(logger);
        UpdateAppConfigs(WorkingDirectory, _analytics);
        return;
    }

    _inPlaceWorkingDirectory = null;

    Dictionary<string, FileInfoBase> sourceDirectoryFileMap = GetJobDirectoryFileMap(JobBinariesPath);
    if (WorkingDirectory != null)
    {
        try
        {
            var workingDirectoryFileMap = GetJobDirectoryFileMap(WorkingDirectory);
            if (!JobDirectoryHasChanged(sourceDirectoryFileMap, workingDirectoryFileMap, _cachedSourceDirectoryFileMap, logger))
            {
                // no changes detected, so skip the cache/copy step below
                return;
            }
        }
        catch (Exception ex)
        {
            // Log error and ignore it, since this diff optimization isn't critical.
            // We'll just do a full copy in this case.
            logger.LogWarning("Failed to diff WebJob directories for changes. Continuing to copy WebJob binaries (this will not affect the WebJob run)\n" + ex);
            _analytics.UnexpectedException(ex);
        }
    }

    SafeKillAllRunningJobInstances(logger);

    if (FileSystemHelpers.DirectoryExists(JobTempPath))
    {
        FileSystemHelpers.DeleteDirectorySafe(JobTempPath, ignoreErrors: true);
    }

    if (FileSystemHelpers.DirectoryExists(JobTempPath))
    {
        logger.LogWarning("Failed to delete temporary directory");
    }

    try
    {
        OperationManager.Attempt(() =>
        {
            var tempJobInstancePath = Path.Combine(JobTempPath, Path.GetRandomFileName());

            FileSystemHelpers.CopyDirectoryRecursive(JobBinariesPath, tempJobInstancePath);
            UpdateAppConfigs(tempJobInstancePath, _analytics);

            _workingDirectory = tempJobInstancePath;

            // cache the file map snapshot for next time (to aid in detecting
            // file deletions)
            _cachedSourceDirectoryFileMap = sourceDirectoryFileMap;
        });
    }
    catch (Exception ex)
    {
        //Status = "Worker is not running due to an error";
        //TraceError("Failed to copy bin directory: " + ex);
        logger.LogError("Failed to copy job files: " + ex);
        _analytics.UnexpectedException(ex);

        // job disabled
        _workingDirectory = null;
    }
}
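A minimal sketch, with hypothetical helper names, of the diff that GetJobDirectoryFileMap and JobDirectoryHasChanged imply above: map relative path to file metadata for each tree and report a change when a file is added, removed, resized, or rewritten. The real helpers (which also use the cached source map to catch deletions) may differ in detail; plain FileInfo is used here instead of FileInfoBase to keep the sketch self-contained.

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;

public static class JobDirectoryDiffSketch
{
    public static Dictionary<string, FileInfo> GetDirectoryFileMap(string root)
    {
        return Directory.EnumerateFiles(root, "*", SearchOption.AllDirectories)
            .ToDictionary(
                path => path.Substring(root.Length).TrimStart(Path.DirectorySeparatorChar),
                path => new FileInfo(path),
                StringComparer.OrdinalIgnoreCase);
    }

    public static bool DirectoryHasChanged(
        Dictionary<string, FileInfo> source,
        Dictionary<string, FileInfo> working)
    {
        // A different file count means something was added or removed.
        if (source.Count != working.Count)
        {
            return true;
        }

        foreach (KeyValuePair<string, FileInfo> pair in source)
        {
            FileInfo workingFile;
            if (!working.TryGetValue(pair.Key, out workingFile))
            {
                // Present in the source but missing from the working copy.
                return true;
            }

            // A size or timestamp difference means the binaries were updated.
            if (pair.Value.Length != workingFile.Length ||
                pair.Value.LastWriteTimeUtc != workingFile.LastWriteTimeUtc)
            {
                return true;
            }
        }

        return false;
    }
}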
private void CacheJobBinaries(IJobLogger logger)
{
    if (WorkingDirectory != null)
    {
        int currentHash = CalculateHashForJob(JobBinariesPath);
        int lastHash = CalculateHashForJob(WorkingDirectory);

        if (lastHash == currentHash)
        {
            return;
        }
    }

    SafeKillAllRunningJobInstances(logger);

    if (FileSystem.Directory.Exists(JobTempPath))
    {
        FileSystemHelpers.DeleteDirectorySafe(JobTempPath, true);
    }

    if (FileSystem.Directory.Exists(JobTempPath))
    {
        logger.LogWarning("Failed to delete temporary directory");
    }

    try
    {
        var tempJobInstancePath = Path.Combine(JobTempPath, Path.GetRandomFileName());

        FileSystemHelpers.CopyDirectoryRecursive(FileSystem, JobBinariesPath, tempJobInstancePath);
        UpdateAppConfigs(tempJobInstancePath);

        WorkingDirectory = tempJobInstancePath;
    }
    catch (Exception ex)
    {
        //Status = "Worker is not running due to an error";
        //TraceError("Failed to copy bin directory: " + ex);
        logger.LogError("Failed to copy job files: " + ex);

        // job disabled
        WorkingDirectory = null;
    }
}