// Runs the worker loop for the configured handler.
// - ServiceHandler: request/reply loop until Recv() returns null (interrupted).
// - WorkerHandler:  hands the raw worker to the handler once.
// Anything else is a configuration error.
// Fixed: the original only disposed the worker/connection on the normal exit
// path, leaking both if the handler threw (including the ZBusException branch).
public void Run()
{
    Connection connection = new Connection(this.connCfg);
    Worker worker = null;
    try
    {
        worker = new Worker(connection, this.workerCfg);
        if (this.handler is ServiceHandler serviceHandler)
        {
            while (true)
            {
                ZMsg request = worker.Recv();
                if (request == null)
                {
                    break; // interrupted
                }
                ZMsg reply = serviceHandler.HandleRequest(request);
                if (reply != null)
                {
                    worker.Send(reply);
                }
            }
        }
        else if (this.handler is WorkerHandler workerHandler) // typo "workerHanlder" fixed
        {
            workerHandler.HandleWorker(worker);
        }
        else
        {
            throw new ZBusException("handler invalid");
        }
    }
    finally
    {
        // Dispose on every exit path, including exceptions.
        worker?.Dispose();
        connection.Dispose();
    }
}
// Interactive crawl loop: prompts for a start and a goal page, spins up
// fetcher/parser workers, and pumps jobs until the goal page has been visited.
// Returns true when the user answers the restart prompt with 1/y/Y.
// Fixed: Console.ReadLine() returns null at end-of-input; the original
// dereferenced .Length (and passed null to Regex.IsMatch) and could NRE.
private static bool Run()
{
    const int numFetchers = 9;
    const int numParsers = 3;

    Console.Write("Starting page: ");
    string startPage = Console.ReadLine();
    if (string.IsNullOrEmpty(startPage))
    {
        startPage = @"https://fi.wikipedia.org/wiki/Hyperlinkki";
    }
    Console.Write("Goal page: ");
    string goalPage = Console.ReadLine();
    if (string.IsNullOrEmpty(goalPage))
    {
        goalPage = @"https://fi.wikipedia.org/wiki/Fyysikko";
    }

    Uri goal = new Uri(goalPage, UriKind.Absolute);
    Console.WriteLine($"Looking for a path from {startPage} to {goalPage}");

    DataHandler dataHandler = new DataHandler();
    WorkerHandler workerHandler = new WorkerHandler(dataHandler, goal.Host);
    workerHandler.AddFetchers(numFetchers);
    workerHandler.AddParsers(numParsers);
    workerHandler.AddFetchJob(new FetcherArgs() { Url = startPage });

    Stopwatch logged = Stopwatch.StartNew();
    while (!dataHandler.HasFoundPage(goalPage))
    {
        workerHandler.StartNextFetchJob();
        workerHandler.StartNextParseJob();
        // Progress log is throttled to once per five seconds in non-verbose mode.
        if (!Program.Verbose && logged.Elapsed.TotalSeconds >= 5)
        {
            Console.WriteLine($"Visited pages: {dataHandler.PagesVisited}\tTo fetch: {workerHandler.FetchQueueCount}\tTo parse: {workerHandler.ParseQueueCount}");
            logged.Restart();
        }
        Thread.Yield();
    }
    workerHandler.Stop();

    Console.WriteLine("Goal page found!");
    Console.WriteLine(dataHandler.GetFinalResult(goalPage).ToString());
    Console.Write("Restart? ");
    // Null-safe: treat end-of-input as "no".
    return Regex.IsMatch(Console.ReadLine() ?? string.Empty, "[1yY]");
}
// Wires up the main window, its workers, and a fire-and-forget background loop
// that runs the workers roughly every five seconds while connected.
// Fixed: the loop used Thread.Sleep(100) inside an async lambda, blocking a
// thread-pool thread; replaced with await Task.Delay(100). The task was also
// assigned to an unused local `x`; it is now explicitly discarded.
public MainWindow()
{
    DataContext = ViewModel;
    InitializeComponent();
    Title = Declarations.APPLICATION_NAME;

    var searchItemFetcher = new SearchItemWorker(ViewModel);
    var checkTradePileWorker = new RefreshTradePileWorker(ViewModel);
    var workerHandler = new WorkerHandler(ViewModel, searchItemFetcher, checkTradePileWorker);

    Validate = new Validate(ViewModel);
    General = new General(ViewModel);
    File = new File(ViewModel);

    nextRunTime = DateTime.Now;
    addDelay = new TimeSpan(0, 0, 0);

    _ = Task.Run(async () =>
    {
        while (true)
        {
            if (ViewModel.IsConnected && DateTime.Now > nextRunTime)
            {
                try
                {
                    await workerHandler.RunWorkers();
                }
                catch (HandledException ex)
                {
                    if (ex.ForceDisconnect)
                    {
                        FifaMessageBox.Show(ex.Message);
                    }
                    ViewModel.IsConnected = !ex.ForceDisconnect;
                    // Back off for the number of minutes the exception requests.
                    addDelay = new TimeSpan(0, ex.Delay, 0);
                    if (ex.ClearSessionID)
                    {
                        ViewModel.SessionID = "";
                    }
                }
                catch (Exception ex)
                {
                    FifaMessageBox.Show(ex.Message);
                    ViewModel.IsConnected = false;
                }
                // Next run: 5 seconds plus any penalty delay, which is then reset.
                nextRunTime = DateTime.Now + new TimeSpan(0, 0, 5) + addDelay;
                addDelay = new TimeSpan(0, 0, 0);
            }
            await Task.Delay(100);
        }
    });
}
// Re-uploads submissions that were queued while offline. Entries that cannot
// be parsed, or that upload successfully, are removed from the queue; failed
// uploads stay queued for a later retry.
// NOTE(review): async void means exceptions here are unobservable by callers;
// consider returning Task once call sites are audited.
private async void CheckPendingSubmissions()
{
    var db = new LiteDBDatabase();
    var pendingJobs = db.GetPendingSubmissions();
    if (!pendingJobs.Any())
    {
        Log.Debug("No Pending Jobs found");
        return;
    }
    Log.Debug($"{pendingJobs.Count} pending jobs found...");

    var workerHandler = new WorkerHandler(_serverURL);
    foreach (var pJob in pendingJobs)
    {
        Jobs job = null;
        try
        {
            job = JsonConvert.DeserializeObject<Jobs>(pJob.JobJSON);
        }
        catch (Exception ex)
        {
            Log.Error($"For Job ID {pJob.ID}, could not parse {pJob.JobJSON} into a Jobs object due to {ex}");
        }

        if (job == null)
        {
            Log.Error($"Job was null - removing {pJob.ID} from Queue");
            // Fixed inconsistency: the original read pending jobs from the fresh
            // local `db` but removed entries via the `_db` field. Unified on the
            // local instance — TODO confirm `_db` refers to the same store.
            db.RemoveOfflineSubmission(pJob.ID);
            continue;
        }

        var result = await workerHandler.UpdateWorkAsync(job);
        if (result)
        {
            Log.Debug($"{job.ID} was successfully uploaded");
            db.RemoveOfflineSubmission(pJob.ID);
            continue;
        }
        Log.Debug($"{job.ID} was not able to be uploaded - will retry at a later date and time");
    }
}
// Background-worker callback: pushes any offline-queued submissions back to the
// server. Unparseable and successfully uploaded entries are dequeued; failures
// remain queued for the next pass.
private async void BwCheckin_DoWork(object sender, DoWorkEventArgs e)
{
    var manager = new SubmissionManager(new DatabaseManager(_db, null));
    var queued = manager.GetPendingSubmissions();
    if (!queued.Any())
    {
        Log.Debug("No Pending Jobs found");
        return;
    }
    Log.Debug($"{queued.Count} pending jobs found...");

    var uploader = new WorkerHandler(_config.WebServiceURL, _config.RegistrationKey);
    foreach (var pending in queued)
    {
        Jobs parsed = null;
        try
        {
            parsed = JsonConvert.DeserializeObject<Jobs>(pending.JobJSON);
        }
        catch (Exception ex)
        {
            Log.Error($"For Job ID {pending.ID}, could not parse {pending.JobJSON} into a Jobs object due to {ex}");
        }

        if (parsed == null)
        {
            Log.Error($"Job was null - removing {pending.ID} from Queue");
            manager.RemoveOfflineSubmission(pending.ID);
        }
        else if (await uploader.UpdateWorkAsync(parsed))
        {
            Log.Debug($"{parsed.ID} was successfully uploaded");
            manager.RemoveOfflineSubmission(pending.ID);
        }
        else
        {
            Log.Debug($"{parsed.ID} was not able to be uploaded - will retry at a later date and time");
        }
    }
}
// Builds the buffering pipeline: a WorkerHandler around the bufferBuilder,
// driven by a single-threaded AsynchronousWorker.
public StatsBufferize(
    BufferBuilder bufferBuilder,
    int workerMaxItemCount,
    TimeSpan? blockingQueueTimeout,
    TimeSpan maxIdleWaitBeforeSending)
{
    // One worker thread (the literal 1 below) means the handler — and the
    // bufferBuilder it wraps — is never invoked concurrently, so neither
    // needs to be thread safe.
    var bufferHandler = new WorkerHandler(bufferBuilder, maxIdleWaitBeforeSending);
    this._worker = new AsynchronousWorker<string>(
        bufferHandler,
        new Waiter(),
        1,
        workerMaxItemCount,
        blockingQueueTimeout);
}
// Builds the buffering pipeline: a WorkerHandler around the statsRouter,
// driven by a single-threaded AsynchronousWorker.
public StatsBufferize(
    StatsRouter statsRouter,
    int workerMaxItemCount,
    TimeSpan? blockingQueueTimeout,
    TimeSpan maxIdleWaitBeforeSending)
{
    // With workerThreadCount fixed at 1, the handler (and the statsRouter it
    // wraps) never runs concurrently, so neither needs to be thread safe.
    var routerHandler = new WorkerHandler(statsRouter, maxIdleWaitBeforeSending);
    this._worker = new AsynchronousWorker<Stats>(
        routerHandler,
        new Waiter(),
        workerThreadCount: 1,
        workerMaxItemCount,
        blockingQueueTimeout);
}
// Background-worker callback: checks this worker in with the server; on
// failure, logs the error and backs off before the next attempt.
// Fixed: the original called Thread.Sleep inside an async method, blocking the
// thread; replaced with an awaited Task.Delay (fully qualified, matching the
// original's qualification style, so no new using is required).
private async void BwCheckin_DoWork(object sender, DoWorkEventArgs e)
{
    var hostHandler = new WorkerHandler(_config.WebServiceURL, _config.RegistrationKey);
    // Call to checkin with the server
    var checkinResult = await hostHandler.AddUpdateWorkerAsync(_worker);
    if (checkinResult)
    {
        return;
    }
    Log.Error($"Failed to check in with {_config.WebServiceURL}");
    await System.Threading.Tasks.Task.Delay(Constants.LOOP_ERROR_INTERVAL_MS);
}
// Constructing a WorkerHandler with null arguments must not throw.
public void WorkerHandler_Null()
{
    _ = new WorkerHandler(null, null);
}
// AddUpdateWorkerAsync with a freshly constructed (default) Workers row must
// complete without throwing.
public async Task HostsHandler_DefaultAddUpdateHostAsync()
{
    var handler = new WorkerHandler(TEST_VALID_URL, "test");
    var emptyWorker = new DMTP.lib.dal.Databases.Tables.Workers();
    await handler.AddUpdateWorkerAsync(emptyWorker);
}
// AddUpdateWorkerAsync must tolerate a null worker argument.
public async Task HostsHandler_NullAddUpdateHostAsync()
{
    var handler = new WorkerHandler(TEST_VALID_URL, "test");
    await handler.AddUpdateWorkerAsync(null);
}
// Constructing a WorkerHandler with empty-string arguments must not throw.
public void HostsHandler_EmptyString()
{
    _ = new WorkerHandler(string.Empty, string.Empty);
}
// Constructing a WorkerHandler with null arguments must not throw.
public void HostsHandler_Null()
{
    _ = new WorkerHandler(null, null);
}
// Polls the server for one unit of training work, trains the requested model,
// and uploads the result. Returns true only when the trained model was
// successfully sent back; failed uploads are queued via AddToPending.
// Fixed: the original called Thread.Sleep inside an async method, blocking the
// thread for the whole polling interval; replaced with awaited Task.Delay
// (fully qualified, matching the original's qualification style).
public async Task<bool> Run(Hosts host, string serverURL)
{
    _host = host;
    _serverURL = serverURL;

    // Fire-and-forget re-submission of offline-queued work (async void callee).
    CheckPendingSubmissions();

    var workerHandler = new WorkerHandler(_serverURL);
    var work = await workerHandler.GetWorkAsync(_host.Name);
    if (work == null)
    {
        // Nothing to do (or connection issue) — wait out the polling interval.
        await System.Threading.Tasks.Task.Delay(Constants.LOOP_INTERVAL_MS);
        return false;
    }

    work.Started = true;
    work.StartTime = DateTime.Now;
    var result = await workerHandler.UpdateWorkAsync(work);
    if (!result)
    {
        await System.Threading.Tasks.Task.Delay(Constants.LOOP_INTERVAL_MS);
        return false;
    }

    if (!Directory.Exists(work.TrainingDataPath))
    {
        // Report the bad path back to the server as a completed-with-debug job.
        work.Completed = true;
        work.Debug = $"Path ({work.TrainingDataPath}) does not exist";
        work.CompletedTime = DateTime.Now;
        result = await workerHandler.UpdateWorkAsync(work);
        if (!result)
        {
            AddToPending(work);
        }
        return false;
    }

    var options = new TrainerCommandLineOptions
    {
        FolderOfData = work.TrainingDataPath,
        LogLevel = LogLevels.DEBUG
    };

    var (outputFile, metrics) = (string.Empty, string.Empty);
    switch (Enum.Parse<ModelType>(work.ModelType, true))
    {
        case ModelType.CLASSIFICATION:
            (outputFile, metrics) = new ClassificationEngine().TrainModel(options);
            break;
        case ModelType.CLUSTERING:
            (outputFile, metrics) = new ClusteringEngine().TrainModel(options);
            break;
    }

    if (File.Exists(outputFile))
    {
        work.Model = File.ReadAllBytes(outputFile);
    }
    work.ModelEvaluationMetrics = metrics;
    work.Completed = true;
    work.CompletedTime = DateTime.Now;

    result = await workerHandler.UpdateWorkAsync(work);
    if (result)
    {
        Console.WriteLine($"Successfully trained model and saved to {outputFile}");
    }
    else
    {
        AddToPending(work);
    }
    return result;
}
// Polls the server for one unit of training work, loads the piped-in feature
// extractor assembly, trains the matching model, and uploads the result.
// Returns true only when the trained model was successfully sent back; failed
// uploads are queued via AddToPending.
// Fixed: (1) the final Console.WriteLine string literal was broken across two
// source lines (a compile error) — rejoined into one interpolated string;
// (2) Thread.Sleep inside an async method replaced with awaited Task.Delay.
public async Task<bool> Run(Workers worker, Config config)
{
    _worker = worker;
    _config = config;

    var workerHandler = new WorkerHandler(_config.WebServiceURL, _config.RegistrationKey);
    var work = await workerHandler.GetWorkAsync(_worker.Name);
    if (work == null)
    {
        Log.Debug($"No work or connection issues to {_config.WebServiceURL}, waiting until next interval");
        await System.Threading.Tasks.Task.Delay(Constants.LOOP_INTERVAL_MS);
        return false;
    }

    work.Started = true;
    work.StartTime = DateTime.Now;
    var result = await workerHandler.UpdateWorkAsync(work);
    if (!result)
    {
        await System.Threading.Tasks.Task.Delay(Constants.LOOP_INTERVAL_MS);
        return false;
    }

    if (!Directory.Exists(work.TrainingDataPath))
    {
        // Report the bad path back to the server as a completed-with-debug job.
        work.Completed = true;
        work.Debug = $"Path ({work.TrainingDataPath}) does not exist";
        work.CompletedTime = DateTime.Now;
        result = await workerHandler.UpdateWorkAsync(work);
        if (!result)
        {
            AddToPending(work);
        }
        return false;
    }

    var options = new TrainerCommandLineOptions
    {
        FolderOfData = work.TrainingDataPath,
        LogLevel = LogLevels.DEBUG
    };

    var featureExtractor = Assembly.Load(work.FeatureExtractorBytes);
    if (featureExtractor == null)
    {
        work.Debug = "Feature Extractor Assembly was not piped to the worker";
        return false;
    }

    // Find the concrete BasePrediction implementation whose MODEL_NAME matches
    // the requested model type.
    var extractor = featureExtractor.GetTypes()
        .Where(a => a.BaseType == typeof(BasePrediction) && !a.IsAbstract)
        .Select(a => ((BasePrediction)Activator.CreateInstance(a)))
        .FirstOrDefault(a => a.MODEL_NAME == work.ModelType);
    if (extractor == null)
    {
        work.Debug = $"Failed to load {work.ModelType} from piped in assembly";
        return false;
    }

    var (outputFile, metrics) = extractor.TrainModel(options);
    if (File.Exists(outputFile))
    {
        work.Model = File.ReadAllBytes(outputFile);
    }
    work.ModelEvaluationMetrics = metrics;
    work.Completed = true;
    work.CompletedTime = DateTime.Now;

    result = await workerHandler.UpdateWorkAsync(work);
    if (result)
    {
        Log.Debug($"{work.ID}.{work.Name} - was successfully trained and saved to {outputFile}");
        Console.WriteLine($"Successfully trained model and saved to {outputFile}");
    }
    else
    {
        AddToPending(work);
    }
    return result;
}
// Thin pass-through: starts the handler on the requested number of threads.
public void Run(int threadCount, WorkerHandler handler) =>
    this.RunHandler(threadCount, handler);