public void TestDequeue_MultipleJobs()
{
    JobQueue queue = new JobQueue();
    IJobDefinition d = new DudDefinition();

    JobRequest r = new JobRequest(d);
    JobTicket t = new JobTicket(r, new DudHandler());
    queue.Enqueue(t);

    JobRequest r2 = new JobRequest(d);
    JobTicket t2 = new JobTicket(r2, new DudHandler());
    queue.Enqueue(t2);

    IJobTicket ta = queue.Dequeue();
    Assert.AreEqual(t, ta);
    Assert.IsTrue(queue.HasPendingJobs);
    Assert.AreEqual(1, queue.NumberOfJobs);

    IJobTicket tb = queue.Dequeue();
    Assert.AreEqual(t2, tb);
    Assert.IsFalse(queue.HasPendingJobs);
    Assert.AreEqual(0, queue.NumberOfJobs);
}
public void Enqueue_TwoJobsWithSameName_Throws()
{
    // mock
    var mockJob = new JobDetails("TestName", 1000);
    var mockJob2 = new JobDetails("TestName", 2000);

    IDynamicQueue<JobDetails> queue = new JobQueue(null);

    queue.Enqueue(mockJob);
    queue.Enqueue(mockJob2);
}
public static void JoinWork(Queue<EditorJob> resultJobs = null)
{
    if (mainThread == null)
    {
        return;
    }

    mainThread.Join();

    if (resultJobs != null)
    {
        jobExiting.Enqueue(resultJobs);
    }
}
public void Test(int threads, int jobs)
{
    IWorkerMonitor workerMonitor = new TestWorkerMonitor();
    IThreadFactory threadFactory = new DefaultThreadFactory();

    using (ManualResetEvent stopSignal = new ManualResetEvent(false))
    {
        using (JobQueue<Job> queue = new JobQueue<Job>("Worker queue", stopSignal))
        {
            for (int i = 0; i < threads; i++)
            {
                TestWorker worker = new TestWorker(
                    i.ToString(CultureInfo.InvariantCulture), queue, threadFactory, workerMonitor);
                queue.AddWorker(worker);
            }

            queue.StartWorkers();

            for (int i = 0; i < jobs; i++)
            {
                Job job = new Job("0");
                queue.Enqueue(job);
            }

            Assert.IsTrue(queue.WaitForQueueToEmpty(TimeSpan.FromSeconds(10)));
            queue.AssertAllThreadsAlive();

            stopSignal.Set();
            threadFactory.WaitForAllThreadsToStop(TimeSpan.FromSeconds(5));

            Assert.IsTrue(queue.IsEmpty);
        }
    }
}
/// <summary>
/// Adds a building to the colony.
/// </summary>
/// <param name="construct">The construct whose build job should be queued.</param>
public void AddConstruct(Construct construct)
{
    constructs_.Add(construct);

    // Build jobs whose entire site is currently unavailable get a higher priority.
    float priority = construct.BuildJob.Location.All(field => !field.IsAvaliable) ? 3f : 0.5f;
    JobQueue.Enqueue(construct.BuildJob, priority);
}
/// <summary>
/// Simulate this process
/// </summary>
public override IEnumerator<InstructionBase> Simulate()
{
    // while the simulation is running
    while (true)
    {
        // check if the queue for this machine is empty
        if (JobQueue.Count == 0)
        {
            // if it is, wait until there is something in the queue
            yield return new WaitConditionInstruction(() => JobQueue.Count > 0);
        }
        else
        {
            // take a job from the queue
            var jobToProcess = JobQueue.Dequeue();

            // simulate processing the job, which takes time
            yield return new WaitInstruction(jobToProcess.ProcessingTimeRequiredByJobQueue[JobQueue]);

            // use the reliability indicator to determine if the machine is broken down
            if (CheckForRandomBreakdown())
            {
                BreakdownCount++;

                // the machine has broken down:
                // add the job it was processing back to the queue
                JobQueue.Enqueue(jobToProcess);

                // obtain a repair person
                var allocateInstruction = new AllocateInstruction<RepairPerson>(1);
                yield return allocateInstruction;

                // and wait for the machine to be fixed
                yield return new WaitInstruction(RepairTimeRequired);

                // then release the repair person resource
                yield return new ReleaseInstruction<RepairPerson>(allocateInstruction);
            }
            else
            {
                ProcessedCount++;

                // record the fact that the job has been processed by this machine type
                jobToProcess.ProcessingTimeRequiredByJobQueue.Remove(JobQueue);

                // if the job still requires other processing
                if (jobToProcess.RequiresMoreWork)
                {
                    // add it to the next queue
                    jobToProcess.ProcessingTimeRequiredByJobQueue.Keys.First().Enqueue(jobToProcess);
                }
                else
                {
                    // otherwise remove it from the all unprocessed jobs list
                    _unprocessedJobsList.Remove(jobToProcess);
                }
            }
        }
    }
}
public void Enqueue_JobWithEmptyName_Throws()
{
    // mock
    var mockJob = new JobDetails("", 1000);

    IDynamicQueue<JobDetails> queue = new JobQueue(null);
    queue.Enqueue(mockJob);
}
public void StoppedJobShouldRemainInTheQueue()
{
    _jobQueue = new JobQueue { ProgressDelegate = this };
    _jobQueue.Enqueue(new JobToSucceed());

    _jobQueue.Execute();

    _jobQueue.Count.ShouldEqual(1,
        "Stopping after first job's first task execution should leave 1 job in the queue.");
}
protected override void downloadProductsCtxMenuItem_Click(object sender, RoutedEventArgs e)
{
    FoxeTreeViewNotifications.OnDownloadFoxeProductsUnderPage(
        new FoxeDownloadProductsUnderPageEventArgs() { FoxePage = this.FoxePage });

    JobQueue.Enqueue(new Job(this.FoxePage));
}
public override void CancelJob()
{
    Owner.Collectable = null;

    var newJob = new ConstructionCollectJob(construction, (Resource)collectable, collectable.Transform.position)
    {
        JobQueue = JobQueue
    };

    JobQueue.Enqueue(newJob);
}
public void TestEnqueue_InvalidRequest()
{
    JobQueue queue = new JobQueue();

    bool eventFired = false;
    queue.JobAdded += (s, e) => eventFired = true;

    queue.Enqueue(null);

    Assert.IsFalse(queue.HasPendingJobs);
    Assert.IsFalse(eventFired);
}
protected override void downloadProductsCtxMenuItem_Click(object sender, RoutedEventArgs e)
{
    FoxeTreeViewNotifications.OnDownloadFoxeProductsUnderCategory(
        new FoxeDownloadProductsUnderCategoryEventArgs() { FoxeCategory = this.FoxeCategory });

    foreach (FoxePage foxePage in this.FoxeCategory.Pages)
    {
        JobQueue.Enqueue(new Job(foxePage));
    }
}
public void TestHandleCancel_Handleable()
{
    JobQueue queue = new JobQueue();
    IJobDefinition d = new DudDefinition();
    JobRequest r = new JobRequest(d);
    JobTicket t = new JobTicket(r, new DudHandler());
    queue.Enqueue(t);

    bool result = queue.Handle(t);

    Assert.IsTrue(result);
    Assert.IsFalse(queue.HasPendingJobs);
}
public static void AddWork(EditorJob job)
{
    if (mainThread == null)
    {
        emergencyExit = false;
        mainThread = new Thread(DispatchThread);
        mainThread.Name = "EditorJobSystem.ThreadUpdate";
        mainThread.Start();
    }

    jobEntering.Enqueue(job);
}
private void AbandonJob()
{
    _nextTile = DestinationTile = CurrentTile;

    if (_job != null)
    {
        JobQueue.Enqueue(_job);
    }

    _jobSearchCooldownInSec = 15;
    _job = null;
}
public void Dequeue_AfterEnqueue_ReturnCorrectJobsInOrder()
{
    var mockJob = new JobDetails("TestName", 1000);
    var mockJob2 = new JobDetails("TestName2", 2000);
    var mockJob3 = new JobDetails("TestName3", 2000);

    IDynamicQueue<JobDetails> queue = new JobQueue(null);
    queue.Enqueue(mockJob);
    queue.Enqueue(mockJob2);
    queue.Enqueue(mockJob3);

    JobDetails topJob = queue.Dequeue();
    Assert.AreEqual(mockJob, topJob);

    topJob = queue.Dequeue();
    Assert.AreEqual(mockJob2, topJob);

    topJob = queue.Dequeue();
    Assert.AreEqual(mockJob3, topJob);
}
private static void WorkThreadInOrder()
{
    var frameCount = exitChildFrameCount;

    while (true)
    {
        var newJob = jobInOrder.Dequeue();

        if (newJob != null)
        {
            var jobStatus = newJob.Execute();

            if (jobStatus != RuntimeJob.JobStatus.Working)
            {
                jobFinished.Enqueue(newJob);
            }
            else
            {
                jobInOrder.Enqueue(newJob);
            }

            frameCount = exitChildFrameCount;
        }
        else
        {
            frameCount--;
        }

        if (frameCount < 0)
        {
            break;
        }
        else
        {
            Thread.Sleep(waitMilliseconds);
        }
    }
}
public void TestDequeue_OneJob()
{
    JobQueue queue = new JobQueue();
    IJobDefinition d = new DudDefinition();
    JobRequest r = new JobRequest(d);
    JobTicket t = new JobTicket(r, new DudHandler());
    queue.Enqueue(t);

    IJobTicket t1 = queue.Dequeue();

    Assert.AreEqual(t, t1);
    Assert.AreEqual(0, queue.NumberOfJobs);
    Assert.IsFalse(queue.HasPendingJobs);
}
public void TestEnqueue_ValidRequest()
{
    JobQueue queue = new JobQueue();

    bool eventFired = false;
    queue.JobAdded += (s, e) => eventFired = true;

    IJobDefinition d = new DudDefinition();
    JobRequest r = new JobRequest(d);
    JobTicket t = new JobTicket(r, new DudHandler());
    queue.Enqueue(t);

    Assert.IsTrue(eventFired);
    Assert.AreEqual(1, queue.NumberOfJobs);
    Assert.IsTrue(queue.HasPendingJobs);
}
public void TestWork_NoWorker()
{
    ObjectJobDefinition d = new ObjectJobDefinition(
        new PipelineDefinition(new AlgorithmDefinition[] { }),
        new JobInput[] { });
    JobRequest r = new JobRequest(d);
    JobTicket ticket = new JobTicket(r, new DudCancellationHandler());

    JobQueue queue = new JobQueue();
    QueueExecutor executor = new QueueExecutor(queue);

    bool didComplete = false;
    executor.ExhaustedQueue += (s, e) => didComplete = true;

    queue.Enqueue(ticket);
    executor.Start();
}
protected internal void EnqueueJob(JobConf conf, int stepNumber = 0)
{
    Args.ThrowIfNull(conf, "JobConf");

    lock (_jobQueueLock)
    {
        if (!_isRunning)
        {
            StartJobRunnerThread();
        }

        Job job = conf.CreateJob();
        job.StepNumber = stepNumber;
        JobQueue.Enqueue(job);
        _enqueueSignal.Set();
    }
}
public async void RaceConditionExcluded()
{
    // Seeding jobs
    JobQueue.Enqueue(new WorkItem("Lol", 0, 0, CancellationToken.None));
    JobQueue.Enqueue(new WorkItem("Lol", 0, 0, CancellationToken.None));

    var t1 = DequeueJob();
    var t2 = DequeueJob();

    var ex = await Record.ExceptionAsync(async () => { await Task.WhenAll(t1, t2); });

    Assert.Null(ex);
}
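// A minimal sketch of the kind of race-safe queue and DequeueJob helper the test
// above exercises. This is an assumption for illustration only: the real JobQueue
// and DequeueJob are not shown in the snippet, and SafeJobQueue and the
// ConcurrentQueue-based design here are hypothetical.
using System.Collections.Concurrent;
using System.Threading.Tasks;

static class SafeJobQueue
{
    private static readonly ConcurrentQueue<WorkItem> Items = new ConcurrentQueue<WorkItem>();

    public static void Enqueue(WorkItem item) => Items.Enqueue(item);

    // TryDequeue is lock-free and never throws, even when two callers race for
    // the same item, which is the behavior RaceConditionExcluded asserts.
    public static Task<WorkItem> DequeueJob() =>
        Task.Run(() =>
        {
            Items.TryDequeue(out WorkItem item);
            return item;
        });
}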
public void TestWork_OneJob()
{
    ObjectJobDefinition d = new ObjectJobDefinition(
        new PipelineDefinition(new AlgorithmDefinition[] { }),
        new JobInput[] { });
    JobRequest r = new JobRequest(d);
    JobTicket ticket = new JobTicket(r, new DudCancellationHandler());

    JobQueue queue = new JobQueue();
    WorkerImpl worker = new WorkerImpl();

    QueueExecutor executor = new QueueExecutor(queue);
    executor.Worker = worker;
    executor.PluginFactory = new DudFactory();
    executor.Persister = new DudPersister();

    bool didComplete = false;
    executor.ExhaustedQueue += (s, e) => didComplete = true;

    queue.Enqueue(ticket);
    executor.Start();

    int totalTime = 0;
    while (didComplete == false)
    {
        Thread.Sleep(1);
        totalTime += 1;

        if (totalTime > 10000)
        {
            Assert.Fail("Did not complete job in less than 10s");
        }
    }

    Assert.IsTrue(worker.DidWork);
}
public void Enqueue_NullJob_Throws()
{
    IDynamicQueue<JobDetails> queue = new JobQueue(null);
    queue.Enqueue(null);
}
public void EnqueueJob(Job operation) => _jobQueue.Enqueue(operation);
public IActionResult Post(
    string crawlid, string[] collection, string[] field, string[] select,
    string q, string job, string and, string or, int skip, int take)
{
    bool isValid = true;

    ViewBag.JobValidationError = null;
    ViewBag.TargetCollectionValidationError = null;
    ViewBag.Collection = collection;
    ViewBag.Field = field;
    ViewBag.Q = q;
    ViewBag.Job = job;

    if (string.IsNullOrWhiteSpace(job))
    {
        ViewBag.JobValidationError = "Please select a job to execute.";
        isValid = false;
    }

    if (!isValid)
    {
        return View("Index");
    }

    _queue.Enqueue(new CrawlJob(
        SessionFactory,
        _queryParser,
        _model,
        _log,
        crawlid,
        collection,
        field,
        q,
        job,
        and != null,
        or != null,
        skip,
        take));

    return RedirectToAction(job, "Status", new
    {
        crawlid,
        collection,
        field,
        select,
        q,
        and = (and != null ? "AND" : null),
        or = (or != null ? "OR" : null),
        skip,
        take
    });
}
public Task Send(Packet packet) => sendQueue.Enqueue(packet, false, priority: true);
private static void DispatchThread()
{
    var frameCount = exitMainFrameCount;

    while (true)
    {
        var tempJobs = new Queue<EditorJob>();
        jobEntering.Dequeue(tempJobs);

        var inOrderJobs = new Queue<EditorJob>();
        var outOfOrderJobs = new Queue<EditorJob>();

        while (tempJobs.Count > 0)
        {
            var job = tempJobs.Dequeue();

            if (job.DispatchInOrder)
            {
                inOrderJobs.Enqueue(job);
            }
            else
            {
                outOfOrderJobs.Enqueue(job);
            }
        }

        jobInOrder.Enqueue(inOrderJobs);
        jobOutOfOrder.Enqueue(outOfOrderJobs);

        if (inOrderJobs.Count > 0)
        {
            StartInOrderThread();
        }

        if (outOfOrderJobs.Count > 0)
        {
            StartOutOfOrderThreads();
        }

        jobFinished.Dequeue(tempJobs);
        jobExiting.Enqueue(tempJobs);

        bool resetFrameCount = false;

        lock (inOrderLock)
        {
            if (inOrderThread == null || !inOrderThread.IsAlive)
            {
                resetFrameCount = true;
            }
        }

        lock (outOfOrderLock)
        {
            if (outOfOrderThreads.Count > 0)
            {
                UnsafeCleanOutOfOrderThreads();
                resetFrameCount = true;
            }
        }

        if (resetFrameCount)
        {
            frameCount = exitMainFrameCount;
        }
        else
        {
            frameCount--;
        }

        if (frameCount < 0 || emergencyExit)
        {
            break;
        }
        else
        {
            Thread.Sleep(waitMilliseconds);
        }
    }

    mainThread = null;
}
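// Hypothetical sketch of the thread-safe queue wrapper that jobEntering, jobInOrder,
// jobFinished and jobExiting above appear to be: Dequeue() returns null when empty
// (see WorkThreadInOrder) and the Queue<T>-taking overloads move items in bulk.
// The class name and exact API are assumptions, not taken from the snippets.
internal sealed class SafeQueue<T> where T : class
{
    private readonly object _gate = new object();
    private readonly Queue<T> _items = new Queue<T>();

    public void Enqueue(T item)
    {
        lock (_gate) { _items.Enqueue(item); }
    }

    // Bulk enqueue: drains the caller's queue into this one.
    public void Enqueue(Queue<T> items)
    {
        lock (_gate)
        {
            while (items.Count > 0) { _items.Enqueue(items.Dequeue()); }
        }
    }

    // Returns null when there is currently nothing queued.
    public T Dequeue()
    {
        lock (_gate) { return _items.Count > 0 ? _items.Dequeue() : null; }
    }

    // Bulk dequeue: moves everything currently queued into the caller's queue.
    public void Dequeue(Queue<T> items)
    {
        lock (_gate)
        {
            while (_items.Count > 0) { items.Enqueue(_items.Dequeue()); }
        }
    }
}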
public void EnqueueJob(Job operation)
{
    _storage.RequestData("data id here", operation.Input[0].Type);
    _jobQueue.Enqueue(operation);
}
public override void CancelJob()
{
    Owner.Collectable = null;
    JobQueue.Enqueue(new CollectJob(collectable, collectable.Transform.position));
}
public void AddJob(TJob job)
{
    JobQueue.Enqueue(job);
    AddJobWaitHandle.Set();
}
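// Hypothetical sketch of the consumer loop that AddJob above signals. It assumes
// JobQueue is a ConcurrentQueue<TJob> and AddJobWaitHandle is an AutoResetEvent;
// WorkerLoop and ProcessJob are illustrative names, not part of the snippet.
private void WorkerLoop(CancellationToken token)
{
    while (!token.IsCancellationRequested)
    {
        // Block until AddJob signals that new work has arrived.
        AddJobWaitHandle.WaitOne();

        // Drain everything that is currently queued before waiting again.
        while (JobQueue.TryDequeue(out TJob job))
        {
            ProcessJob(job);
        }
    }
}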