protected override bool Receive(object message)
{
    if (message is SetJobQueueConfiguration)
    {
        // Configure the queue: spin up the storage actor and a round-robin pool of workers.
        var config = message as SetJobQueueConfiguration;
        WorkerCount = config.NumWorkers;
        QueueName = config.QueueName;
        JobQueueActor = Context.ActorOf(config.QueueProps, "jobQueue");
        WorkerProps = config.WorkerProps;
        AggressiveSweep = config.AggressiveSweep;
        WorkerRouterRef = Context.ActorOf(WorkerProps.WithRouter(new RoundRobinPool(WorkerCount)), "workerPool");
        Context.Sender.Tell(new Configured());
    }
    else if (message is ShutDownQueues)
    {
        ShutdownRequester = Context.Sender;
        ShuttingDown = true;
        //Tell all of the children to stop what they're doing.
        WorkerRouterRef.Tell(new Broadcast(new ShutDownQueues()));
    }
    else if (message is QueueShutDown)
    {
        // A worker has finished shutting down; once they all have, flush the store and reply.
        ShutdownCount = ShutdownCount + 1;
        SaturationPulseCount = 0;
        if (ShutdownCount == WorkerCount)
        {
            //We do this Ask to make sure that all DB commands from the queues have been flushed.
            var storeShutdown = JobQueueActor.Ask(new ShutDownQueues()).Result as QueueShutDown;
            //Tell our requester that we are truly done.
            ShutdownRequester.Tell(new QueueShutDown());
        }
    }
    else if ((message is JobSweep || message is SilentRetrySweep) && !ShuttingDown)
    {
        HandleSweep(message);
    }
    else if (message is JobSuceeded)
    {
        var msg = (JobSuceeded)message;
        JobQueueActor.Tell(new MarkJobSuccess(msg.JobData.JobId));
        PendingItems = PendingItems - 1;
        try
        {
            OnJobSuccess(msg);
        }
        catch (Exception ex)
        {
            Context.System.Log.Error(ex, "Error Running OnJobSuccess Handler for Queue {0}, job {1}", QueueName, msg.JobData.JobId);
        }
    }
    else if (message is JobFailed)
    {
        var msg = message as JobFailed;
        PendingItems = PendingItems - 1;
        if (msg.JobData.RetryParameters == null || msg.JobData.RetryParameters.MaxRetries <= msg.JobData.RetryParameters.RetryCount)
        {
            // No retry policy, or retries exhausted: mark the job as failed.
            JobQueueActor.Tell(new MarkJobFailed(msg.JobData.JobId));
            try
            {
                OnJobFailed(msg);
            }
            catch (Exception ex)
            {
                Context.System.Log.Error(ex, "Error Running OnJobFailed Handler for Queue {0}, job {1}", QueueName, msg.JobData.JobId);
            }
        }
        else
        {
            // Retries remain: mark the job for retry and bump its retry count.
            JobQueueActor.Tell(new MarkJobInRetryAndIncrement(msg.JobData.JobId, DateTime.Now));
            try
            {
                OnJobRetry(msg);
            }
            catch (Exception ex)
            {
                Context.System.Log.Error(ex, "Error Running OnJobRetry Handler for Queue {0}, job {1}", QueueName, msg.JobData.JobId);
            }
        }
    }
    else
    {
        return OnCustomMessage(message);
    }
    return true;
}
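// --- Not part of the original listing ---
// Hedged sketch: the message contracts consumed by Receive() are defined elsewhere
// in the library and are not shown here. The shapes below are inferred purely from
// the members Receive() touches (NumWorkers, QueueName, QueueProps, WorkerProps,
// AggressiveSweep, JobData.JobId, JobData.RetryParameters, ...), so the real
// definitions may well differ.
public class SetJobQueueConfiguration
{
    public int NumWorkers { get; set; }
    public string QueueName { get; set; }
    public Props QueueProps { get; set; }   // Props for the storage ("jobQueue") actor
    public Props WorkerProps { get; set; }  // Props for the job-executing workers
    public bool AggressiveSweep { get; set; }
}

public class Configured { }         // acknowledgement sent back to the configurer
public class ShutDownQueues { }     // request to drain and stop the queue
public class QueueShutDown { }      // reply counted until every worker has stopped
public class JobSweep { }           // periodic "pull more work" pulse
public class SilentRetrySweep { }   // follow-up sweep that skips the saturation callback

public class JobSuceeded            // spelling as used in Receive() above
{
    public IOddJobWithMetadata JobData { get; set; } // assumed type; exposes JobId and RetryParameters
}

public class JobFailed
{
    public IOddJobWithMetadata JobData { get; set; }
}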
private void HandleSweep(object message)
{
    //Naive backpressure: only pull more work while in-flight jobs stay below twice the worker count.
    if (PendingItems < WorkerCount * 2)
    {
        SaturationStartTime = null;
        SaturationPulseCount = 0;
        IEnumerable<IOddJobWithMetadata> jobsToQueue = null;
        try
        {
            jobsToQueue = JobQueueActor.Ask(new GetJobs(QueueName, WorkerCount), TimeSpan.FromSeconds(30)).Result as IEnumerable<IOddJobWithMetadata>;
        }
        catch (Exception ex)
        {
            Context.System.Log.Error(ex, "Timeout Retrieving data for Queue {0}", QueueName);
            try
            {
                OnQueueTimeout(ex);
            }
            catch (Exception)
            {
                Context.System.Log.Error(ex, "Error Running OnQueueTimeout Handler for Queue {0}", QueueName);
            }
        }
        if (jobsToQueue != null)
        {
            foreach (var job in jobsToQueue)
            {
                if (job.TypeExecutedOn == null)
                {
                    // The job's target type can't be resolved, so it can never run; fail it immediately.
                    try
                    {
                        JobQueueActor.Tell(new MarkJobFailed(job.JobId));
                        OnJobTypeMissing(job);
                    }
                    catch (Exception ex)
                    {
                        Context.System.Log.Error(ex, "Error Running OnJobTypeMissing Handler for Queue {0}", QueueName);
                    }
                }
                else
                {
                    WorkerRouterRef.Tell(new ExecuteJobRequest(job));
                    JobQueueActor.Tell(new MarkJobInProgress(job.JobId));
                    PendingItems = PendingItems + 1;
                }
            }
        }
    }
    else
    {
        if (message is JobSweep)
        {
            // The queue is saturated: record when saturation started and how many pulses have hit it.
            SaturationStartTime = SaturationStartTime ?? DateTime.Now;
            SaturationPulseCount = SaturationPulseCount + 1;
            QueueLifeSaturationPulseCount = QueueLifeSaturationPulseCount + 1;
            try
            {
                OnJobQueueSaturated(SaturationStartTime.Value, SaturationPulseCount, QueueLifeSaturationPulseCount);
            }
            catch (Exception ex)
            {
                Context.System.Log.Error(ex, "Error Running OnJobQueueSaturated Handler for Queue {0}, Saturation Start Time : {1}, number of Saturated pulses {2}, Total Saturated Pulses for Life of Queue: {3}", QueueName, SaturationStartTime.ToString(), SaturationPulseCount, QueueLifeSaturationPulseCount);
            }
        }
        if (AggressiveSweep)
        {
            // Re-check the queue right away; SilentRetrySweep does not re-fire the saturation callback above.
            Context.Self.Tell(new SilentRetrySweep());
        }
    }
}
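// --- Not part of the original listing ---
// Hedged sketch of how the coordinator might be wired up and driven. Nothing above
// shows where the JobSweep pulses come from; one plausible approach is Akka.NET's
// scheduler. "JobQueueCoordinator", "MyStorageActor" and "MyWorkerActor" are
// placeholder names invented for this example, and the configuration message uses
// the property sketch shown earlier.
public static class CoordinatorWiringExample
{
    public static async System.Threading.Tasks.Task RunAsync()
    {
        var system = ActorSystem.Create("jobs");

        // Placeholder for the actor whose Receive()/HandleSweep() are listed above.
        var coordinator = system.ActorOf(Props.Create(() => new JobQueueCoordinator()), "coordinator");

        // Configure the queue and wait for the Configured acknowledgement.
        await coordinator.Ask<Configured>(new SetJobQueueConfiguration
        {
            NumWorkers = 4,
            QueueName = "default",
            QueueProps = Props.Create(() => new MyStorageActor()),  // hypothetical storage actor
            WorkerProps = Props.Create(() => new MyWorkerActor()),  // hypothetical worker actor
            AggressiveSweep = true
        }, TimeSpan.FromSeconds(5));

        // A recurring JobSweep pulse makes the coordinator pull more work whenever
        // PendingItems is below twice the worker count.
        system.Scheduler.ScheduleTellRepeatedly(
            TimeSpan.FromSeconds(1),   // initial delay
            TimeSpan.FromSeconds(5),   // sweep interval
            coordinator,
            new JobSweep(),
            ActorRefs.NoSender);
    }
}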