/// <summary>
/// Enriches a job with contextual information mined from the log events that occurred
/// while the job was running: any matching error events, plus type-specific detail
/// records for extract-refresh and subscription jobs.
/// </summary>
/// <param name="job">The job to decorate; mutated in place.</param>
/// <param name="endEvent">The log document marking the job's completion; its "ts" field bounds the search window.</param>
private void AppendDetailsToJob(BackgrounderJob job, BsonDocument endEvent)
{
    DateTime jobEndTime = BsonDocumentHelper.GetDateTime("ts", endEvent);

    // All events emitted by this worker/backgrounder pair between job start and end.
    var jobWindowEvents = MongoQueryHelper.GetEventsInRange(backgrounderJavaCollection, job.WorkerId, job.BackgrounderId, job.StartTime, jobEndTime).ToList();

    // Attach any errors that surfaced inside the job's time window.
    job.Errors = CollectErrorsForJob(job, jobWindowEvents);

    // Only certain job types carry extra detail worth extracting.
    if (job.JobType.Equals("refresh_extracts") || job.JobType.Equals("increment_extracts"))
    {
        var extractDetail = new BackgrounderExtractJobDetail(job, GetVqlSessionServiceEvents(jobWindowEvents));

        // Only keep the detail record if we managed to correlate a Vizql session.
        if (!String.IsNullOrEmpty(extractDetail.VizqlSessionId))
        {
            job.BackgrounderJobDetail = extractDetail;
        }
    }
    else if (job.JobType.Equals("single_subscription_notify"))
    {
        // Subscription details draw on both session-service and subscription-runner events.
        var subscriptionEvents = new List<BsonDocument>(GetVqlSessionServiceEvents(jobWindowEvents));
        subscriptionEvents.AddRange(GetSubscriptionRunnerEvents(jobWindowEvents));

        var subscriptionDetail = new BackgrounderSubscriptionJobDetail(job, subscriptionEvents);

        // Only keep the detail record if we managed to correlate a Vizql session.
        if (!String.IsNullOrEmpty(subscriptionDetail.VizqlSessionId))
        {
            job.BackgrounderJobDetail = subscriptionDetail;
        }
    }
}
/// <summary>
/// Registers the end event for a job. If a start event with the same JobId has already
/// been seen, the pair is completed and handed off for persistence; otherwise the end
/// event is stashed until its start event arrives.
/// </summary>
/// <param name="endEvent">The job end event to record.</param>
public void AddEndEvent(BackgrounderJob endEvent)
{
    BackgrounderEvent existingEvent;

    // TryGetValue performs a single dictionary lookup instead of ContainsKey + indexer.
    if (_events.TryGetValue(endEvent.JobId, out existingEvent))
    {
        existingEvent.EndEvent = endEvent;
        PersistEventIfItIsComplete(existingEvent);
    }
    else
    {
        // End event arrived before its start event; hold onto it until the start shows up.
        _events.Add(endEvent.JobId, new BackgrounderEvent { EndEvent = endEvent });
    }
}
/// <summary>
/// Registers the start event for a job, attaching it to an existing partial event if the
/// matching end event arrived first. Also advances the per-worker/backgrounder start-time
/// watermark used for timeout detection.
/// </summary>
/// <param name="startEvent">The job start event to record.</param>
public void AddStartEvent(BackgrounderJob startEvent)
{
    BackgrounderEvent existingEvent;

    // TryGetValue performs a single dictionary lookup instead of ContainsKey + indexer.
    if (_events.TryGetValue(startEvent.JobId, out existingEvent))
    {
        existingEvent.StartEvent = startEvent;
    }
    else
    {
        _events.Add(startEvent.JobId, new BackgrounderEvent { StartEvent = startEvent });
    }

    _latestStartEvents.AddWatermark(startEvent);
}
/// <summary>
/// Records the most recent job start time observed for the worker/backgrounder pair that
/// produced the given start event, advancing the stored watermark only if this event is newer.
/// </summary>
/// <param name="startEvent">The start event whose StartTime may advance the watermark.</param>
public void AddWatermark(BackgrounderJob startEvent)
{
    var key = GetKeyForWorkerAndBackgrounder(startEvent.WorkerId, startEvent.BackgrounderId);

    DateTime currentWatermark;

    // TryGetValue performs a single dictionary lookup instead of ContainsKey + indexer.
    if (_latestStartEvents.TryGetValue(key, out currentWatermark))
    {
        // Only move the watermark forward; ignore out-of-order older events.
        if (startEvent.StartTime > currentWatermark)
        {
            _latestStartEvents[key] = startEvent.StartTime;
        }
    }
    else
    {
        _latestStartEvents.Add(key, startEvent.StartTime);
    }
}
/// <summary>
/// Pairs up start/finish log events for a single worker/backgrounder/jobType combination and
/// yields the resulting jobs. A start event followed by something other than its finish event
/// is treated as a timed-out job.
/// </summary>
/// <param name="workerId">Worker the backgrounder runs on.</param>
/// <param name="backgrounderId">Backgrounder process id within the worker.</param>
/// <param name="jobType">Job type whose events should be processed.</param>
/// <param name="collection">Mongo collection holding the backgrounder log events.</param>
/// <returns>A lazily-evaluated sequence of successfully parsed jobs.</returns>
private IEnumerable <BackgrounderJob> ProcessJobsForBackgrounderId(string workerId, int backgrounderId, string jobType, IMongoCollection <BsonDocument> collection)
{
    var recordQueue = new Queue <BsonDocument>(MongoQueryHelper.GetJobEventsForProcessByType(collection, workerId, backgrounderId, jobType));

    while (recordQueue.Count >= 2)
    {
        // This logic is a bit messy but unfortunately our backgrounder logs are messy.
        // We pop the next element off the record queue, make sure its a valid start event and then peek at the next element to make sure its the corresponding
        // Completion message, if it is then we go forward with processing, if it isn't then we drop whatever we previously popped and move on.
        // This prevents one failed completion message from throwing off the ordering of the whole queue.
        var startEvent = recordQueue.Dequeue();

        if (IsValidJobStartEvent(startEvent, jobType))
        {
            BackgrounderJob job = null;

            if (IsValidJobFinishEvent(recordQueue.Peek(), jobType))
            {
                var endEvent = recordQueue.Dequeue();
                try
                {
                    job = new BackgrounderJob(startEvent, endEvent);
                    job = AppendDetailsToJob(job, endEvent, collection);
                }
                catch (Exception ex)
                {
                    Log.ErrorFormat($"Failed to extract job info from events '{startEvent}' & '{endEvent}': {ex.Message}");
                }
            }
            // If the next event in the list isnt a finish event then we can assume the previous job timed out.
            else
            {
                try
                {
                    job = new BackgrounderJob(startEvent, true);
                    job = AppendDetailsToJob(job, recordQueue.Peek(), collection);
                }
                catch (Exception ex)
                {
                    Log.ErrorFormat($"Failed to extract job info from timed-out event '{startEvent}': {ex.Message}");
                }
            }

            // BUGFIX: if job construction or detail extraction threw, job is still null;
            // previously the null was yielded into the result sequence, forcing every
            // consumer to null-check (or NRE). Skip failed jobs instead.
            if (job != null)
            {
                yield return job;
            }
        }
    }
}
/// <summary>
/// Builds the list of error records for a job by scanning the events that occurred within
/// its time window and keeping those that are error events matching the job's type.
/// </summary>
/// <param name="job">The job the errors belong to.</param>
/// <param name="eventsInJobRange">Log events emitted during the job's lifetime.</param>
/// <returns>All matching errors; empty if none were found.</returns>
private ICollection <BackgrounderJobError> CollectErrorsForJob(BackgrounderJob job, IEnumerable <BsonDocument> eventsInJobRange)
{
    // An event counts only if it is an error AND that error applies to this job's type.
    return eventsInJobRange
           .Where(candidate => IsErrorEvent(candidate) && ErrorDocumentMatchesJobType(job.JobType, candidate))
           .Select(errorEvent => new BackgrounderJobError(job.JobId, errorEvent))
           .ToList();
}
/// <summary>
/// Pairs up start/finish log events for a single worker/backgrounder/jobType combination,
/// builds a job from each pair, and enqueues it for persistence. A start event followed by
/// something other than its finish event is treated as a timed-out job.
/// </summary>
/// <param name="workerId">Worker the backgrounder runs on.</param>
/// <param name="backgrounderId">Backgrounder process id within the worker.</param>
/// <param name="jobType">Job type whose events should be processed.</param>
private void ProcessJobsForBackgrounderId(string workerId, int backgrounderId, string jobType)
{
    var pendingRecords = new Queue <BsonDocument>(MongoQueryHelper.GetJobEventsForProcessByType(workerId, backgrounderId, jobType, backgrounderJavaCollection));

    while (pendingRecords.Count >= 2)
    {
        // This logic is a bit messy but unfortunately our backgrounder logs are messy.
        // We pop the next element off the record queue, make sure its a valid start event and then peek at the next element to make sure its the corresponding
        // Completion message, if it is then we go forward with processing, if it isn't then we drop whatever we previously popped and move on.
        // This prevents one failed completion message from throwing off the ordering of the whole queue.
        BsonDocument candidateStart = pendingRecords.Dequeue();

        // Anything that isn't a valid start event is simply dropped.
        if (!IsValidJobStartEvent(candidateStart, jobType))
        {
            continue;
        }

        if (IsValidJobFinishEvent(pendingRecords.Peek(), jobType))
        {
            BsonDocument finishRecord = pendingRecords.Dequeue();
            try
            {
                var completedJob = new BackgrounderJob(candidateStart, finishRecord, logsetHash);
                AppendDetailsToJob(completedJob, finishRecord);
                backgrounderPersister.Enqueue(completedJob);
            }
            catch (Exception ex)
            {
                Log.ErrorFormat("Failed to extract job info from events '{0}' & '{1}': {2}", candidateStart, finishRecord, ex.Message);
            }
        }
        // If the next event in the list isnt a finish event then we can assume the previous job timed out.
        else
        {
            try
            {
                var timedOutJob = new BackgrounderJob(candidateStart, true, logsetHash);
                AppendDetailsToJob(timedOutJob, pendingRecords.Peek());
                backgrounderPersister.Enqueue(timedOutJob);
            }
            catch (Exception ex)
            {
                Log.ErrorFormat("Failed to extract job info from timed-out event '{0}': {1}", candidateStart, ex.Message);
            }
        }
    }
}
/// <summary>
/// Persists a backgrounder job, its errors, and its type-specific detail record (if any)
/// to the database.
/// </summary>
/// <param name="pluginRequest">The plugin request context (unused here; kept for the shared persister signature).</param>
/// <param name="dbConnection">Open database connection to insert into.</param>
/// <param name="backgrounderJob">The job to persist.</param>
/// <returns>An InsertionResult recording one success or one failure.</returns>
public static InsertionResult PersistBackgrounderJob(IPluginRequest pluginRequest, IDbConnection dbConnection, BackgrounderJob backgrounderJob)
{
    try
    {
        dbConnection.Insert(backgrounderJob);

        if (backgrounderJob.Errors != null && backgrounderJob.Errors.Count > 0)
        {
            dbConnection.InsertAll(backgrounderJob.Errors);
        }

        // Pattern matching replaces the redundant "is" check + "as" cast pairs (each of
        // which tested the type twice), and handles a null detail without an outer guard.
        // The two branches are mutually exclusive, so use else-if rather than two ifs.
        if (backgrounderJob.BackgrounderJobDetail is BackgrounderExtractJobDetail extractJobDetail)
        {
            dbConnection.Insert(extractJobDetail);
        }
        else if (backgrounderJob.BackgrounderJobDetail is BackgrounderSubscriptionJobDetail subscriptionJobDetail)
        {
            dbConnection.Insert(subscriptionJobDetail);
        }

        Log.DebugFormat("Persisted Backgrounder Job '{0}' ({1}).", backgrounderJob.JobId, backgrounderJob.JobType);
        return(new InsertionResult { SuccessfulInserts = 1, FailedInserts = 0 });
    }
    catch (PostgresException ex)
    {
        // Log an error only if this isn't a duplicate key exception.
        if (!ex.SqlState.Equals(PluginLibConstants.POSTGRES_ERROR_CODE_UNIQUE_VIOLATION))
        {
            Log.ErrorFormat("Failed to persist Backgrounder Job '{0}' ({1}): {2}", backgrounderJob.JobId, backgrounderJob.JobType, ex.Message);
        }
        return(new InsertionResult { SuccessfulInserts = 0, FailedInserts = 1 });
    }
    catch (NpgsqlException ex)
    {
        Log.ErrorFormat("Failed to persist Backgrounder Job '{0}' ({1}): {2}", backgrounderJob.JobId, backgrounderJob.JobType, ex.Message);
        return(new InsertionResult { SuccessfulInserts = 0, FailedInserts = 1 });
    }
}
/// <summary>
/// Determines whether the given start event precedes the latest start time already recorded
/// for its worker/backgrounder pair (i.e. it arrived out of order).
/// </summary>
/// <param name="startEvent">The start event to test against the watermark.</param>
/// <returns>True if a newer start has already been seen for this worker/backgrounder; otherwise false.</returns>
public bool StartedBeforeLatestStartEvent(BackgrounderJob startEvent)
{
    var key = GetKeyForWorkerAndBackgrounder(startEvent.WorkerId, startEvent.BackgrounderId);

    DateTime latestStartTime;

    // TryGetValue performs a single dictionary lookup instead of ContainsKey + indexer.
    return(_latestStartEvents.TryGetValue(key, out latestStartTime) && latestStartTime > startEvent.StartTime);
}