/// <summary>
/// Deletes saga history documents whose LastModified is older than <paramref name="expiryThreshold"/>.
/// </summary>
/// <param name="deletionBatchSize">Maximum number of documents fetched per query, and therefore deleted per run.</param>
/// <param name="database">The embedded RavenDB database to clean.</param>
/// <param name="expiryThreshold">Documents last modified before this instant are considered expired.</param>
public static void Clean(int deletionBatchSize, DocumentDatabase database, DateTime expiryThreshold)
{
    var stopwatch = Stopwatch.StartNew();
    var items = new List<ICommandData>(deletionBatchSize);
    try
    {
        var query = new IndexQuery
        {
            Start = 0,
            DisableCaching = true,
            Cutoff = SystemTime.UtcNow,
            PageSize = deletionBatchSize,
            Query = $"LastModified:[* TO {expiryThreshold.Ticks}]",
            FieldsToFetch = new[]
            {
                "__document_id"
            },
            SortedFields = new[]
            {
                // Oldest first, so the documents deleted are always the most expired ones.
                new SortedField("LastModified")
                {
                    Field = "LastModified",
                    Descending = false
                }
            }
        };
        var indexName = new ExpirySagaAuditIndex().IndexName;
        database.Query(indexName, query, database.WorkContext.CancellationToken, doc =>
        {
            var id = doc.Value<string>("__document_id");
            if (string.IsNullOrEmpty(id))
            {
                return;
            }
            items.Add(new DeleteCommandData
            {
                Key = id
            });
        });
    }
    catch (OperationCanceledException)
    {
        // The database work context was cancelled (shutdown). Previously this was silently
        // swallowed and the method went on to issue deletion batches against the stopping
        // database; abort instead, matching the token-taking overloads.
        logger.Info("Cleanup operation cancelled");
        return;
    }

    var deletionCount = 0;
    Chunker.ExecuteInChunks(items.Count, (s, e) =>
    {
        logger.InfoFormat("Batching deletion of {0}-{1} sagahistory documents.", s, e);
        // GetRange bounds are inclusive on both ends, hence e - s + 1.
        var results = database.Batch(items.GetRange(s, e - s + 1), CancellationToken.None);
        logger.InfoFormat("Batching deletion of {0}-{1} sagahistory documents completed.", s, e);
        deletionCount += results.Count(x => x.Deleted == true);
    });

    if (deletionCount == 0)
    {
        logger.Info("No expired sagahistory documents found");
    }
    else
    {
        logger.InfoFormat("Deleted {0} expired sagahistory documents. Batch execution took {1}ms", deletionCount, stopwatch.ElapsedMilliseconds);
    }
}
/// <summary>
/// Deletes processed (audit) message documents older than <paramref name="expiryThreshold"/>,
/// along with their stored message-body attachments. Cancellation is honoured between phases.
/// </summary>
/// <param name="deletionBatchSize">Maximum number of documents fetched per query, and therefore deleted per run.</param>
/// <param name="database">The embedded RavenDB database to clean.</param>
/// <param name="expiryThreshold">Documents processed before this instant are considered expired.</param>
/// <param name="token">Aborts the cleanup when signalled.</param>
public static void Clean(int deletionBatchSize, DocumentDatabase database, DateTime expiryThreshold, CancellationToken token)
{
    var stopwatch = Stopwatch.StartNew();
    var deleteCommands = new List<ICommandData>(deletionBatchSize);
    var bodyIds = new List<string>(deletionBatchSize);
    // Handed to the query as explicit state so the per-document callback avoids capturing the lists.
    var queryState = Tuple.Create(deleteCommands, bodyIds);

    try
    {
        var query = new IndexQuery
        {
            Start = 0,
            PageSize = deletionBatchSize,
            Cutoff = SystemTime.UtcNow,
            DisableCaching = true,
            Query = $"ProcessedAt:[* TO {expiryThreshold.Ticks}]",
            FieldsToFetch = new[]
            {
                "__document_id",
                "MessageMetadata.MessageId",
                "MessageMetadata.BodyNotStored"
            },
            SortedFields = new[]
            {
                // Oldest first, so the documents deleted are always the most expired ones.
                new SortedField("ProcessedAt")
                {
                    Field = "ProcessedAt",
                    Descending = false
                }
            }
        };

        database.Query(new ExpiryProcessedMessageIndex().IndexName, query, token, (doc, state) =>
        {
            var documentId = doc.Value<string>("__document_id");
            if (string.IsNullOrEmpty(documentId))
            {
                return;
            }

            state.Item1.Add(new DeleteCommandData
            {
                Key = documentId
            });

            // Only messages whose body was stored as an attachment yield a body id to clean up.
            if (TryGetBodyId(doc, out var bodyId))
            {
                state.Item2.Add(bodyId);
            }
        }, queryState);
    }
    catch (OperationCanceledException)
    {
        logger.Info("Cleanup operation cancelled");
        return;
    }

    if (token.IsCancellationRequested)
    {
        return;
    }

    var deletedAuditDocuments = Chunker.ExecuteInChunks(deleteCommands.Count, (batch, db, s, e) =>
    {
        if (logger.IsDebugEnabled)
        {
            logger.Debug($"Batching deletion of {s}-{e} audit documents.");
        }
        // Chunk bounds are inclusive on both ends, hence e - s + 1.
        var results = db.Batch(batch.GetRange(s, e - s + 1), CancellationToken.None);
        if (logger.IsDebugEnabled)
        {
            logger.Debug($"Batching deletion of {s}-{e} audit documents completed.");
        }
        return results.Count(x => x.Deleted == true);
    }, deleteCommands, database, token);

    var deletedAttachments = Chunker.ExecuteInChunks(bodyIds.Count, (ids, db, s, e) =>
    {
        var deleted = 0;
        if (logger.IsDebugEnabled)
        {
            logger.Debug($"Batching deletion of {s}-{e} attachment audit documents.");
        }
        db.TransactionalStorage.Batch(accessor =>
        {
            for (var idx = s; idx <= e; idx++)
            {
                // We want to continue using attachments for now
#pragma warning disable 618
                accessor.Attachments.DeleteAttachment(ids[idx], null);
#pragma warning restore 618
                deleted++;
            }
        });
        if (logger.IsDebugEnabled)
        {
            logger.Debug($"Batching deletion of {s}-{e} attachment audit documents completed.");
        }
        return deleted;
    }, bodyIds, database, token);

    if (deletedAttachments + deletedAuditDocuments == 0)
    {
        logger.Info("No expired audit documents found");
    }
    else
    {
        logger.Info($"Deleted {deletedAuditDocuments} expired audit documents and {deletedAttachments} message body attachments. Batch execution took {stopwatch.ElapsedMilliseconds} ms");
    }
}
/// <summary>
/// Deletes resolved/archived failed-message documents (Status 2-4) older than
/// <paramref name="expiryThreshold"/>, together with their retry bookkeeping documents and
/// message-body attachments. Cancellation is honoured between phases.
/// </summary>
/// <param name="deletionBatchSize">Maximum number of documents fetched per query, and therefore deleted per run.</param>
/// <param name="database">The embedded RavenDB database to clean.</param>
/// <param name="expiryThreshold">Documents last modified before this instant are considered expired.</param>
/// <param name="token">Aborts the cleanup when signalled.</param>
public static void Clean(int deletionBatchSize, DocumentDatabase database, DateTime expiryThreshold, CancellationToken token)
{
    var stopwatch = Stopwatch.StartNew();
    var items = new List<ICommandData>(deletionBatchSize);
    var attachments = new List<string>(deletionBatchSize);
    var failedRetryItems = new List<ICommandData>(deletionBatchSize);
    // Handed to the query as explicit state so the per-document callback avoids capturing the lists.
    var itemsAndAttachements = new { items, attachments, failedRetryItems };
    try
    {
        var query = new IndexQuery
        {
            Start = 0,
            PageSize = deletionBatchSize,
            Cutoff = SystemTime.UtcNow,
            DisableCaching = true,
            Query = $"Status:[2 TO 4] AND LastModified:[* TO {expiryThreshold.Ticks}]",
            FieldsToFetch = new[]
            {
                "__document_id",
                "ProcessingAttempts[0].MessageId"
            },
            SortedFields = new[]
            {
                // Oldest first, so the documents deleted are always the most expired ones.
                new SortedField("LastModified")
                {
                    Field = "LastModified",
                    Descending = false
                }
            }
        };
        var indexName = new ExpiryErrorMessageIndex().IndexName;
        database.Query(indexName, query, token, (doc, state) =>
        {
            var id = doc.Value<string>("__document_id");
            if (string.IsNullOrEmpty(id))
            {
                return;
            }
            // Each failed message has a companion FailedMessageRetry document keyed by message id.
            var failedMessageRetryId = FailedMessageRetry.MakeDocumentId(FailedMessage.GetMessageIdFromDocumentId(id));
            state.failedRetryItems.Add(new DeleteCommandData
            {
                Key = failedMessageRetryId
            });
            state.items.Add(new DeleteCommandData
            {
                Key = id
            });
            var bodyid = doc.Value<string>("ProcessingAttempts[0].MessageId");
            // Guard against a missing message id: previously a null/empty value was added
            // unconditionally and later produced a bogus "messagebodies/" attachment delete
            // that inflated the deleted-attachments count.
            if (!string.IsNullOrEmpty(bodyid))
            {
                state.attachments.Add(bodyid);
            }
        }, itemsAndAttachements);
    }
    catch (OperationCanceledException)
    {
        logger.Info("Cleanup operation cancelled");
        return;
    }

    if (token.IsCancellationRequested)
    {
        return;
    }

    var deletedFailedMessageRetry = Chunker.ExecuteInChunks(failedRetryItems.Count, (itemsForBatch, db, s, e) =>
    {
        if (logger.IsDebugEnabled)
        {
            logger.Debug($"Batching deletion of {s}-{e} FailedMessageRetry documents.");
        }
        // Chunk bounds are inclusive on both ends, hence e - s + 1.
        var results = db.Batch(itemsForBatch.GetRange(s, e - s + 1), CancellationToken.None);
        if (logger.IsDebugEnabled)
        {
            logger.Debug($"Batching deletion of {s}-{e} FailedMessageRetry documents completed.");
        }
        return results.Count(x => x.Deleted == true);
    }, failedRetryItems, database, token);

    var deletedAttachments = Chunker.ExecuteInChunks(attachments.Count, (atts, db, s, e) =>
    {
        var deleted = 0;
        if (logger.IsDebugEnabled)
        {
            logger.Debug($"Batching deletion of {s}-{e} attachment error documents.");
        }
        db.TransactionalStorage.Batch(accessor =>
        {
            for (var idx = s; idx <= e; idx++)
            {
                // We want to continue using attachments for now
#pragma warning disable 618
                accessor.Attachments.DeleteAttachment("messagebodies/" + attachments[idx], null);
#pragma warning restore 618
                deleted++;
            }
        });
        if (logger.IsDebugEnabled)
        {
            logger.Debug($"Batching deletion of {s}-{e} attachment error documents completed.");
        }
        return deleted;
    }, attachments, database, token);

    var deletedFailedMessage = Chunker.ExecuteInChunks(items.Count, (itemsForBatch, db, s, e) =>
    {
        if (logger.IsDebugEnabled)
        {
            logger.Debug($"Batching deletion of {s}-{e} error documents.");
        }
        var results = db.Batch(itemsForBatch.GetRange(s, e - s + 1), CancellationToken.None);
        if (logger.IsDebugEnabled)
        {
            logger.Debug($"Batching deletion of {s}-{e} error documents completed.");
        }
        return results.Count(x => x.Deleted == true);
    }, items, database, token);

    if (deletedFailedMessage + deletedAttachments + deletedFailedMessageRetry == 0)
    {
        logger.Info("No expired error documents found");
    }
    else
    {
        // Include the retry-document count: it participates in the zero-check above, so the
        // previous message could claim work was done while reporting none of it.
        logger.Info($"Deleted {deletedFailedMessage} expired error documents, {deletedFailedMessageRetry} retry documents and {deletedAttachments} message body attachments. Batch execution took {stopwatch.ElapsedMilliseconds} ms");
    }
}
/// <summary>
/// Deletes saga history documents whose LastModified is older than <paramref name="expiryThreshold"/>.
/// Cancellation is honoured between the query and deletion phases.
/// </summary>
/// <param name="deletionBatchSize">Maximum number of documents fetched per query, and therefore deleted per run.</param>
/// <param name="database">The embedded RavenDB database to clean.</param>
/// <param name="expiryThreshold">Documents last modified before this instant are considered expired.</param>
/// <param name="token">Aborts the cleanup when signalled.</param>
public static void Clean(int deletionBatchSize, DocumentDatabase database, DateTime expiryThreshold, CancellationToken token)
{
    var stopwatch = Stopwatch.StartNew();
    var pendingDeletes = new List<ICommandData>(deletionBatchSize);

    try
    {
        var query = new IndexQuery
        {
            Start = 0,
            DisableCaching = true,
            Cutoff = SystemTime.UtcNow,
            PageSize = deletionBatchSize,
            Query = $"LastModified:[* TO {expiryThreshold.Ticks}]",
            FieldsToFetch = new[]
            {
                "__document_id"
            },
            SortedFields = new[]
            {
                // Oldest first, so the documents deleted are always the most expired ones.
                new SortedField("LastModified")
                {
                    Field = "LastModified",
                    Descending = false
                }
            }
        };

        // The list is handed over as query state so the callback avoids capturing it.
        database.Query(new ExpirySagaAuditIndex().IndexName, query, token, (doc, commands) =>
        {
            var documentId = doc.Value<string>("__document_id");
            if (!string.IsNullOrEmpty(documentId))
            {
                commands.Add(new DeleteCommandData
                {
                    Key = documentId
                });
            }
        }, pendingDeletes);
    }
    catch (OperationCanceledException)
    {
        logger.Info("Cleanup operation cancelled");
        return;
    }

    if (token.IsCancellationRequested)
    {
        return;
    }

    var deletionCount = Chunker.ExecuteInChunks(pendingDeletes.Count, (batch, db, s, e) =>
    {
        if (logger.IsDebugEnabled)
        {
            logger.Debug($"Batching deletion of {s}-{e} saga history documents.");
        }
        // Chunk bounds are inclusive on both ends, hence e - s + 1.
        var results = db.Batch(batch.GetRange(s, e - s + 1), CancellationToken.None);
        if (logger.IsDebugEnabled)
        {
            logger.Debug($"Batching deletion of {s}-{e} saga history documents completed.");
        }
        return results.Count(x => x.Deleted == true);
    }, pendingDeletes, database, token);

    if (deletionCount == 0)
    {
        logger.Info("No expired saga history documents found");
    }
    else
    {
        logger.Info($"Deleted {deletionCount} expired saga history documents. Batch execution took {stopwatch.ElapsedMilliseconds} ms");
    }
}
/// <summary>
/// Deletes resolved/archived failed-message documents (Status 2-4) older than
/// <paramref name="expiryThreshold"/>, together with their message-body attachments.
/// </summary>
/// <param name="deletionBatchSize">Maximum number of documents fetched per query, and therefore deleted per run.</param>
/// <param name="database">The embedded RavenDB database to clean.</param>
/// <param name="expiryThreshold">Documents last modified before this instant are considered expired.</param>
public static void Clean(int deletionBatchSize, DocumentDatabase database, DateTime expiryThreshold)
{
    var stopwatch = Stopwatch.StartNew();
    var items = new List<ICommandData>(deletionBatchSize);
    var attachments = new List<string>(deletionBatchSize);
    try
    {
        var query = new IndexQuery
        {
            Start = 0,
            PageSize = deletionBatchSize,
            Cutoff = SystemTime.UtcNow,
            DisableCaching = true,
            Query = $"Status:[2 TO 4] AND LastModified:[* TO {expiryThreshold.Ticks}]",
            FieldsToFetch = new[]
            {
                "__document_id",
                "ProcessingAttempts[0].MessageId"
            },
            SortedFields = new[]
            {
                // Oldest first, so the documents deleted are always the most expired ones.
                new SortedField("LastModified")
                {
                    Field = "LastModified",
                    Descending = false
                }
            }
        };
        var indexName = new ExpiryErrorMessageIndex().IndexName;
        database.Query(indexName, query, database.WorkContext.CancellationToken, null, doc =>
        {
            var id = doc.Value<string>("__document_id");
            if (string.IsNullOrEmpty(id))
            {
                return;
            }
            items.Add(new DeleteCommandData
            {
                Key = id
            });
            var bodyid = doc.Value<string>("ProcessingAttempts[0].MessageId");
            // Guard against a missing message id: previously a null/empty value was added
            // unconditionally and later produced a bogus "messagebodies/" attachment delete.
            if (!string.IsNullOrEmpty(bodyid))
            {
                attachments.Add(bodyid);
            }
        });
    }
    catch (OperationCanceledException)
    {
        // The database work context was cancelled (shutdown). Previously this was silently
        // swallowed and the method went on to issue deletion batches against the stopping
        // database; abort instead, matching the token-taking overloads.
        logger.Info("Cleanup operation cancelled");
        return;
    }

    var deletionCount = 0;
    Chunker.ExecuteInChunks(items.Count, (s, e) =>
    {
        logger.InfoFormat("Batching deletion of {0}-{1} error documents.", s, e);
        // Chunk bounds are inclusive on both ends, hence e - s + 1.
        var results = database.Batch(items.GetRange(s, e - s + 1));
        logger.InfoFormat("Batching deletion of {0}-{1} error documents completed.", s, e);
        deletionCount += results.Count(x => x.Deleted == true);
    });

    Chunker.ExecuteInChunks(attachments.Count, (s, e) =>
    {
        database.TransactionalStorage.Batch(accessor =>
        {
            logger.InfoFormat("Batching deletion of {0}-{1} attachment error documents.", s, e);
            for (var idx = s; idx <= e; idx++)
            {
                // We want to continue using attachments for now; suppress the obsolete-API
                // warning the same way the audit cleaner does.
#pragma warning disable 618
                accessor.Attachments.DeleteAttachment("messagebodies/" + attachments[idx], null);
#pragma warning restore 618
            }
            logger.InfoFormat("Batching deletion of {0}-{1} attachment error documents completed.", s, e);
        });
    });

    if (deletionCount == 0)
    {
        logger.Info("No expired error documents found");
    }
    else
    {
        logger.InfoFormat("Deleted {0} expired error documents. Batch execution took {1}ms", deletionCount, stopwatch.ElapsedMilliseconds);
    }
}
/// <summary>
/// Deletes processed (audit) message documents older than <paramref name="expiryThreshold"/>,
/// along with their stored message-body attachments.
/// </summary>
/// <param name="deletionBatchSize">Maximum number of documents fetched per query, and therefore deleted per run.</param>
/// <param name="database">The embedded RavenDB database to clean.</param>
/// <param name="expiryThreshold">Documents processed before this instant are considered expired.</param>
public static void Clean(int deletionBatchSize, DocumentDatabase database, DateTime expiryThreshold)
{
    var stopwatch = Stopwatch.StartNew();
    var items = new List<ICommandData>(deletionBatchSize);
    var attachments = new List<string>(deletionBatchSize);
    try
    {
        var query = new IndexQuery
        {
            Start = 0,
            PageSize = deletionBatchSize,
            Cutoff = SystemTime.UtcNow,
            DisableCaching = true,
            Query = $"ProcessedAt:[* TO {expiryThreshold.Ticks}]",
            FieldsToFetch = new[]
            {
                "__document_id",
                "MessageMetadata"
            },
            SortedFields = new[]
            {
                // Oldest first, so the documents deleted are always the most expired ones.
                new SortedField("ProcessedAt")
                {
                    Field = "ProcessedAt",
                    Descending = false
                }
            }
        };
        var indexName = new ExpiryProcessedMessageIndex().IndexName;
        database.Query(indexName, query, database.WorkContext.CancellationToken, doc =>
        {
            var id = doc.Value<string>("__document_id");
            if (string.IsNullOrEmpty(id))
            {
                return;
            }
            items.Add(new DeleteCommandData
            {
                Key = id
            });
            // Only messages whose body was stored as an attachment yield a body id to clean up.
            if (TryGetBodyId(doc, out var bodyId))
            {
                attachments.Add(bodyId);
            }
        });
    }
    catch (OperationCanceledException)
    {
        // The database work context was cancelled (shutdown). Previously this was silently
        // swallowed and the method went on to issue deletion batches against the stopping
        // database; abort instead, matching the token-taking overloads.
        logger.Info("Cleanup operation cancelled");
        return;
    }

    var deletionCount = 0;
    Chunker.ExecuteInChunks(items.Count, (s, e) =>
    {
        logger.InfoFormat("Batching deletion of {0}-{1} audit documents.", s, e);
        // Chunk bounds are inclusive on both ends, hence e - s + 1.
        var results = database.Batch(items.GetRange(s, e - s + 1), CancellationToken.None);
        logger.InfoFormat("Batching deletion of {0}-{1} audit documents completed.", s, e);
        deletionCount += results.Count(x => x.Deleted == true);
    });

    Chunker.ExecuteInChunks(attachments.Count, (s, e) =>
    {
        database.TransactionalStorage.Batch(accessor =>
        {
            logger.InfoFormat("Batching deletion of {0}-{1} attachment audit documents.", s, e);
            for (var idx = s; idx <= e; idx++)
            {
                // We want to continue using attachments for now
#pragma warning disable 618
                accessor.Attachments.DeleteAttachment(attachments[idx], null);
#pragma warning restore 618
            }
            logger.InfoFormat("Batching deletion of {0}-{1} attachment audit documents completed.", s, e);
        });
    });

    if (deletionCount == 0)
    {
        logger.Info("No expired audit documents found");
    }
    else
    {
        logger.InfoFormat("Deleted {0} expired audit documents. Batch execution took {1}ms", deletionCount, stopwatch.ElapsedMilliseconds);
    }
}