/// <summary>
/// Documentation samples for RavenDB patching: single-document patches (generic
/// session API, session Defer + PatchCommandData, store PatchOperation) and
/// set-based patches via PatchByQueryOperation. The #region names are consumed
/// by the docs build and must not be renamed.
/// </summary>
public PatchRequests()
{
    using (var store = new DocumentStore())
    using (var session = store.OpenSession())
    {
        #region patch_firstName_generic
        // change FirstName to Robert
        session.Advanced.Patch<Employee, string>(
            "employees/1",
            x => x.FirstName,
            "Robert");
        session.SaveChanges();
        #endregion

        #region patch_firstName_non_generic_session
        // change FirstName to Robert
        session.Advanced.Defer(new PatchCommandData(
            id: "employees/1",
            changeVector: null,
            patch: new PatchRequest
            {
                Script = @"this.FirstName = args.FirstName;",
                Values =
                {
                    { "FirstName", "Robert" }
                }
            },
            patchIfMissing: null));
        session.SaveChanges();
        #endregion

        #region patch_firstName_non_generic_store
        // change FirstName to Robert
        store.Operations.Send(new PatchOperation(
            id: "employees/1",
            changeVector: null,
            patch: new PatchRequest
            {
                Script = @"this.FirstName = args.FirstName;",
                Values =
                {
                    { "FirstName", "Robert" }
                }
            },
            patchIfMissing: null));
        #endregion

        #region patch_firstName_and_lastName_generic
        // change FirstName to Robert and LastName to Carter in single request
        // note that in this case, we create single request, but two separate batch operations
        // in order to achieve atomicity, please use the non generic APIs
        session.Advanced.Patch<Employee, string>("employees/1", x => x.FirstName, "Robert");
        session.Advanced.Patch<Employee, string>("employees/1", x => x.LastName, "Carter");
        session.SaveChanges();
        #endregion

        #region pathc_firstName_and_lastName_non_generic_session
        // change FirstName to Robert and LastName to Carter in single request
        // note that here we do maintain the atomicity of the operation
        session.Advanced.Defer(new PatchCommandData(
            id: "employees/1",
            changeVector: null,
            patch: new PatchRequest
            {
                // newlines in the script are required: without semicolons the two
                // assignments rely on JavaScript automatic semicolon insertion
                Script = @"this.FirstName = args.UserName.FirstName
                           this.LastName = args.UserName.LastName",
                Values =
                {
                    {
                        "UserName", new
                        {
                            FirstName = "Robert",
                            LastName = "Carter"
                        }
                    }
                }
            },
            patchIfMissing: null));
        session.SaveChanges();
        #endregion

        #region pathc_firstName_and_lastName_store
        // change FirstName to Robert and LastName to Carter in single request
        // note that here we do maintain the atomicity of the operation
        store.Operations.Send(new PatchOperation(
            id: "employees/1",
            changeVector: null,
            patch: new PatchRequest
            {
                Script = @"this.FirstName = args.UserName.FirstName
                           this.LastName = args.UserName.LastName",
                Values =
                {
                    {
                        "UserName", new
                        {
                            FirstName = "Robert",
                            LastName = "Carter"
                        }
                    }
                }
            },
            patchIfMissing: null));
        #endregion

        #region increment_age_generic
        // increment UnitsInStock property value by 10
        session.Advanced.Increment<Product, int>("products/1-A", x => x.UnitsInStock, 10);
        session.SaveChanges();
        #endregion

        #region increment_age_non_generic_session
        session.Advanced.Defer(new PatchCommandData(
            id: "products/1-A",
            changeVector: null,
            patch: new PatchRequest
            {
                Script = @"this.UnitsInStock += args.UnitsToAdd",
                Values =
                {
                    { "UnitsToAdd", 10 }
                }
            },
            patchIfMissing: null));
        session.SaveChanges();
        #endregion

        #region increment_age_non_generic_store
        store.Operations.Send(new PatchOperation(
            id: "products/1-A",
            changeVector: null,
            patch: new PatchRequest
            {
                Script = @"this.UnitsInStock += args.UnitsToAdd",
                Values =
                {
                    { "UnitsToAdd", 10 }
                }
            },
            patchIfMissing: null));
        #endregion

        #region remove_property_age_non_generic_session
        // remove property Age
        session.Advanced.Defer(new PatchCommandData(
            id: "employees/1",
            changeVector: null,
            patch: new PatchRequest
            {
                Script = @"delete this.Age"
            },
            patchIfMissing: null));
        session.SaveChanges();
        #endregion

        #region remove_property_age_store
        // remove property Age
        store.Operations.Send(new PatchOperation(
            id: "employees/1",
            changeVector: null,
            patch: new PatchRequest
            {
                Script = @"delete this.Age"
            },
            patchIfMissing: null));
        #endregion

        #region rename_property_age_non_generic_session
        // rename FirstName to First
        session.Advanced.Defer(new PatchCommandData(
            id: "employees/1",
            changeVector: null,
            patch: new PatchRequest
            {
                Script = @"var firstName = this[args.Rename.Old];
                           delete this[args.Rename.Old];
                           this[args.Rename.New] = firstName",
                Values =
                {
                    {
                        "Rename", new
                        {
                            Old = "FirstName",
                            New = "Name"
                        }
                    }
                }
            },
            patchIfMissing: null));
        session.SaveChanges();
        #endregion

        #region rename_property_age_store
        store.Operations.Send(new PatchOperation(
            id: "employees/1",
            changeVector: null,
            patch: new PatchRequest
            {
                Script = @"var firstName = this[args.Rename.Old];
                           delete this[args.Rename.Old];
                           this[args.Rename.New] = firstName",
                Values =
                {
                    {
                        "Rename", new
                        {
                            Old = "FirstName",
                            New = "Name"
                        }
                    }
                }
            },
            patchIfMissing: null));
        #endregion

        #region add_new_comment_to_comments_generic_session
        // add a new comment to Comments
        session.Advanced.Patch<BlogPost, BlogComment>("blogposts/1",
            x => x.Comments,
            comments => comments.Add(new BlogComment
            {
                Content = "Lore ipsum",
                Title = "Some title"
            }));
        session.SaveChanges();
        #endregion

        #region add_new_comment_to_comments_non_generic_session
        // add a new comment to Comments
        session.Advanced.Defer(new PatchCommandData(
            id: "blogposts/1",
            changeVector: null,
            patch: new PatchRequest
            {
                Script = "this.Comments.push(args.Comment)",
                Values =
                {
                    {
                        "Comment", new BlogComment
                        {
                            Content = "Lore ipsum",
                            Title = "Some title"
                        }
                    }
                }
            },
            patchIfMissing: null));
        session.SaveChanges();
        #endregion

        #region add_new_comment_to_comments_store
        // add a new comment to Comments
        store.Operations.Send(new PatchOperation(
            id: "blogposts/1",
            changeVector: null,
            patch: new PatchRequest
            {
                Script = "this.Comments.push(args.Comment)",
                Values =
                {
                    {
                        "Comment", new BlogComment
                        {
                            Content = "Lore ipsum",
                            Title = "Some title"
                        }
                    }
                }
            },
            patchIfMissing: null));
        #endregion

        #region insert_new_comment_at_position_1_session
        // insert a new comment at position 1 to Comments
        session.Advanced.Defer(new PatchCommandData(
            id: "blogposts/1",
            changeVector: null,
            patch: new PatchRequest
            {
                Script = "this.Comments.splice(1,0,args.Comment)",
                Values =
                {
                    {
                        "Comment", new BlogComment
                        {
                            Content = "Lore ipsum",
                            Title = "Some title"
                        }
                    }
                }
            },
            patchIfMissing: null));
        session.SaveChanges();
        #endregion

        #region insert_new_comment_at_position_1_store
        store.Operations.Send(new PatchOperation(
            id: "blogposts/1",
            changeVector: null,
            patch: new PatchRequest
            {
                Script = "this.Comments.splice(1,0,args.Comment)",
                Values =
                {
                    {
                        "Comment", new BlogComment
                        {
                            Content = "Lore ipsum",
                            Title = "Some title"
                        }
                    }
                }
            },
            patchIfMissing: null));
        #endregion

        #region modify_a_comment_at_position_3_in_comments_session
        // modify a comment at position 3 in Comments
        session.Advanced.Defer(new PatchCommandData(
            id: "blogposts/1",
            changeVector: null,
            patch: new PatchRequest
            {
                // splice(3,1,...) removes the element at index 3 and inserts the replacement
                Script = "this.Comments.splice(3,1,args.Comment)",
                Values =
                {
                    {
                        "Comment", new BlogComment
                        {
                            Content = "Lore ipsum",
                            Title = "Some title"
                        }
                    }
                }
            },
            patchIfMissing: null));
        session.SaveChanges();
        #endregion

        #region modify_a_comment_at_position_3_in_comments_store
        // modify a comment at position 3 in Comments
        store.Operations.Send(new PatchOperation(
            id: "blogposts/1",
            changeVector: null,
            patch: new PatchRequest
            {
                // fixed: was splice(1,0,...) (insert at position 1, copy-paste from
                // the insert sample); modifying position 3 requires splice(3,1,...)
                Script = "this.Comments.splice(3,1,args.Comment)",
                Values =
                {
                    {
                        "Comment", new BlogComment
                        {
                            Content = "Lore ipsum",
                            Title = "Some title"
                        }
                    }
                }
            },
            patchIfMissing: null));
        #endregion

        #region filter_items_from_array_session
        // filter out all comments of a blogpost which contains the word "wrong" in their contents
        session.Advanced.Defer(new PatchCommandData(
            id: "blogposts/1",
            changeVector: null,
            patch: new PatchRequest
            {
                // fixed: negation added — without '!' the filter KEPT the matching
                // comments instead of removing them, contradicting 'TitleToRemove'
                Script = @"this.Comments = this.Comments.filter(comment => !comment.Content.includes(args.TitleToRemove));",
                Values =
                {
                    { "TitleToRemove", "wrong" }
                }
            },
            patchIfMissing: null));
        session.SaveChanges();
        #endregion

        #region filter_items_from_array_store
        // filter out all comments of a blogpost which contains the word "wrong" in their contents
        store.Operations.Send(new PatchOperation(
            id: "blogposts/1",
            changeVector: null,
            patch: new PatchRequest
            {
                // fixed: negation added — see the session variant above
                Script = @"this.Comments = this.Comments.filter(comment => !comment.Content.includes(args.TitleToRemove));",
                Values =
                {
                    { "TitleToRemove", "wrong" }
                }
            },
            patchIfMissing: null));
        #endregion

        #region update_product_name_in_order_session
        // update product names in order, according to loaded product documents
        session.Advanced.Defer(new PatchCommandData(
            id: "orders/1",
            changeVector: null,
            patch: new PatchRequest
            {
                Script = @"this.Lines.forEach(line => {
                               var productDoc = load(line.Product);
                               line.ProductName = productDoc.Name;
                           });"
            },
            patchIfMissing: null));
        session.SaveChanges();
        #endregion

        #region update_product_name_in_order_store
        // update product names in order, according to loaded product documents
        store.Operations.Send(new PatchOperation(
            id: "orders/1", // fixed: was "blogposts/1" (copy-paste) — this patch targets an order document
            changeVector: null,
            patch: new PatchRequest
            {
                Script = @"this.Lines.forEach(line => {
                               var productDoc = load(line.Product);
                               line.ProductName = productDoc.Name;
                           });"
            },
            patchIfMissing: null));
        #endregion
    }

    using (var store = new DocumentStore())
    {
        #region update_value_in_whole_collection
        // increase by 10 Freight field in all orders
        var operation = store
            .Operations
            .Send(new PatchByQueryOperation(@"from Orders as o
                                              update
                                              {
                                                  o.Freight += 10;
                                              }"));
        // Wait for the operation to be complete on the server side.
        // Not waiting for completion will not harm the patch process and it will continue running to completion.
        operation.WaitForCompletion();
        #endregion
    }

    using (var store = new DocumentStore())
    {
        #region update-value-by-dynamic-query
        // set discount to all orders that was processed by a specific employee
        // NOTE(review): the script references args.EmployeeToUpdate but no query
        // parameters are supplied here — confirm against the IndexQuery overload
        var operation = store
            .Operations
            .Send(new PatchByQueryOperation(@"from Orders as o
                                              where o.Employee = args.EmployeeToUpdate
                                              update
                                              {
                                                  o.Lines.forEach(line => line.Discount = 0.3);
                                              }"));
        operation.WaitForCompletion();
        #endregion
    }

    using (var store = new DocumentStore())
    {
        #region update-value-by-index-query
        // switch all products with supplier 'suppliers/12-A' with 'suppliers/13-A'
        var operation = store
            .Operations
            .Send(new PatchByQueryOperation(new IndexQuery
            {
                Query = @"from index 'Product/Search' as p
                          where p.Supplier = 'suppliers/12-A'
                          update
                          {
                              p.Supplier = 'suppliers/13-A'
                          }"
            }));
        operation.WaitForCompletion();
        #endregion
    }

    using (var store = new DocumentStore())
    {
        #region update-on-stale-results
        // patch on stale results
        var operation = store
            .Operations
            .Send(new PatchByQueryOperation(new IndexQuery
            {
                Query = @"from Orders as o
                          where o.Company = 'companies/12-A'
                          update
                          {
                              o.Company = 'companies/13-A'
                          }"
            },
            new QueryOperationOptions
            {
                AllowStale = true
            }));
        operation.WaitForCompletion();
        #endregion
    }

    using (var store = new DocumentStore())
    {
        #region report_progress_on_patch
        // report progress during patch processing
        var operation = store
            .Operations
            .Send(new PatchByQueryOperation(new IndexQuery
            {
                Query = @"from Orders as o
                          where o.Company = 'companies/12-A'
                          update
                          {
                              o.Company = 'companies/13-A'
                          }"
            },
            new QueryOperationOptions
            {
                AllowStale = true
            }));
        operation.OnProgressChanged = x =>
        {
            DeterminateProgress progress = (DeterminateProgress)x;
            // fixed: the Processed/Total values were swapped in the original message
            Console.WriteLine($"Progress: Processed:{progress.Processed}; Total:{progress.Total}");
        };
        operation.WaitForCompletion();
        #endregion
    }

    using (var store = new DocumentStore())
    {
        #region patch-request-with-details
        // perform patch and create summary of processing statuses
        var operation = store
            .Operations
            .Send(new PatchByQueryOperation(new IndexQuery
            {
                Query = @"from Orders as o
                          where o.Company = 'companies/12-A'
                          update
                          {
                              o.Company = 'companies/13-A'
                          }"
            },
            new QueryOperationOptions
            {
                RetrieveDetails = true
            }));
        var result = operation.WaitForCompletion<BulkOperationResult>();
        var formattedResults = result.Details
            .Select(x => (BulkOperationResult.PatchDetails)x)
            .GroupBy(x => x.Status)
            .Select(x => $"{x.Key}: {x.Count()}").ToList();
        formattedResults.ForEach(Console.WriteLine);
        #endregion
    }

    using (var store = new DocumentStore())
    {
        #region change-collection-name
        // delete the document before recreating it with a different collection name
        var operation = store
            .Operations
            .Send(new PatchByQueryOperation(new IndexQuery
            {
                Query = @"from Orders as c
                          update
                          {
                              del(id(c));
                              this[""@metadata""][""@collection""] = ""New_Orders"";
                              put(id(c), this);
                          }"
            }));
        operation.WaitForCompletion();
        #endregion
    }

    using (var store = new DocumentStore())
    {
        #region change-all-documents
        // perform a patch on all documents using @all_docs keyword
        var operation = store
            .Operations
            .Send(new PatchByQueryOperation(new IndexQuery
            {
                Query = @"from @all_docs
                          update
                          {
                              this.Updated = true;
                          }"
            }));
        operation.WaitForCompletion();
        #endregion
    }

    using (var store = new DocumentStore())
    {
        #region patch-by-id
        // perform a patch by document ID
        var operation = store
            .Operations
            .Send(new PatchByQueryOperation(new IndexQuery
            {
                Query = @"from @all_docs as d
                          where id() in ('orders/1-A', 'companies/1-A')
                          update
                          {
                              d.Updated = true;
                          }"
            }));
        operation.WaitForCompletion();
        #endregion
    }

    using (var store = new DocumentStore())
    {
        #region patch-by-id-using-parameters
        // perform a patch by document ID
        var operation = store
            .Operations
            .Send(new PatchByQueryOperation(new IndexQuery
            {
                QueryParameters = new Parameters
                {
                    { "ids", new[] { "orders/1-A", "companies/1-A" } }
                },
                Query = @"from @all_docs as d
                          where id() in ($ids)
                          update
                          {
                              d.Updated = true;
                          }"
            }));
        operation.WaitForCompletion();
        #endregion
    }

    using (var store = new DocumentStore())
    {
        using (var session = store.OpenSession())
        {
            #region add_document_session
            // create a new document from inside the patch script ('orders/' gets a server-generated id)
            session.Advanced.Defer(new PatchCommandData("employees/1-A", null, new PatchRequest
            {
                Script = "put('orders/', { Employee: id(this) });",
            }, null));
            session.SaveChanges();
            #endregion

            #region clone_document_session
            // clone the patched document into a new one
            session.Advanced.Defer(new PatchCommandData("employees/1-A", null, new PatchRequest
            {
                Script = "put('employees/', this);",
            }, null));
            session.SaveChanges();
            #endregion
        }

        #region add_document_store
        store.Operations.Send(new PatchOperation("employees/1-A", null, new PatchRequest
        {
            Script = "put('orders/', { Employee: id(this) });",
        }));
        #endregion

        #region clone_document_store
        store.Operations.Send(new PatchOperation("employees/1-A", null, new PatchRequest
        {
            Script = "put('employees/', this);",
        }));
        #endregion
    }
}
// Verifies that a long-running database operation raises OperationStatusChange
// notifications: one per onProgress call while InProgress, then a final Completed
// change carrying the result. The ManualResetEventSlim steps the background task
// forward so each notification can be asserted in isolation.
public void Can_notify_about_operations_progress_and_completion()
{
    using (var db = CreateDocumentDatabase())
    {
        // 2-minute hard timeout for the operation; no external cancellation sources.
        var token = new OperationCancelToken(TimeSpan.FromMinutes(2), CancellationToken.None, CancellationToken.None);
        // Captures every status-change event so the test thread can consume them in order.
        var notifications = new BlockingCollection<OperationStatusChange>();
        var mre = new ManualResetEventSlim(false);
        var operationId = db.Operations.GetNextOperationId();
        db.Changes.OnOperationStatusChange += notifications.Add;
        db.Operations.AddOperation(null, "Operations Test", (Raven.Server.Documents.Operations.Operations.OperationType)0,
            onProgress => Task.Factory.StartNew<IOperationResult>(() =>
            {
                var p = new DeterminateProgress
                {
                    Total = 1024,
                    Processed = 0
                };
                // First progress report (0/1024), then block until the test thread
                // has asserted it and called mre.Set().
                onProgress(p);
                mre.Wait(token.Token);
                mre.Reset();
                // Second progress report (500/1024); block again before completing.
                p.Processed = 500;
                onProgress(p);
                mre.Wait(token.Token);
                return (new SampleOperationResult
                {
                    Message = "I'm done"
                });
            }), operationId, token: token);

        // 1st notification: InProgress, 0/1024, no result yet.
        OperationStatusChange change;
        Assert.True(notifications.TryTake(out change, TimeSpan.FromSeconds(1)));
        Assert.NotNull(change.OperationId);
        Assert.Equal(OperationStatus.InProgress, change.State.Status);
        Assert.Null(change.State.Result);
        var progress = change.State.Progress as DeterminateProgress;
        Assert.NotNull(progress);
        Assert.Equal(1024, progress.Total);
        Assert.Equal(0, progress.Processed);

        // Release the operation so it can report the second progress value.
        mre.Set();

        // 2nd notification: InProgress, 500/1024.
        Assert.True(notifications.TryTake(out change, TimeSpan.FromSeconds(1)));
        Assert.NotNull(change.OperationId);
        Assert.Equal(OperationStatus.InProgress, change.State.Status);
        Assert.Null(change.State.Result);
        progress = change.State.Progress as DeterminateProgress;
        Assert.NotNull(progress);
        Assert.Equal(1024, progress.Total);
        Assert.Equal(500, progress.Processed);

        // Release the operation so it can complete.
        mre.Set();

        // 3rd notification: Completed — result is present, progress is cleared.
        Assert.True(notifications.TryTake(out change, TimeSpan.FromSeconds(1)));
        Assert.NotNull(change.OperationId);
        Assert.Equal(OperationStatus.Completed, change.State.Status);
        Assert.NotNull(change.State.Result);
        Assert.Null(change.State.Progress);
        var result = change.State.Result as SampleOperationResult;
        Assert.NotNull(result);
        Assert.Equal("I'm done", result.Message);
    }
}
/// <summary>
/// Runs <paramref name="action"/> against every document of a collection (or all
/// documents) in etag order, honoring paging (<paramref name="start"/>/<paramref name="take"/>),
/// an optional ops-per-second rate gate, and cancellation. Document ids are gathered
/// in read transactions and then executed in batches through the transaction merger.
/// Returns a BulkOperationResult whose Total is the number of processed documents.
/// </summary>
protected async Task <IOperationResult> ExecuteOperation(string collectionName, long start, long take, CollectionOperationOptions options, DocumentsOperationContext context, Action <DeterminateProgress> onProgress, Func <string, TransactionOperationsMerger.MergedTransactionCommand> action, OperationCancelToken token)
{
    var progress = new DeterminateProgress();
    var cancellationToken = token.Token;
    var isAllDocs = collectionName == Constants.Documents.Collections.AllDocumentsCollection;

    long lastEtag;
    long totalCount;
    // Snapshot the upper etag bound and total count up front; documents written
    // after this point (including our own patches) are excluded by the lastEtag check.
    using (context.OpenReadTransaction())
    {
        lastEtag = GetLastEtagForCollection(context, collectionName, isAllDocs);
        totalCount = GetTotalCountForCollection(context, collectionName, isAllDocs);
    }

    progress.Total = totalCount;
    // send initial progress with total count set, and 0 as processed count
    onProgress(progress);

    long startEtag = 0;
    var alreadySeenIdsCount = new Reference <long>();
    string startAfterId = null;

    // rateGate is null when no MaxOpsPerSecond throttle was requested.
    using (var rateGate = options.MaxOpsPerSecond.HasValue ? new RateGate(options.MaxOpsPerSecond.Value, TimeSpan.FromSeconds(1)) : null)
    {
        var end = false;
        var ids = new Queue <string>(OperationBatchSize);

        while (startEtag <= lastEtag)
        {
            cancellationToken.ThrowIfCancellationRequested();

            ids.Clear();

            // Test hook: lets tests interleave writes between batches.
            Database.ForTestingPurposes?.CollectionRunnerBeforeOpenReadTransaction?.Invoke();

            // Phase 1: collect up to OperationBatchSize ids under a read transaction.
            using (context.OpenReadTransaction())
            {
                foreach (var document in GetDocuments(context, collectionName, startEtag, startAfterId, alreadySeenIdsCount, OperationBatchSize, isAllDocs, DocumentFields.Id, out bool isStartsWithOrIdQuery))
                {
                    using (document)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        token.Delay();

                        // Skip HiLo system documents when iterating @all_docs.
                        if (isAllDocs && document.Id.StartsWith(HiLoHandler.RavenHiloIdPrefix, StringComparison.OrdinalIgnoreCase))
                        {
                            continue;
                        }

                        // start with and id queries aren't ordered by the etag
                        if (isStartsWithOrIdQuery == false && document.Etag > lastEtag)
                        {
                            // we don't want to go over the documents that we have patched
                            end = true;
                            break;
                        }

                        startEtag = document.Etag + 1;

                        // Paging: skip the first 'start' documents...
                        if (start > 0)
                        {
                            start--;
                            continue;
                        }

                        // ...and stop after 'take' documents have been queued.
                        if (take-- <= 0)
                        {
                            end = true;
                            break;
                        }

                        startAfterId = document.Id;
                        ids.Enqueue(document.Id);

                        // Release the document from the transaction cache; only the id is needed.
                        context.Transaction.ForgetAbout(document);
                    }
                }
            }

            if (ids.Count == 0)
            {
                break;
            }

            // Phase 2: execute the queued ids (the command dequeues from 'ids' as it
            // runs, limited by rate gate and max transaction size) until all are done.
            do
            {
                var command = new ExecuteRateLimitedOperations <string>(ids, action, rateGate, token,
                    maxTransactionSize: 16 * Voron.Global.Constants.Size.Megabyte,
                    batchSize: OperationBatchSize);

                await Database.TxMerger.Enqueue(command);

                progress.Processed += command.Processed;
                onProgress(progress);

                if (command.NeedWait)
                {
                    rateGate?.WaitToProceed();
                }

                token.Delay();
            } while (ids.Count > 0);

            if (end)
            {
                break;
            }
        }
    }

    return (new BulkOperationResult
    {
        Total = progress.Processed
    });
}
/// <summary>
/// Variant of ExecuteOperation keyed by LazyStringValue ids: iterates a collection
/// (or all documents) in etag order, batching ids and executing
/// <paramref name="action"/> through the transaction merger, with optional
/// rate limiting and cancellation. Unlike the string-id overload, the batch is
/// enqueued while the read transaction is still open (the LazyStringValue ids
/// remain valid only within it). Returns a BulkOperationResult with the
/// processed count.
/// </summary>
protected async Task <IOperationResult> ExecuteOperation(string collectionName, CollectionOperationOptions options, DocumentsOperationContext context, Action <DeterminateProgress> onProgress, Func <LazyStringValue, TransactionOperationsMerger.MergedTransactionCommand> action, OperationCancelToken token)
{
    const int batchSize = 1024;
    var progress = new DeterminateProgress();
    var cancellationToken = token.Token;
    var isAllDocs = collectionName == Constants.Documents.Collections.AllDocumentsCollection;

    long lastEtag;
    long totalCount;
    // Snapshot the etag upper bound so documents we patch (which get new etags)
    // are not re-processed.
    using (context.OpenReadTransaction())
    {
        lastEtag = GetLastEtagForCollection(context, collectionName, isAllDocs);
        totalCount = GetTotalCountForCollection(context, collectionName, isAllDocs);
    }

    progress.Total = totalCount;
    // send initial progress with total count set, and 0 as processed count
    onProgress(progress);

    long startEtag = 0;
    using (var rateGate = options.MaxOpsPerSecond.HasValue ? new RateGate(options.MaxOpsPerSecond.Value, TimeSpan.FromSeconds(1)) : null)
    {
        var end = false;

        while (startEtag <= lastEtag)
        {
            cancellationToken.ThrowIfCancellationRequested();

            using (context.OpenReadTransaction())
            {
                // Gather up to batchSize document ids starting at startEtag.
                var ids = new Queue <LazyStringValue>(batchSize);

                foreach (var document in GetDocuments(context, collectionName, startEtag, batchSize, isAllDocs))
                {
                    cancellationToken.ThrowIfCancellationRequested();
                    token.Delay();

                    // Skip HiLo system documents when iterating @all_docs.
                    if (isAllDocs && IsHiLoDocument(document))
                    {
                        continue;
                    }

                    if (document.Etag > lastEtag) // we don't want to go over the documents that we have patched
                    {
                        end = true;
                        break;
                    }

                    startEtag = document.Etag + 1;

                    ids.Enqueue(document.Id);
                }

                if (ids.Count == 0)
                {
                    break;
                }

                // Execute the batch; the command drains 'ids' as it runs, pausing on
                // the rate gate when the throttle is exhausted.
                do
                {
                    var command = new ExecuteRateLimitedOperations <LazyStringValue>(ids, action, rateGate, token);

                    await Database.TxMerger.Enqueue(command);

                    progress.Processed += command.Processed;
                    onProgress(progress);

                    if (command.NeedWait)
                    {
                        rateGate?.WaitToProceed();
                    }
                } while (ids.Count > 0);

                if (end)
                {
                    break;
                }
            }
        }
    }

    return (new BulkOperationResult
    {
        Total = progress.Processed
    });
}
/// <summary>
/// Runs <paramref name="action"/> on every document matched by an index query,
/// materializing all result keys first, then applying the action in write
/// transactions of up to 1024 operations, with an optional ops-per-second rate
/// gate. Throws InvalidOperationException for map-reduce indexes or (when
/// AllowStale is false) stale results. Returns a BulkOperationResult whose
/// Total is the matched-document count.
/// </summary>
private async Task <IOperationResult> ExecuteOperation(string indexName, IndexQueryServerSide query, QueryOperationOptions options, DocumentsOperationContext context, Action <DeterminateProgress> onProgress, Action <string> action, OperationCancelToken token)
{
    var index = GetIndex(indexName);
    if (index.Type.IsMapReduce())
    {
        throw new InvalidOperationException("Cannot execute bulk operation on Map-Reduce indexes.");
    }

    query = ConvertToOperationQuery(query, options);

    const int BatchSize = 1024;

    RavenTransaction tx = null;
    var operationsInCurrentBatch = 0;
    List <string> resultKeys;
    // Materialize every matched key before mutating anything, so the patch does
    // not interfere with the query's own iteration.
    try
    {
        var results = await index.Query(query, context, token).ConfigureAwait(false);
        if (options.AllowStale == false && results.IsStale)
        {
            throw new InvalidOperationException("Cannot perform bulk operation. Query is stale.");
        }

        resultKeys = new List <string>(results.Results.Count);
        foreach (var document in results.Results)
        {
            resultKeys.Add(document.Key.ToString());
        }
    }
    finally //make sure to close tx if DocumentConflictException is thrown
    {
        context.CloseTransaction();
    }

    var progress = new DeterminateProgress
    {
        Total = resultKeys.Count,
        Processed = 0
    };

    onProgress(progress);

    using (var rateGate = options.MaxOpsPerSecond.HasValue ? new RateGate(options.MaxOpsPerSecond.Value, TimeSpan.FromSeconds(1)) : null)
    {
        foreach (var document in resultKeys)
        {
            // Throttle exhausted: commit the in-flight transaction before blocking,
            // so we don't hold a write transaction open while waiting.
            if (rateGate != null && rateGate.WaitToProceed(0) == false)
            {
                using (tx)
                {
                    tx?.Commit();
                }

                tx = null;

                rateGate.WaitToProceed();
            }

            // Lazily (re)open a write transaction for the next batch.
            if (tx == null)
            {
                operationsInCurrentBatch = 0;
                tx = context.OpenWriteTransaction();
            }

            action(document);

            operationsInCurrentBatch++;
            progress.Processed++;

            // Report progress every 128 documents to bound notification overhead.
            if (progress.Processed % 128 == 0)
            {
                onProgress(progress);
            }

            if (operationsInCurrentBatch < BatchSize)
            {
                continue;
            }

            // Batch full: commit and force a fresh transaction on the next iteration.
            using (tx)
            {
                tx.Commit();
            }

            tx = null;
        }
    }

    // Commit whatever remains in the final partial batch (tx may be null).
    using (tx)
    {
        tx?.Commit();
    }

    return (new BulkOperationResult
    {
        Total = progress.Total
    });
}
/// <summary>
/// Creates a result collector for a document-id query.
/// </summary>
/// <param name="progress">Mutable counter updated as ids are gathered.</param>
/// <param name="onProgress">Callback through which progress updates are published.</param>
/// <param name="token">Cancellation/timeout token for the running operation.</param>
public DocumentIdQueryResult(DeterminateProgress progress, Action <DeterminateProgress> onProgress, OperationCancelToken token)
{
    // Plain field capture; no validation is part of the contract.
    _token = token;
    _onProgress = onProgress;
    _progress = progress;
}
/// <summary>
/// Synchronously applies <paramref name="action"/> to every document of a
/// collection in etag order, committing a write transaction per batch of up to
/// 1024 documents, with an optional ops-per-second rate gate. Returns a
/// BulkOperationResult with the processed count.
/// </summary>
private IOperationResult ExecuteOperation(string collectionName, CollectionOperationOptions options, DocumentsOperationContext context, Action <DeterminateProgress> onProgress, Action <string> action, OperationCancelToken token)
{
    const int batchSize = 1024;
    var progress = new DeterminateProgress();
    var cancellationToken = token.Token;

    long lastEtag;
    long totalCount;
    // Snapshot the etag upper bound so documents modified by this very operation
    // (which receive new etags) are not processed again.
    using (context.OpenReadTransaction())
    {
        lastEtag = _database.DocumentsStorage.GetLastDocumentEtag(context, collectionName);
        _database.DocumentsStorage.GetNumberOfDocumentsToProcess(context, collectionName, 0, out totalCount);
    }

    progress.Total = totalCount;

    long startEtag = 0;
    using (var rateGate = options.MaxOpsPerSecond.HasValue ? new RateGate(options.MaxOpsPerSecond.Value, TimeSpan.FromSeconds(1)) : null)
    {
        bool done = false;
        //The reason i do this nested loop is because i can't operate on a document while iterating the document tree.
        while (startEtag <= lastEtag)
        {
            cancellationToken.ThrowIfCancellationRequested();
            bool wait = false;

            using (var tx = context.OpenWriteTransaction())
            {
                // Materialize the batch (ToList) before mutating — see comment above.
                var documents = _database.DocumentsStorage.GetDocumentsFrom(context, collectionName, startEtag, 0, batchSize).ToList();
                foreach (var document in documents)
                {
                    cancellationToken.ThrowIfCancellationRequested();

                    if (document.Etag > lastEtag)// we don't want to go over the documents that we have patched
                    {
                        done = true;
                        break;
                    }

                    // Throttle exhausted: stop before processing this document; since
                    // startEtag was not advanced to it, it is retried next iteration.
                    if (rateGate != null && rateGate.WaitToProceed(0) == false)
                    {
                        wait = true;
                        break;
                    }

                    // NOTE(review): other overloads advance with document.Etag + 1;
                    // presumably GetDocumentsFrom treats startEtag as exclusive here,
                    // otherwise the last processed document would be re-fetched — confirm.
                    startEtag = document.Etag;

                    action(document.Key);

                    progress.Processed++;
                }

                // Commit what was processed in this batch, then report progress.
                tx.Commit();

                onProgress(progress);

                // Block outside the per-document loop until the rate gate refills.
                if (wait)
                {
                    rateGate.WaitToProceed();
                }

                if (done || documents.Count == 0)
                {
                    break;
                }
            }
        }
    }

    return (new BulkOperationResult
    {
        Total = progress.Processed
    });
}