public void CreateRequest()
{
    using (var store = GetDocumentStore())
    {
        var executor = store.GetRequestExecutor();
        using (executor.ContextPool.AllocateOperationContext(out JsonOperationContext context))
        {
            // A fake server node is sufficient: we only inspect the URL that
            // CreateRequest builds, the request itself is never sent.
            var node = new ServerNode
            {
                ClusterTag = "A",
                Database = "dummy",
                ServerRole = ServerNode.Role.Member,
                Url = "http://dummy:1234"
            };

            // A single-node batch must not be routed through the cluster ("raft") endpoint...
            var singleNodeBatch = new SingleNodeBatchCommand(store.Conventions, context, new List<ICommandData>());
            var request = executor.CreateRequest(context, node, singleNodeBatch, out _);
            Assert.DoesNotContain("raft", request.RequestUri.ToString());

            // ...while a cluster-wide batch must be.
            var clusterWideBatch = new ClusterWideBatchCommand(store.Conventions, new List<ICommandData>());
            request = executor.CreateRequest(context, node, clusterWideBatch, out _);
            Assert.Contains("raft", request.RequestUri.ToString());
        }
    }
}
/// <summary>
/// Sends the given ETL command batch to the destination database in a single
/// batch request and returns the number of commands sent.
/// </summary>
/// <param name="items">The commands to load; the caller always passes an already-materialized <see cref="List{T}"/>.</param>
/// <param name="context">Operation context used for the batch request.</param>
/// <param name="scope">ETL stats scope for this load (not consumed directly here).</param>
/// <returns>The number of commands that were sent.</returns>
/// <exception cref="NotSupportedException">When a time-series command targets an incremental time series.</exception>
protected override int LoadInternal(IEnumerable<ICommandData> items, DocumentsOperationContext context, EtlStatsScope scope)
{
    // The pipeline hands us the materialized list it built; assert that
    // assumption instead of re-materializing the sequence.
    var commands = items as List<ICommandData>;
    Debug.Assert(commands != null);

    if (commands.Count == 0)
        return 0;

    if (ShouldTrackTimeSeries())
    {
        foreach (var command in commands)
        {
            if (command is TimeSeriesBatchCommandData tsbc)
            {
                // Incremental time series cannot be loaded by ETL - reject early
                // with a clear error. (Fixed message grammar: "isn't supported".)
                if (TimeSeriesHandler.CheckIfIncrementalTs(tsbc.Name))
                {
                    throw new NotSupportedException($"Load isn't supported for incremental time series '{tsbc.Name}' at document '{tsbc.Id}'");
                }
            }
        }
    }

    // Apply the configured load timeout, when one is set.
    BatchOptions options = null;
    if (Configuration.LoadRequestTimeoutInSec != null)
    {
        options = new BatchOptions
        {
            RequestTimeout = TimeSpan.FromSeconds(Configuration.LoadRequestTimeoutInSec.Value)
        };
    }

    using (var batchCommand = new SingleNodeBatchCommand(DocumentConventions.DefaultForServer, context, commands, options))
    {
        var duration = Stopwatch.StartNew();

        try
        {
            BeforeActualLoad?.Invoke(this);

            AsyncHelpers.RunSync(() => _requestExecutor.ExecuteAsync(batchCommand, context, token: CancellationToken));
            _recentUrl = _requestExecutor.Url;

            return commands.Count;
        }
        catch (OperationCanceledException e)
        {
            // A request timeout also surfaces as OperationCanceledException;
            // distinguish it from a genuine cancellation so the caller gets a
            // meaningful timeout error instead of a bare cancellation.
            if (CancellationToken.IsCancellationRequested == false)
            {
                ThrowTimeoutException(commands.Count, duration.Elapsed, e);
            }

            throw;
        }
    }
}
// Documentation samples: the #region markers (batch_3 / batch_3_async) are
// extracted verbatim into the published docs, so the code text must stay as-is.
public async Task Examples()
{
    using (var documentStore = new DocumentStore())
    {
        #region batch_3
        using (var session = documentStore.OpenSession())
        {
            // One put, one patch and one delete, batched together.
            var commands = new List <ICommandData>
            {
                new PutCommandData("users/3", null, new DynamicJsonValue
                {
                    ["Name"] = "James"
                }),
                new PatchCommandData("users/1-A", null, new PatchRequest
                {
                    Script = "this.Name = 'Nhoj';"
                }, null),
                new DeleteCommandData("users/2-A", null)
            };

            // By using SingleNodeBatchCommand,
            // multiple commands can be executed in a single request
            // and several operations can share the same transaction.
            var batch = new SingleNodeBatchCommand(documentStore.Conventions, session.Advanced.Context, commands);
            session.Advanced.RequestExecutor.Execute(batch, session.Advanced.Context);
        }
        #endregion

        #region batch_3_async
        using (var session = documentStore.OpenAsyncSession())
        {
            // One put, one patch and one delete, batched together.
            var commands = new List <ICommandData>
            {
                new PutCommandData("users/3", null, new DynamicJsonValue
                {
                    ["Name"] = "James"
                }),
                new PatchCommandData("users/1-A", null, new PatchRequest
                {
                    Script = "this.Name = 'Nhoj';"
                }, null),
                new DeleteCommandData("users/2-A", null)
            };

            // By using SingleNodeBatchCommand,
            // multiple commands can be executed in a single request
            // and several operations can share the same transaction.
            var batch = new SingleNodeBatchCommand(documentStore.Conventions, session.Advanced.Context, commands);
            await session.Advanced.RequestExecutor.ExecuteAsync(batch, session.Advanced.Context);
        }
        #endregion
    }
}
/// <summary>
/// Sends the given command batch to the destination database in a single
/// batch request and returns the number of commands sent.
/// </summary>
protected override int LoadInternal(IEnumerable<ICommandData> items, DocumentsOperationContext context)
{
    // The caller always passes the already-materialized command list.
    var commandList = items as List<ICommandData>;
    Debug.Assert(commandList != null);

    if (commandList.Count == 0)
        return 0;

    // Honor the configured load timeout, when one is set.
    BatchOptions batchOptions = null;
    if (Configuration.LoadRequestTimeoutInSec.HasValue)
    {
        batchOptions = new BatchOptions
        {
            RequestTimeout = TimeSpan.FromSeconds(Configuration.LoadRequestTimeoutInSec.Value)
        };
    }

    using (var batch = new SingleNodeBatchCommand(DocumentConventions.DefaultForServer, context, commandList, batchOptions))
    {
        var stopwatch = Stopwatch.StartNew();
        try
        {
            BeforeActualLoad?.Invoke(this);

            AsyncHelpers.RunSync(() => _requestExecutor.ExecuteAsync(batch, context, token: CancellationToken));
            _recentUrl = _requestExecutor.Url;

            return commandList.Count;
        }
        catch (OperationCanceledException e)
        {
            // Cancellation without a pending cancellation request means the
            // batch request timed out.
            if (CancellationToken.IsCancellationRequested == false)
                ThrowTimeoutException(commandList.Count, stopwatch.Elapsed, e);

            throw;
        }
    }
}
/// <summary>
/// Executes the given commands as one single-node batch request.
/// </summary>
public async Task BatchAsync(List<ICommandData> commands)
{
    await RequestExecutor.ExecuteAsync(
        new SingleNodeBatchCommand(_store.Conventions, Context, commands),
        Context);
}
// Verifies that putting different attachment content under the same name on two
// databases produces a replication conflict, and that re-putting the document
// (via a raw SingleNodeBatchCommand) resolves it.
public async Task PutDifferentAttachmentsShouldConflict()
{
    // Two stores with an empty ConflictSolver config, so conflicts are surfaced
    // rather than auto-resolved.
    using (var store1 = GetDocumentStore(options: new Options
    {
        ModifyDatabaseRecord = record =>
        {
            record.ConflictSolverConfig = new ConflictSolver();
        }
    }))
    using (var store2 = GetDocumentStore(options: new Options
    {
        ModifyDatabaseRecord = record =>
        {
            record.ConflictSolverConfig = new ConflictSolver();
        }
    }))
    {
        // Fixed database ids make change-vector ordering deterministic.
        await SetDatabaseId(store1, new Guid("00000000-48c4-421e-9466-000000000000"));
        await SetDatabaseId(store2, new Guid("99999999-48c4-421e-9466-999999999999"));

        using (var session = store1.OpenAsyncSession())
        {
            var x = new User
            {
                Name = "Fitzchak"
            };
            await session.StoreAsync(x, "users/1");
            await session.SaveChangesAsync();

            // Same attachment name, different bytes on each store -> conflicting.
            using (var a1 = new MemoryStream(new byte[] { 1, 2, 3 }))
            {
                await store1.Operations.SendAsync(new PutAttachmentOperation("users/1", "a1", a1, "a1/png"));
            }

            using (var session2 = store2.OpenSession())
            {
                session2.Store(new User
                {
                    Name = "Fitzchak"
                }, "users/1");
                session2.SaveChanges();

                using (var a2 = new MemoryStream(new byte[] { 1, 2, 3, 4, 5 }))
                {
                    store2.Operations.Send(new PutAttachmentOperation("users/1", "a1", a2, "a1/png"));
                }

                await SetupReplicationAsync(store1, store2);

                // users/2 acts as a replication marker: once it arrives, users/1
                // (written earlier) must have replicated too.
                await session.StoreAsync(new User
                {
                    Name = "Toli"
                }, "users/2");
                await session.SaveChangesAsync();

                WaitForDocumentToReplicate <User>(store2, "users/2", 3000);

                var conflicts = (await store2.Commands().GetConflictsForAsync("users/1")).ToList();
                Assert.Equal(2, conflicts.Count);

                var requestExecutor = store2.GetRequestExecutor();
                using (var context = JsonOperationContext.ShortTermSingleUse())
                using (var stringStream = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(_conflictedDocument)))
                using (var blittableJson = await context.ReadForMemoryAsync(stringStream, "Reading of foo/bar"))
                {
                    // Re-put users/1 through a raw batch command; a put over a
                    // conflicted document resolves the conflict.
                    // NOTE(review): this uses DocumentConventions.Default rather
                    // than the conventions used elsewhere in this file - presumably
                    // intentional for a raw client-side command; confirm.
                    var result = new InMemoryDocumentSessionOperations.SaveChangesData((InMemoryDocumentSessionOperations)session2);
                    result.SessionCommands.Add(new PutCommandDataWithBlittableJson("users/1", null, blittableJson));
                    var sbc = new SingleNodeBatchCommand(DocumentConventions.Default, context, result.SessionCommands, result.Options);
                    await requestExecutor.ExecuteAsync(sbc, context);
                }
            }
        }

        // NOTE(review): this block opens a session on store1 but reads the
        // conflicts from store2 - looks intentional (attachment existence is
        // checked on store1, conflict resolution on store2), but verify.
        using (var session = store1.OpenAsyncSession())
        {
            var conflicts = (await store2.Commands().GetConflictsForAsync("users/1")).ToList();
            Assert.Equal(0, conflicts.Count);
            Assert.True(await session.Advanced.Attachments.ExistsAsync("users/1", "a1"));
        }
    }
}