public async Task<Operation> ImportAsync(DatabaseSmugglerImportOptions options, Stream stream, CancellationToken token = default(CancellationToken))
{
    // Validate inputs before doing any network work.
    if (options == null)
        throw new ArgumentNullException(nameof(options));
    if (stream == null)
        throw new ArgumentNullException(nameof(stream));

    JsonOperationContext context;
    using (_requestExecutor.ContextPool.AllocateOperationContext(out context))
    {
        // Reserve a server-side operation id so the import can be tracked.
        var idCommand = new GetNextOperationIdCommand();
        await _requestExecutor.ExecuteAsync(idCommand, context, token).ConfigureAwait(false);
        var operationId = idCommand.Result;

        // Stream the import payload to the server under that operation id.
        var importCommand = new ImportCommand(_requestExecutor.Conventions, context, options, stream, operationId);
        await _requestExecutor.ExecuteAsync(importCommand, context, token).ConfigureAwait(false);

        // Handle the caller can use to await/observe the server-side operation.
        return new Operation(_requestExecutor, () => _store.Changes(), _requestExecutor.Conventions, operationId);
    }
}
public async Task<Operation> ImportAsync(DatabaseSmugglerImportOptions options, Stream stream, CancellationToken token = default)
{
    // Guard clauses: both inputs are required, and a database must be selected.
    if (options == null)
        throw new ArgumentNullException(nameof(options));
    if (stream == null)
        throw new ArgumentNullException(nameof(stream));
    if (_requestExecutor == null)
        throw new InvalidOperationException("Cannot use Smuggler without a database defined, did you forget to call ForDatabase?");

    using (_requestExecutor.ContextPool.AllocateOperationContext(out JsonOperationContext context))
    {
        // Reserve a server-side operation id so the import can be tracked.
        var idCommand = new GetNextOperationIdCommand();
        await _requestExecutor.ExecuteAsync(idCommand, context, sessionInfo: null, token: token).ConfigureAwait(false);
        var operationId = idCommand.Result;

        // Push the import payload to the server under that id.
        var importCommand = new ImportCommand(_requestExecutor.Conventions, context, options, stream, operationId);
        await _requestExecutor.ExecuteAsync(importCommand, context, sessionInfo: null, token: token).ConfigureAwait(false);

        // Handle the caller can use to await/observe the server-side operation.
        return new Operation(_requestExecutor, () => _store.Changes(_databaseName), _requestExecutor.Conventions, operationId);
    }
}
public async Task NestedObjectShouldBeExportedAndImportedProperly()
{
    const string id = "companies/1";
    using (var store = GetDocumentStore())
    {
        // Store the test company and remember its change vector so we can
        // verify the import actually re-wrote the document.
        string cv;
        using (var session = store.OpenSession())
        {
            session.Store(_testCompany, id);
            session.SaveChanges();
            cv = session.Advanced.GetChangeVectorFor(_testCompany);
        }

        // Fix: dispose the HttpClient and the response stream (the original leaked both).
        using (var client = new HttpClient())
        using (var stream = await client.GetStreamAsync($"{store.Urls[0]}/databases/{store.Database}/streams/queries?query=From%20companies&format=csv"))
        using (var commands = store.Commands())
        {
            // Reserve an operation id, import the exported CSV back, and wait for it.
            var getOperationIdCommand = new GetNextOperationIdCommand();
            await commands.RequestExecutor.ExecuteAsync(getOperationIdCommand, commands.Context);
            var operationId = getOperationIdCommand.Result;

            var csvImportCommand = new CsvImportCommand(stream, null, operationId);
            await commands.ExecuteAsync(csvImportCommand);

            var operation = new Operation(commands.RequestExecutor, () => store.Changes(), store.Conventions, operationId);
            await operation.WaitForCompletionAsync();
        }

        using (var session = store.OpenSession())
        {
            var res = session.Load<Company>(id);
            // The import must have produced a new revision of the document...
            Assert.NotEqual(session.Advanced.GetChangeVectorFor(res), cv);
            try
            {
                // ...whose content round-tripped unchanged.
                Assert.Equal(res, _testCompany);
            }
            catch (Exception)
            {
                // On mismatch, dump both objects to aid diagnosis, then rethrow.
                // The labels mirror Assert.Equal's argument order above.
                var sb = new StringBuilder();
                sb.AppendLine("Expected:");
                sb.AppendLine(JObject.FromObject(res).ToString(Formatting.Indented));
                sb.AppendLine();
                sb.AppendLine("Actual:");
                sb.AppendLine(JObject.FromObject(_testCompany).ToString(Formatting.Indented));
                Console.WriteLine(sb);
                throw;
            }
        }
    }
}
// Starts a server-side export and returns an Operation handle for tracking it.
// Flow: reserve an operation id, then run ExportCommand, which receives `tcs`
// and `handleStreamResponse` (the callback that consumes the result stream).
// `tcs` (RunContinuationsAsynchronously) is the primary completion signal: it is
// canceled via the token registration, faulted by the ContinueWith when the HTTP
// request faults, and otherwise completed through ExportCommand. On failure the
// catch block awaits requestTask first so the request's own exception (if any)
// surfaces before re-awaiting tcs.Task.
// NOTE(review): the ContinueWith is passed `token`, so if the token is canceled
// the continuation is skipped and cancellationTokenRegistration is never disposed
// on that path — confirm this is acceptable (tcs is still canceled by the
// registration itself). `additionalTask` is simply forwarded into the Operation.
private async Task <Operation> ExportAsync(DatabaseSmugglerExportOptions options, Func <Stream, Task> handleStreamResponse, Task additionalTask, CancellationToken token = default) { if (options == null) { throw new ArgumentNullException(nameof(options)); } if (_requestExecutor == null) { throw new InvalidOperationException("Cannot use Smuggler without a database defined, did you forget to call ForDatabase?"); } using (_requestExecutor.ContextPool.AllocateOperationContext(out JsonOperationContext context)) { var getOperationIdCommand = new GetNextOperationIdCommand(); await _requestExecutor.ExecuteAsync(getOperationIdCommand, context, sessionInfo : null, token : token).ConfigureAwait(false); var operationId = getOperationIdCommand.Result; var tcs = new TaskCompletionSource <object>(TaskCreationOptions.RunContinuationsAsynchronously); var cancellationTokenRegistration = token.Register(() => tcs.TrySetCanceled(token)); var command = new ExportCommand(_requestExecutor.Conventions, context, options, handleStreamResponse, operationId, tcs); var requestTask = _requestExecutor.ExecuteAsync(command, context, sessionInfo: null, token: token) .ContinueWith(t => { cancellationTokenRegistration.Dispose(); if (t.IsFaulted) { tcs.TrySetException(t.Exception); if (Logger.IsOperationsEnabled) { Logger.Operations("Could not execute export", t.Exception); } } }, token); try { await tcs.Task.ConfigureAwait(false); } catch (Exception) { await requestTask.ConfigureAwait(false); await tcs.Task.ConfigureAwait(false); } return(new Operation( _requestExecutor, () => _store.Changes(_databaseName), _requestExecutor.Conventions, operationId, null, additionalTask)); } }
private async Task<long> GetOperationId()
{
    // Ask the server for the next free operation id, using a pooled context.
    using (Database.DocumentsStorage.ContextPool.AllocateOperationContext(out JsonOperationContext ctx))
    {
        var idCommand = new GetNextOperationIdCommand();
        await _requestExecutor.ExecuteAsync(idCommand, ctx, CancelToken.Token);
        return idCommand.Result;
    }
}
private async Task WaitForId()
{
    // -1 means "not assigned yet"; anything else was fetched earlier.
    if (_operationId != -1)
        return;

    // Lazily fetch the operation id from the server on first use.
    var idCommand = new GetNextOperationIdCommand();
    await _requestExecutor.ExecuteAsync(idCommand, _context, sessionInfo: null, token: _token).ConfigureAwait(false);
    _operationId = idCommand.Result;
}
public async Task CanCompactDatabaseWithAttachment()
{
    var path = NewDataPath();
    using (var store = GetDocumentStore(new Options
    {
        Path = path
    }))
    {
        // 16MB of random bytes so the attachment occupies real disk space.
        var buffer = new byte[16 * 1024 * 1024];
        new Random().NextBytes(buffer);

        using (var session = store.OpenSession())
        using (var fileStream = new MemoryStream(buffer))
        {
            var user = new User { Name = "Iftah" };
            session.Store(user, "users/1");
            session.Advanced.StoreAttachment(user, "randomFile.txt", fileStream);
            session.SaveChanges();
        }

        // Deleting the attachment leaves reclaimable space behind.
        using (var session = store.OpenSession())
        {
            session.Advanced.DeleteAttachment("users/1", "randomFile.txt");
            session.SaveChanges();
        }

        var sizeBeforeCompaction = StorageCompactionTestsSlow.GetDirSize(new DirectoryInfo(path));

        // Reserve an operation id for the compaction request.
        var requestExecutor = store.GetRequestExecutor();
        long compactOperationId;
        using (requestExecutor.ContextPool.AllocateOperationContext(out JsonOperationContext ctx))
        {
            var idCommand = new GetNextOperationIdCommand();
            await requestExecutor.ExecuteAsync(idCommand, ctx);
            compactOperationId = idCommand.Result;
        }

        var compactOperation = await store.Operations.SendAsync(new CompactDatabaseOperation(store.Database, compactOperationId), isServerOperation: true);
        await compactOperation.WaitForCompletionAsync(TimeSpan.FromSeconds(60));

        // Compaction must have reclaimed the deleted attachment's space.
        var sizeAfterCompaction = StorageCompactionTestsSlow.GetDirSize(new DirectoryInfo(path));
        Assert.True(sizeBeforeCompaction > sizeAfterCompaction);
    }
}
private async Task WaitForId()
{
    // -1 means "not assigned yet"; anything else was fetched earlier.
    if (_operationId != -1)
        return;

    // Lazily fetch the operation id (and the node that issued it) on first use.
    var idCommand = new GetNextOperationIdCommand();
    await ExecuteAsync(idCommand, token: _token).ConfigureAwait(false);
    _operationId = idCommand.Result;
    _nodeTag = idCommand.NodeTag;
}
public async Task CanCompactDatabase()
{
    var path = NewDataPath();
    using (var store = GetDocumentStore(new Options
    {
        Path = path
    }))
    {
        store.Admin.Send(new CreateSampleDataOperation());

        // Multiply the sample data so there is something substantial to reclaim.
        for (int i = 0; i < 3; i++)
        {
            await store.Operations.Send(new PatchByQueryOperation(new IndexQuery
            {
                Query = @"FROM Orders UPDATE { put(""orders/"", this); } "
            })).WaitForCompletionAsync(TimeSpan.FromSeconds(30));
        }

        WaitForIndexing(store);

        // Delete everything; the data files keep their size until compaction runs.
        var deleteOperation = store.Operations.Send(new DeleteByQueryOperation(new IndexQuery() { Query = "FROM orders" }));
        await deleteOperation.WaitForCompletionAsync(TimeSpan.FromSeconds(60));

        var oldSize = StorageCompactionTestsSlow.GetDirSize(new DirectoryInfo(path));

        // Reserve an operation id for the compaction request.
        var requestExecutor = store.GetRequestExecutor();
        long compactOperationId;
        using (requestExecutor.ContextPool.AllocateOperationContext(out JsonOperationContext context))
        {
            var getOperationIdCommand = new GetNextOperationIdCommand();
            await requestExecutor.ExecuteAsync(getOperationIdCommand, context);
            compactOperationId = getOperationIdCommand.Result;
        }

        var compactOperation = store.Operations.Send(new CompactDatabaseOperation(store.Database, compactOperationId));
        await compactOperation.WaitForCompletionAsync(TimeSpan.FromSeconds(60));

        var newSize = StorageCompactionTestsSlow.GetDirSize(new DirectoryInfo(path));

        // Fix: compaction reclaims the space freed by the deletes, so the database
        // directory must SHRINK. The original asserted oldSize < newSize (i.e. that
        // compaction GREW the database), which also contradicts the sibling test
        // CanCompactDatabaseWithAttachment, which asserts oldSize > newSize after
        // the same kind of compaction.
        Assert.True(oldSize > newSize);
    }
}
public async Task CannotImportCsvWithInvalidCsvConfigCharParams()
{
    using (var store = GetDocumentStore())
    {
        // Seed a company plus a stored query document for the fromDocument export.
        using (var session = store.OpenSession())
        {
            session.Store(_testCompany, "companies/1");
            session.Store(new { Query = "From companies" }, "queries/1");
            session.SaveChanges();
        }

        // Fix: dispose the HttpClient and the response stream (the original leaked both).
        using (var client = new HttpClient())
        using (var stream = await client.GetStreamAsync($"{store.Urls[0]}/databases/{store.Database}/streams/queries?fromDocument=queries%2F1&format=csv"))
        using (var commands = store.Commands())
        {
            var getOperationIdCommand = new GetNextOperationIdCommand();
            await commands.RequestExecutor.ExecuteAsync(getOperationIdCommand, commands.Context);
            var operationId = getOperationIdCommand.Result;

            // Quote and Comment must each be a single character; two-character
            // values should make the server reject the import.
            var invalidCsvConfig = new InValidCsvImportOptions()
            {
                Delimiter = ",",
                Quote = " '", // 2 characters is invalid
                Comment = " #", // 2 characters is invalid
                AllowComments = true,
                TrimOptions = "None"
            };

            var csvImportCommand = new CsvImportCommand(stream, null, operationId, invalidCsvConfig);

            var exception = await Assert.ThrowsAsync<Raven.Client.Exceptions.RavenException>(async () =>
            {
                await commands.ExecuteAsync(csvImportCommand);
                var operation = new Operation(commands.RequestExecutor, () => store.Changes(), store.Conventions, operationId);
                await operation.WaitForCompletionAsync();
            });

            Assert.Contains("Please verify that only one character is used", exception.Message);
        }
    }
}
private async Task<List<Measurement>> MeasurementUploading(string collection)
{
    var dateTime = new DateTime(2019, 1, 1);
    var list = new List<Measurement>();

    // Upload 32 daily CSV batches, one day at a time.
    for (var i = 0; i < 32; i++)
    {
        var requestExecutor = DocumentStore.GetRequestExecutor();
        using (var session = DocumentStore.OpenSession())
        {
            using (var memoryStream = new MemoryStream())
            using (var writer = new StreamWriter(memoryStream))
            {
                // Write this day's measurements into an in-memory CSV.
                dateTime = dateTime.AddDays(1);
                var dailyMeasurements = WriteFindingsToCsvStream(writer, dateTime);
                list.AddRange(dailyMeasurements);

                // Reserve a server-side operation id for the import.
                var getOperationIdCommand = new GetNextOperationIdCommand();
                await requestExecutor.ExecuteAsync(getOperationIdCommand, session.Advanced.Context).ConfigureAwait(false);
                var operationId = getOperationIdCommand.Result;

                // Rewind before handing the stream to the import command.
                memoryStream.Seek(0, SeekOrigin.Begin);
                var csvImportCommand = new CsvImportCommand(memoryStream, collection, operationId);

                async Task Action()
                {
                    await requestExecutor.ExecuteAsync(csvImportCommand, session.Advanced.Context).ConfigureAwait(false);
                    var operation = new Operation(requestExecutor, () => DocumentStore.Changes(), DocumentStore.Conventions, operationId);
                    await operation.WaitForCompletionAsync().ConfigureAwait(false);
                }

                await Retry(5, Action, "import csv").ConfigureAwait(false);

                memoryStream.Seek(0, SeekOrigin.Begin);
            }
        }

        // Fix: Thread.Sleep blocks the calling thread inside an async method;
        // Task.Delay yields it instead (same 2-second pause between batches).
        await Task.Delay(2000).ConfigureAwait(false);
    }

    return list;
}
public async Task NestedObjectShouldBeExportedAndImportedProperly()
{
    var id = "companies/1";
    string cv;
    using (var store = GetDocumentStore())
    {
        // Store the test company and remember its change vector so we can
        // verify the import actually re-wrote the document.
        using (var session = store.OpenSession())
        {
            session.Store(_testCompany, id);
            session.SaveChanges();
            cv = session.Advanced.GetChangeVectorFor(_testCompany);
        }

        // Fix: dispose the HttpClient and the response stream (the original leaked both).
        using (var client = new HttpClient())
        using (var stream = await client.GetStreamAsync($"{store.Urls[0]}/databases/{store.Database}/streams/queries?query=From%20companies&format=csv"))
        using (var commands = store.Commands())
        {
            // Reserve an operation id, import the exported CSV back, and wait for it.
            var getOperationIdCommand = new GetNextOperationIdCommand();
            await commands.RequestExecutor.ExecuteAsync(getOperationIdCommand, commands.Context);
            var operationId = getOperationIdCommand.Result;

            var csvImportCommand = new CsvImportCommand(stream, null, operationId);
            await commands.ExecuteAsync(csvImportCommand);

            var operation = new Operation(commands.RequestExecutor, () => store.Changes(), store.Conventions, operationId);
            await operation.WaitForCompletionAsync();
        }

        using (var session = store.OpenSession())
        {
            var res = session.Load<Company>(id);
            // The import must have produced a new revision whose content round-tripped.
            Assert.NotEqual(session.Advanced.GetChangeVectorFor(res), cv);
            Assert.Equal(res, _testCompany);
        }
    }
}
private async Task<Operation> ExportAsync(DatabaseSmugglerOptions options, Func<Stream, Task> handleStreamResponse, CancellationToken token = default(CancellationToken))
{
    if (options == null)
        throw new ArgumentNullException(nameof(options));

    using (_requestExecutor.ContextPool.AllocateOperationContext(out JsonOperationContext context))
    {
        // Reserve a server-side operation id so the export can be tracked.
        var idCommand = new GetNextOperationIdCommand();
        await _requestExecutor.ExecuteAsync(idCommand, context, token).ConfigureAwait(false);
        var operationId = idCommand.Result;

        // Run the export; handleStreamResponse consumes the result stream.
        var exportCommand = new ExportCommand(_requestExecutor.Conventions, context, options, handleStreamResponse, operationId);
        await _requestExecutor.ExecuteAsync(exportCommand, context, token).ConfigureAwait(false);

        // Handle the caller can use to await/observe the server-side operation.
        return new Operation(_requestExecutor, () => _store.Changes(), _requestExecutor.Conventions, operationId);
    }
}
public async Task ExportingAndImportingCsvUsingQueryFromDocumentShouldWork()
{
    using (var store = GetDocumentStore())
    {
        using (var session = store.OpenSession())
        {
            session.Store(_testCompany, "companies/1");
            // NOTE(review): the stored query text is URL-encoded ("From%20companies")
            // rather than plain "From companies" as in the sibling test — confirm the
            // server decodes the fromDocument query before executing it.
            session.Store(new{ Query = "From%20companies" }, "queries/1");
            session.SaveChanges();
        }

        // Fix: dispose the HttpClient and the response stream (the original leaked both).
        using (var client = new HttpClient())
        using (var stream = await client.GetStreamAsync($"{store.Urls[0]}/databases/{store.Database}/streams/queries?fromDocument=queries%2F1&format=csv"))
        using (var commands = store.Commands())
        {
            // Reserve an operation id, import the exported CSV back, and wait for it.
            var getOperationIdCommand = new GetNextOperationIdCommand();
            await commands.RequestExecutor.ExecuteAsync(getOperationIdCommand, commands.Context);
            var operationId = getOperationIdCommand.Result;

            var csvImportCommand = new CsvImportCommand(stream, null, operationId);
            await commands.ExecuteAsync(csvImportCommand);

            var operation = new Operation(commands.RequestExecutor, () => store.Changes(), store.Conventions, operationId);
            await operation.WaitForCompletionAsync();
        }

        using (var session = store.OpenSession())
        {
            // The re-import adds a second copy of the original company.
            var res = session.Query<Company>().ToList();
            Assert.Equal(2, res.Count);
            Assert.Equal(res[0], res[1]);
        }
    }
}
// Imports a smuggler dump from `stream` and returns an Operation for tracking it.
// Unlike the `using`-scoped variant, the operation context is allocated manually
// (returnContext) because its lifetime must extend past this method: the request
// continuation disposes it once the HTTP call finishes. The stream is likewise
// wrapped in DisposeStreamOnce (unless leaveOpen) and disposed by the continuation.
// `tcs` (RunContinuationsAsynchronously) is the primary completion signal: it is
// canceled via the token registration, faulted by the continuation when the request
// faults, and otherwise completed through ImportCommand. On await failure,
// requestTask is awaited first so the request's own exception surfaces before
// re-awaiting tcs.Task. The outer catch covers the setup path only: if the
// continuation was never scheduled (requestTask == null) it must dispose the
// context itself — hence the "double dispose" guard — and it always disposes the
// stream wrapper before rethrowing the unwrapped inner exception.
// NOTE(review): ContinueWith is passed `token`, so if the token is canceled the
// continuation is skipped and returnContext / cancellationTokenRegistration are
// not disposed on that path — confirm cleanup is handled elsewhere for it.
private async Task <Operation> ImportInternalAsync(DatabaseSmugglerImportOptions options, Stream stream, bool leaveOpen, CancellationToken token = default) { var disposeStream = leaveOpen ? null : new DisposeStreamOnce(stream); IDisposable returnContext = null; Task requestTask = null; try { if (options == null) { throw new ArgumentNullException(nameof(options)); } if (stream == null) { throw new ArgumentNullException(nameof(stream)); } if (_requestExecutor == null) { throw new InvalidOperationException("Cannot use Smuggler without a database defined, did you forget to call ForDatabase?"); } returnContext = _requestExecutor.ContextPool.AllocateOperationContext(out JsonOperationContext context); var getOperationIdCommand = new GetNextOperationIdCommand(); await _requestExecutor.ExecuteAsync(getOperationIdCommand, context, sessionInfo : null, token : token).ConfigureAwait(false); var operationId = getOperationIdCommand.Result; var tcs = new TaskCompletionSource <object>(TaskCreationOptions.RunContinuationsAsynchronously); var cancellationTokenRegistration = token.Register(() => tcs.TrySetCanceled(token)); var command = new ImportCommand(context, options, stream, operationId, tcs, this, getOperationIdCommand.NodeTag); var task = _requestExecutor.ExecuteAsync(command, context, sessionInfo: null, token: token); requestTask = task .ContinueWith(t => { returnContext?.Dispose(); cancellationTokenRegistration.Dispose(); using (disposeStream) { if (t.IsFaulted) { tcs.TrySetException(t.Exception); if (Logger.IsOperationsEnabled) { Logger.Operations("Could not execute import", t.Exception); } } } }, token); try { await tcs.Task.ConfigureAwait(false); } catch (Exception) { await requestTask.ConfigureAwait(false); await tcs.Task.ConfigureAwait(false); } return(new Operation(_requestExecutor, () => _store.Changes(_databaseName, getOperationIdCommand.NodeTag), _requestExecutor.Conventions, operationId, nodeTag: getOperationIdCommand.NodeTag, additionalTask: task)); } catch 
(Exception e) { if (requestTask == null) { // handle the possible double dispose of return context
returnContext?.Dispose(); } disposeStream?.Dispose(); throw e.ExtractSingleInnerException(); } }
// Imports a smuggler dump from `stream` and returns an Operation for tracking it.
// This revision scopes the operation context with a plain `using`, so the context
// is returned to the pool when this method exits. `tcs` is handed to ImportCommand
// and awaiting it is the primary wait; on failure, requestTask is awaited so the
// HTTP request's own exception (if any) surfaces instead. The continuation disposes
// the stream wrapper (DisposeStreamOnce, skipped when leaveOpen) once the request
// finishes; the outer catch disposes it on the setup/failure path and rethrows.
// NOTE(review): the CancellationTokenRegistration from token.Register is never
// disposed, and a faulted request is only logged — tcs is never faulted here, so
// error propagation relies entirely on the ImportCommand/tcs wiring; confirm a
// faulted HTTP request cannot leave tcs.Task pending forever. Also note that
// ContinueWith is passed `token`, so the continuation (and the stream disposal it
// performs) is skipped if the token is canceled first.
private async Task <Operation> ImportInternalAsync(DatabaseSmugglerImportOptions options, Stream stream, bool leaveOpen, CancellationToken token = default) { var disposeStream = leaveOpen ? null : new DisposeStreamOnce(stream); try { if (options == null) { throw new ArgumentNullException(nameof(options)); } if (stream == null) { throw new ArgumentNullException(nameof(stream)); } if (_requestExecutor == null) { throw new InvalidOperationException("Cannot use Smuggler without a database defined, did you forget to call ForDatabase?"); } using (_requestExecutor.ContextPool.AllocateOperationContext(out JsonOperationContext context)) { var getOperationIdCommand = new GetNextOperationIdCommand(); await _requestExecutor.ExecuteAsync(getOperationIdCommand, context, sessionInfo : null, token : token).ConfigureAwait(false); var operationId = getOperationIdCommand.Result; var tcs = new TaskCompletionSource <object>(TaskCreationOptions.RunContinuationsAsynchronously); token.Register(() => tcs.TrySetCanceled(token)); var command = new ImportCommand(_requestExecutor.Conventions, context, options, stream, operationId, tcs); var requestTask = _requestExecutor.ExecuteAsync(command, context, sessionInfo: null, token: token) .ContinueWith(t => { using (disposeStream) { if (t.IsFaulted && Logger.IsOperationsEnabled) { Logger.Operations("Could not execute import", t.Exception); } } }, token); try { await tcs.Task.ConfigureAwait(false); } catch (Exception) { await requestTask.ConfigureAwait(false); } return(new Operation(_requestExecutor, () => _store.Changes(_databaseName), _requestExecutor.Conventions, operationId)); } } catch { disposeStream?.Dispose(); throw; } }