public void Can_Build_Serializator_For_SmugglerResult()
{
    using (var context = JsonOperationContext.ShortTermSingleUse())
    {
        // Populate one message of every severity so the round-trip covers all paths.
        var original = new SmugglerResult();
        original.AddError("MessageA");
        original.AddInfo("MessageB");
        original.AddWarning("MessageC");
        original.AddMessage("MessageD");

        var blittable = context.ReadObject(original.ToJson(), "smuggler/result");

        // Round-trip 1: the client-side JSON deserializer.
        var viaClientDeserializer = JsonDeserializationClient.SmugglerResult(blittable);
        Assert.Equal(original.Messages, viaClientDeserializer.Messages);

        // Round-trip 2: the conventions-based blittable converter.
        var viaConventions = DocumentConventions.Default.Serialization.DefaultConverter.FromBlittable<SmugglerResult>(blittable);
        Assert.Equal(original.Messages, viaConventions.Messages);
    }
}
/// <summary>
/// Imports a set of smuggler dump files concurrently (up to half the available
/// processors), merges the per-file <see cref="SmugglerResult"/>s into a single
/// summary, writes that summary to "smuggler.results.txt" under
/// <paramref name="directory"/> (best effort), and streams it to the response body.
/// </summary>
/// <param name="files">Producer/consumer queue of stream factories, one per dump file.
/// The producer is expected to call CompleteAdding when done.</param>
/// <param name="directory">Directory that receives the results summary file.</param>
private async Task BulkImport(BlockingCollection<Func<Task<Stream>>> files, string directory)
{
    var results = new ConcurrentQueue<SmugglerResult>();
    var tasks = new Task[Math.Max(1, ProcessorInfo.ProcessorCount / 2)];
    var finalResult = new SmugglerResult();

    for (int i = 0; i < tasks.Length; i++)
    {
        tasks[i] = Task.Run(async () =>
        {
            while (files.IsCompleted == false)
            {
                Func<Task<Stream>> getFile;
                try
                {
                    getFile = files.Take();
                }
                catch (Exception)
                {
                    // Expected race: Take() throws InvalidOperationException when the
                    // collection is marked complete between the IsCompleted check and
                    // the call; the loop condition will then terminate the worker.
                    //TODO : add logging, _silently_ skipping is a bad idea
                    continue;
                }

                using (ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
                using (var file = await getFile())
                using (var stream = new GZipStream(new BufferedStream(file, 128 * Voron.Global.Constants.Size.Kilobyte), CompressionMode.Decompress))
                using (var source = new StreamSource(stream, context))
                {
                    var destination = new DatabaseDestination(Database);
                    var smuggler = new DatabaseSmuggler(Database, source, destination, Database.Time);
                    var result = smuggler.Execute();
                    results.Enqueue(result);
                }
            }
        });
    }

    await Task.WhenAll(tasks);

    // Merge per-file results into one summary. Counts and etags accumulate;
    // dequeue order is nondeterministic but all operations below are commutative
    // except the Attachments assignments (see NOTE).
    while (results.TryDequeue(out SmugglerResult importResult))
    {
        finalResult.Documents.SkippedCount += importResult.Documents.SkippedCount;
        finalResult.Documents.ReadCount += importResult.Documents.ReadCount;
        finalResult.Documents.ErroredCount += importResult.Documents.ErroredCount;
        finalResult.Documents.LastEtag = Math.Max(finalResult.Documents.LastEtag, importResult.Documents.LastEtag);
        // NOTE(review): Attachments is overwritten, not accumulated, so only the
        // last dequeued result's value survives — confirm this is intentional.
        finalResult.Documents.Attachments = importResult.Documents.Attachments;

        finalResult.RevisionDocuments.ReadCount += importResult.RevisionDocuments.ReadCount;
        finalResult.RevisionDocuments.ErroredCount += importResult.RevisionDocuments.ErroredCount;
        finalResult.RevisionDocuments.LastEtag = Math.Max(finalResult.RevisionDocuments.LastEtag, importResult.RevisionDocuments.LastEtag);
        finalResult.RevisionDocuments.Attachments = importResult.RevisionDocuments.Attachments;

        finalResult.Identities.ReadCount += importResult.Identities.ReadCount;
        finalResult.Identities.ErroredCount += importResult.Identities.ErroredCount;

        finalResult.Indexes.ReadCount += importResult.Indexes.ReadCount;
        finalResult.Indexes.ErroredCount += importResult.Indexes.ErroredCount;

        foreach (var message in importResult.Messages)
        {
            finalResult.AddMessage(message);
        }
    }

    using (ContextPool.AllocateOperationContext(out DocumentsOperationContext finalContext))
    using (var memoryStream = new MemoryStream()) // was leaked before: MemoryStream is IDisposable
    {
        WriteImportResult(finalContext, finalResult, memoryStream);
        memoryStream.Position = 0;
        try
        {
            using (var output = File.Create(Path.Combine(directory, "smuggler.results.txt")))
            {
                memoryStream.CopyTo(output);
            }
        }
        catch (Exception)
        {
            // Best effort: persisting the summary to disk is non-critical,
            // the result is still written to the response below.
        }
        memoryStream.Position = 0;
        memoryStream.CopyTo(ResponseBodyStream());
    }
}
/// <summary>
/// Registers a long-running database-migration operation that pulls
/// <paramref name="sourceDatabaseName"/> from the configured server URL,
/// choosing the migrator implementation that matches the source server's
/// major version, and returns the operation id for progress tracking.
/// </summary>
public long StartMigrateSingleDatabase(string sourceDatabaseName, DocumentDatabase database)
{
    var operationId = database.Operations.GetNextOperationId();
    var cancelToken = new OperationCancelToken(_cancellationToken);
    var migrationResult = new SmugglerResult();

    database.Operations.AddOperation(null,
        $"Database name: '{sourceDatabaseName}' from url: {_serverUrl}",
        Operations.OperationType.DatabaseMigration,
        taskFactory: onProgress => Task.Run(async () =>
        {
            onProgress?.Invoke(migrationResult.Progress);

            // Record the source server's version details up front.
            var banner = $"Importing from RavenDB {GetDescription(_version)}, build version: {_buildVersion}";
            if (string.IsNullOrWhiteSpace(_fullVersion) == false)
                banner += $", full version: {_fullVersion}";
            migrationResult.AddMessage(banner);

            using (cancelToken)
            {
                try
                {
                    var migrationStateKey = $"{MigrationStateKeyBase}/{GetDescription(_version)}/{sourceDatabaseName}/{_serverUrl}";

                    // Pick the migrator matching the source major version.
                    AbstractMigrator migrator;
                    if (_version == MajorVersion.V2)
                        migrator = new Migrator_V2(_serverUrl, sourceDatabaseName, migrationResult, onProgress, database, _client, cancelToken);
                    else if (_version == MajorVersion.V30 || _version == MajorVersion.V35)
                        migrator = new Migrator_V3(_serverUrl, sourceDatabaseName, migrationResult, onProgress, database, _client, migrationStateKey, _version, cancelToken);
                    else if (_version == MajorVersion.V4)
                        migrator = new Importer(_serverUrl, sourceDatabaseName, migrationResult, onProgress, database, migrationStateKey, cancelToken);
                    else
                        throw new ArgumentOutOfRangeException(nameof(_version), _version, null);

                    using (migrator)
                    {
                        await migrator.Execute();
                    }
                }
                catch (Exception e)
                {
                    migrationResult.AddError($"Error occurred during database migration named: {sourceDatabaseName}." + $"Exception: {e.Message}");
                    throw;
                }
            }

            return (IOperationResult)migrationResult;
        }, cancelToken.Token),
        id: operationId, token: cancelToken);

    return operationId;
}