/// <summary>
/// Before exporting documents, waits for all pending cluster transactions on the
/// database to complete, reporting progress into <paramref name="result"/>.
/// Runs when explicitly requested via options, or when documents are exported
/// directly from a database source.
/// </summary>
/// <param name="result">Smuggler result used for info/error messages and progress reporting.</param>
private void CompletePendingTransactions(SmugglerResult result)
{
    // If we export documents from a database,
    // we should wait for all the pending transactions to be completed first.
    var shouldExecute = _options.ExecutePendingClusterTransactions ||
                        (_options.OperateOnTypes.HasFlag(DatabaseItemType.Documents) && _source is DatabaseSource);
    if (shouldExecute == false)
        return;

    // NOTE(review): 'Exectue' typo is in the external API declaration; kept as-is so callers/overrides stay intact.
    _database.ExectueClusterTransactionOnDatabase(WaitForClusterTransactionCompletion);

    void WaitForClusterTransactionCompletion(IReadOnlyList<Task> transactionTasks)
    {
        // FIX: message previously read "Has to processing ..." — corrected grammar.
        result.AddInfo($"Has to process {transactionTasks.Count} cluster transactions before the export can take place.");
        _onProgress.Invoke(result.Progress);

        for (var index = 0; index < transactionTasks.Count; index++)
        {
            var task = transactionTasks[index];
            _token.ThrowIfCancellationRequested();

            // Wait in 10-second slices so we can keep emitting progress and honor cancellation
            // while a long-running transaction completes.
            while (task.IsCompleted == false)
            {
                _token.ThrowIfCancellationRequested();

                if (task.Wait((int)TimeSpan.FromSeconds(10).TotalMilliseconds, _token) == false)
                {
                    _token.ThrowIfCancellationRequested();

                    result.AddInfo($"Processing cluster transaction {index}.");
                    _onProgress.Invoke(result.Progress);
                }
            }

            if (task.IsCompletedSuccessfully)
            {
                result.AddInfo($"Cluster transaction {index} out of {transactionTasks.Count} is completed.");
                _onProgress.Invoke(result.Progress);
            }

            if (task.IsCanceled)
            {
                result.AddInfo($"Cluster transaction {index} was canceled.");
                _onProgress.Invoke(result.Progress);
            }

            if (task.IsFaulted)
            {
                // FIX: previously wrote to the '_result' field while every other message in this
                // method goes to the 'result' parameter; use the parameter consistently so the
                // error lands on the same result instance the caller is observing.
                result.AddError($"Cluster transaction {index} is faulted: {task.Exception}.");
                _onProgress.Invoke(result.Progress);
            }
        }
    }
}
/// <summary>
/// Imports documents from a CSV stream into the collection named by
/// <paramref name="entity"/>, using the database smuggler pipeline.
/// </summary>
/// <param name="stream">CSV data to import.</param>
/// <param name="context">Documents operation context for the import.</param>
/// <param name="entity">Target collection name; first letter is upper-cased by convention.</param>
/// <param name="options">Server-side smuggler options.</param>
/// <param name="result">Result object collecting messages and counters.</param>
/// <param name="onProgress">Progress callback.</param>
/// <param name="token">Cancellation token for the operation.</param>
private void ImportDocumentsFromCsvStream(Stream stream, DocumentsOperationContext context, string entity, DatabaseSmugglerOptionsServerSide options, SmugglerResult result, Action<IOperationProgress> onProgress, OperationCancelToken token)
{
    // Collection names are conventionally capitalized; normalize the first letter.
    if (!string.IsNullOrEmpty(entity) && char.IsLower(entity[0]))
        entity = char.ToUpper(entity[0]) + entity.Substring(1);

    result.AddInfo($"Import collection: {entity}");

    using (var csvSource = new CsvStreamSource(Database, stream, context, entity))
    {
        var smuggler = new DatabaseSmuggler(Database, csvSource, new DatabaseDestination(Database), Database.Time, options, result, onProgress, token.Token);
        smuggler.Execute();
    }
}
/// <summary>
/// Re-writes documents that were detected as belonging to more than one
/// collection, and reports how many were handled. Emits no messages when
/// there is nothing to process.
/// </summary>
/// <param name="result">Result object collecting messages and document counters.</param>
private void ProcessDocumentsWithDuplicateCollection(SmugglerResult result)
{
    var processed = 0;

    using (var actions = _destination.Documents())
    {
        foreach (var item in actions.GetDocumentsWithDuplicateCollection())
        {
            // Announce the step lazily, only once we know there is work to do.
            if (processed == 0)
                result.AddInfo("Starting to process documents with duplicate collection.");

            actions.WriteDocument(item, result.Documents);
            processed++;
        }
    }

    if (processed > 0)
        result.AddInfo($"Finished processing '{processed}' documents with duplicate collection.");
}
/// <summary>
/// Verifies that a <c>SmugglerResult</c> round-trips through blittable JSON via
/// both the client deserializer and the default conventions converter,
/// preserving its message list.
/// </summary>
public void Can_Build_Serializator_For_SmugglerResult()
{
    using (var context = JsonOperationContext.ShortTermSingleUse())
    {
        // Populate one message of every severity so the round trip covers them all.
        var expected = new SmugglerResult();
        expected.AddError("MessageA");
        expected.AddInfo("MessageB");
        expected.AddWarning("MessageC");
        expected.AddMessage("MessageD");

        var blittable = context.ReadObject(expected.ToJson(), "smuggler/result");

        // Round trip via the client-side deserializer.
        var fromClient = JsonDeserializationClient.SmugglerResult(blittable);
        Assert.Equal(expected.Messages, fromClient.Messages);

        // Round trip via the default conventions converter.
        var fromConventions = DocumentConventions.Default.Serialization.DefaultConverter.FromBlittable<SmugglerResult>(blittable);
        Assert.Equal(expected.Messages, fromConventions.Messages);
    }
}
/// <summary>
/// Skips a whole item type in the source stream, keeping progress counters and
/// the result log up to date while the skip runs.
/// </summary>
/// <param name="type">The item type being skipped.</param>
/// <param name="result">Result object whose counters and messages are updated.</param>
/// <param name="ensureStepProcessed">
/// When true (default), marks the step's counters as skipped/processed and logs a summary.
/// </param>
private void SkipType(DatabaseItemType type, SmugglerResult result, bool ensureStepProcessed = true)
{
    result.AddInfo($"Skipping '{type}' processing.");
    _onProgress.Invoke(result.Progress);

    SmugglerProgressBase.Counts counts;
    switch (type)
    {
        case DatabaseItemType.DatabaseRecord:
            counts = result.DatabaseRecord;
            break;
        case DatabaseItemType.Documents:
            counts = result.Documents;
            break;
        case DatabaseItemType.RevisionDocuments:
            counts = result.RevisionDocuments;
            break;
        case DatabaseItemType.Tombstones:
            counts = result.Tombstones;
            break;
        case DatabaseItemType.Conflicts:
            counts = result.Conflicts;
            break;
        case DatabaseItemType.Indexes:
            counts = result.Indexes;
            break;
        case DatabaseItemType.Identities:
            counts = result.Identities;
            break;
        case DatabaseItemType.CompareExchange:
            counts = result.CompareExchange;
            break;
        case DatabaseItemType.Counters:
            counts = result.Counters;
            break;
        case DatabaseItemType.LegacyDocumentDeletions:
            // No dedicated counter exists on the result for legacy deletions;
            // use a throwaway instance so the bookkeeping below still works.
            counts = new SmugglerProgressBase.Counts();
            break;
        default:
            throw new ArgumentOutOfRangeException(nameof(type), type, null);
    }

    // Invoked periodically by the source while it skips items.
    void ReportSkipped(long skipped)
    {
        if (type == DatabaseItemType.Documents)
            result.Documents.SkippedCount = skipped;

        // Log only every 10,000 items to keep the message stream manageable.
        if (skipped % 10000 != 0)
            return;

        result.AddInfo($"Skipped {skipped:#,#;;0} {type.ToString().ToLowerInvariant()}");
        _onProgress.Invoke(result.Progress);
    }

    var skippedTotal = _source.SkipType(type, onSkipped: ReportSkipped);

    if (ensureStepProcessed == false)
        return;

    counts.Skipped = true;
    counts.Processed = true;

    if (skippedTotal > 0)
    {
        counts.ReadCount = skippedTotal;
        result.AddInfo($"Skipped '{type}' processing. Skipped {skippedTotal:#,#;;0} items.");
    }
    else
    {
        result.AddInfo($"Skipped '{type}' processing.");
    }

    _onProgress.Invoke(result.Progress);
}
/// <summary>
/// Processes one item type from the source: dispatches to the type-specific
/// processor, marks the resulting counters as processed, and reports progress.
/// Types not selected in the options are skipped, except for legacy attachment
/// items (which can never be skipped) and legacy document deletions (which
/// still run while documents themselves are being processed).
/// </summary>
/// <param name="type">The item type to process.</param>
/// <param name="result">Result object collecting messages and counters.</param>
/// <param name="buildType">Build version classification of the source data.</param>
/// <param name="ensureStepsProcessed">Forwarded to <see cref="SkipType"/> when the type is skipped.</param>
private void ProcessType(DatabaseItemType type, SmugglerResult result, BuildVersionType buildType, bool ensureStepsProcessed = true)
{
    if ((_options.OperateOnTypes & type) != type)
    {
        var mustProcessAnyway =
            type == DatabaseItemType.LegacyAttachments ||
            type == DatabaseItemType.LegacyAttachmentDeletions ||
            (type == DatabaseItemType.LegacyDocumentDeletions &&
             (_options.OperateOnTypes & DatabaseItemType.Documents) == DatabaseItemType.Documents);

        if (mustProcessAnyway == false)
        {
            SkipType(type, result, ensureStepsProcessed);
            return;
        }
    }

    result.AddInfo($"Started processing {type}.");
    _onProgress.Invoke(result.Progress);

    SmugglerProgressBase.Counts progressCounts;
    switch (type)
    {
        case DatabaseItemType.DatabaseRecord:
            progressCounts = ProcessDatabaseRecord(result);
            break;
        case DatabaseItemType.Documents:
            progressCounts = ProcessDocuments(result, buildType);
            break;
        case DatabaseItemType.RevisionDocuments:
            progressCounts = ProcessRevisionDocuments(result);
            break;
        case DatabaseItemType.Tombstones:
            progressCounts = ProcessTombstones(result);
            break;
        case DatabaseItemType.Conflicts:
            progressCounts = ProcessConflicts(result);
            break;
        case DatabaseItemType.Indexes:
            progressCounts = ProcessIndexes(result);
            break;
        case DatabaseItemType.Identities:
            progressCounts = ProcessIdentities(result);
            break;
        case DatabaseItemType.LegacyAttachments:
            progressCounts = ProcessLegacyAttachments(result);
            break;
        case DatabaseItemType.LegacyDocumentDeletions:
            progressCounts = ProcessLegacyDocumentDeletions(result);
            break;
        case DatabaseItemType.LegacyAttachmentDeletions:
            progressCounts = ProcessLegacyAttachmentDeletions(result);
            break;
        case DatabaseItemType.CompareExchange:
            progressCounts = ProcessCompareExchange(result);
            break;
        case DatabaseItemType.Counters:
            progressCounts = ProcessCounters(result);
            break;
        default:
            throw new ArgumentOutOfRangeException(nameof(type), type, null);
    }

    progressCounts.Processed = true;

    // Counters that carry an etag also track attachments; mark those processed too.
    if (progressCounts is SmugglerProgressBase.CountsWithLastEtag countsWithEtag)
        countsWithEtag.Attachments.Processed = true;

    result.AddInfo($"Finished processing {type}. {progressCounts}");
    _onProgress.Invoke(result.Progress);
}
/// <summary>
/// Registers and starts a long-running operation that migrates a single database
/// from a remote RavenDB server, selecting the migrator implementation by the
/// remote server's major version. Returns the operation id immediately; the
/// migration itself runs asynchronously on the thread pool.
/// </summary>
/// <param name="databaseMigrationSettings">Per-database migration settings (name, types, transform script, ...).</param>
/// <param name="database">The destination document database.</param>
/// <returns>The id of the registered operation, usable for progress tracking/cancellation.</returns>
public long StartMigratingSingleDatabase(DatabaseMigrationSettings databaseMigrationSettings, DocumentDatabase database)
{
    var operationId = database.Operations.GetNextOperationId();
    // Tie cancellation to database shutdown so a stopping database aborts the migration.
    var cancelToken = new OperationCancelToken(database.DatabaseShutdown);
    var result = new SmugglerResult();
    var databaseName = databaseMigrationSettings.DatabaseName;
    database.Operations.AddOperation(null, $"Database name: '{databaseName}' from url: {_serverUrl}",
        Operations.OperationType.DatabaseMigrationRavenDb,
        taskFactory: onProgress => Task.Run(async() =>
        {
            onProgress?.Invoke(result.Progress);
            var message = $"Importing from RavenDB {EnumHelper.GetDescription(_buildMajorVersion)}";
            result.AddInfo(message);
            using (cancelToken)
            {
                try
                {
                    // Key of the document that persists migration progress between runs.
                    // NOTE(review): no '/' after MigrationStateKeyBase here, unlike the string-based
                    // overload which uses $"{MigrationStateKeyBase}/". Presumably one of the two is
                    // wrong — but changing the key would orphan already-persisted state; confirm
                    // which format existing state documents use before unifying.
                    var migrationStateKey = $"{MigrationStateKeyBase}" +
                                            $"{EnumHelper.GetDescription(_buildMajorVersion)}/" +
                                            $"{databaseName}/" +
                                            $"{_serverUrl}";
                    var options = new MigratorOptions
                    {
                        MigrationStateKey = migrationStateKey,
                        ServerUrl = _serverUrl,
                        DatabaseName = databaseName,
                        ApiKey = _apiKey,
                        TransformScript = databaseMigrationSettings.TransformScript,
                        EnableBasicAuthenticationOverUnsecuredHttp = _enableBasicAuthenticationOverUnsecuredHttp,
                        SkipServerCertificateValidation = _skipServerCertificateValidation,
                        RemoveAnalyzers = databaseMigrationSettings.RemoveAnalyzers,
                        ImportRavenFs = databaseMigrationSettings.ImportRavenFs,
                        OperateOnTypes = databaseMigrationSettings.OperateOnTypes,
                        OperateOnDatabaseRecordTypes = databaseMigrationSettings.OperateOnDatabaseRecordTypes
                    };
                    var parameters = new MigratorParameters
                    {
                        HttpClient = _httpClient,
                        Result = result,
                        OnProgress = onProgress,
                        Database = database,
                        CancelToken = cancelToken
                    };
                    // Pick the migrator matching the remote server's major version.
                    AbstractMigrator migrator;
                    switch (_buildMajorVersion)
                    {
                        case MajorVersion.V2:
                            migrator = new Migrator_V2(options, parameters);
                            break;
                        case MajorVersion.V30:
                        case MajorVersion.V35:
                            migrator = new Migrator_V3(options, parameters, _buildMajorVersion, _buildVersion);
                            break;
                        case MajorVersion.V4:
                            migrator = new Importer(options, parameters, _buildVersion);
                            break;
                        default:
                            throw new ArgumentOutOfRangeException(nameof(_buildMajorVersion), _buildMajorVersion, null);
                    }
                    await migrator.Execute();
                }
                catch (Exception e)
                {
                    // NOTE(review): the two concatenated segments produce "...named: X.Exception: ..."
                    // with no separating space — cosmetic, left untouched here.
                    result.AddError($"Error occurred during database migration named: {databaseName}." +
                                    $"Exception: {e.Message}");
                    // Rethrow so the operation is marked as faulted, not just logged.
                    throw;
                }
            }
            return((IOperationResult)result);
        }, cancelToken.Token),
        id: operationId, token: cancelToken);
    return(operationId);
}
/// <summary>
/// Registers and starts a long-running operation that migrates a single database
/// (identified by name) from a remote RavenDB server, choosing the migrator by
/// the remote server's major version. Returns the operation id immediately; the
/// migration runs asynchronously on the thread pool.
/// </summary>
/// <param name="sourceDatabaseName">Name of the database on the remote server.</param>
/// <param name="database">The destination document database.</param>
/// <returns>The id of the registered operation, usable for progress tracking/cancellation.</returns>
public long StartMigratingSingleDatabase(string sourceDatabaseName, DocumentDatabase database)
{
    var operationId = database.Operations.GetNextOperationId();
    var cancelToken = new OperationCancelToken(_cancellationToken);
    var result = new SmugglerResult();
    database.Operations.AddOperation(null, $"Database name: '{sourceDatabaseName}' from url: {_serverUrl}",
        Operations.OperationType.DatabaseMigration,
        taskFactory: onProgress => Task.Run(async() =>
        {
            onProgress?.Invoke(result.Progress);
            // Snapshot the field so the key and the switch below agree even if the field changes.
            var majorVersion = _buildMajorVersion;
            var message = $"Importing from RavenDB {GetDescription(majorVersion)}";
            result.AddInfo(message);
            using (cancelToken)
            {
                try
                {
                    // Key of the document that persists migration progress between runs.
                    var migrationStateKey = $"{MigrationStateKeyBase}/" +
                                            $"{GetDescription(majorVersion)}/" +
                                            $"{sourceDatabaseName}/" +
                                            $"{_serverUrl}";
                    // Pick the migrator matching the remote server's major version.
                    AbstractMigrator migrator;
                    switch (majorVersion)
                    {
                        case MajorVersion.V2:
                            migrator = new Migrator_V2(_serverUrl, sourceDatabaseName, result, onProgress,
                                database, _client, cancelToken);
                            break;
                        case MajorVersion.V30:
                        case MajorVersion.V35:
                            migrator = new Migrator_V3(_serverUrl, sourceDatabaseName, result, onProgress,
                                database, _client, migrationStateKey, majorVersion, cancelToken);
                            break;
                        case MajorVersion.V4:
                            migrator = new Importer(_serverUrl, sourceDatabaseName, result, onProgress,
                                database, migrationStateKey, cancelToken);
                            break;
                        default:
                            throw new ArgumentOutOfRangeException(nameof(majorVersion), majorVersion, null);
                    }
                    // Dispose the migrator once the run completes (or fails).
                    using (migrator)
                    {
                        await migrator.Execute();
                    }
                }
                catch (Exception e)
                {
                    // NOTE(review): the concatenated segments produce "...named: X.Exception: ..."
                    // with no separating space — cosmetic, left untouched here.
                    result.AddError($"Error occurred during database migration named: {sourceDatabaseName}." +
                                    $"Exception: {e.Message}");
                    // Rethrow so the operation is marked as faulted, not just logged.
                    throw;
                }
            }
            return((IOperationResult)result);
        }, cancelToken.Token),
        id: operationId, token: cancelToken);
    return(operationId);
}