/// <summary>
/// Streams the CSV source row-by-row, converting each record into a <see cref="DocumentItem"/>.
/// Rows that fail conversion are recorded as errors on <c>_result</c> and skipped.
/// </summary>
/// <param name="collectionsToExport">Collections requested for export (not consulted here; kept for interface compatibility).</param>
/// <param name="actions">Supplies a fresh document context per emitted item.</param>
/// <returns>Lazily-produced document items, one per successfully parsed CSV record.</returns>
public IEnumerable <DocumentItem> GetDocuments(List <string> collectionsToExport, INewDocumentActions actions)
{
    var lineNumber = 0;
    while (_csvReader.Read())
    {
        lineNumber++;

        // Field/header rows are consumed for setup and produce no document.
        if (ProcessFieldsIfNeeded())
            continue;

        var context = actions.GetContextForNewDocument();

        DocumentItem document;
        try
        {
            document = ConvertRecordToDocumentItem(context, _csvReader.Context.Record, _csvReaderFieldHeaders, _collection);
        }
        catch (Exception e)
        {
            // A bad row is reported and skipped; the import continues.
            _result.AddError($"Fail to parse CSV line {lineNumber}, Error:{e}");
            _result.Documents.ErroredCount++;
            continue;
        }

        yield return document;
    }
}
/// <summary>
/// Before exporting documents, waits for all pending cluster transactions on the
/// database to finish, reporting progress to <paramref name="result"/> along the way.
/// </summary>
/// <param name="result">Result object that collects info/error messages and whose progress is pushed via <c>_onProgress</c>.</param>
private void CompletePendingTransactions(SmugglerResult result)
{
    // If we export documents from a database,
    // we should wait for all the pending transactions to be completed first.
    var shouldExecute = _options.ExecutePendingClusterTransactions ||
                        (_options.OperateOnTypes.HasFlag(DatabaseItemType.Documents) && _source is DatabaseSource);
    if (shouldExecute == false)
        return;

    _database.ExectueClusterTransactionOnDatabase(WaitForClusterTransactionCompletion);

    void WaitForClusterTransactionCompletion(IReadOnlyList <Task> transactionTasks)
    {
        // Fixed grammar: message previously read "Has to processing ...".
        result.AddInfo($"Has to process {transactionTasks.Count} cluster transactions before the export can take place.");
        _onProgress.Invoke(result.Progress);

        for (var index = 0; index < transactionTasks.Count; index++)
        {
            var task = transactionTasks[index];
            _token.ThrowIfCancellationRequested();

            // Wait in 10-second slices so cancellation is honored while a
            // long-running transaction is still in flight.
            while (task.IsCompleted == false)
            {
                _token.ThrowIfCancellationRequested();
                if (task.Wait((int)TimeSpan.FromSeconds(10).TotalMilliseconds, _token) == false)
                {
                    _token.ThrowIfCancellationRequested();
                    result.AddInfo($"Processing cluster transaction {index}.");
                    _onProgress.Invoke(result.Progress);
                }
            }

            if (task.IsCompletedSuccessfully)
            {
                result.AddInfo($"Cluster transaction {index} out of {transactionTasks.Count} is completed.");
                _onProgress.Invoke(result.Progress);
            }

            if (task.IsCanceled)
            {
                result.AddInfo($"Cluster transaction {index} was canceled.");
                _onProgress.Invoke(result.Progress);
            }

            if (task.IsFaulted)
            {
                // Consistency fix: report on the result passed to this method,
                // like every other branch (previously wrote to the _result field).
                result.AddError($"Cluster transaction {index} is faulted: {task.Exception}.");
                _onProgress.Invoke(result.Progress);
            }
        }
    }
}
/// <summary>
/// Round-trips a <c>SmugglerResult</c> through blittable JSON and verifies the
/// message list survives both the client deserializer and the default
/// conventions converter.
/// </summary>
public void Can_Build_Serializator_For_SmugglerResult()
{
    using (var context = JsonOperationContext.ShortTermSingleUse())
    {
        var original = new SmugglerResult();
        original.AddError("MessageA");
        original.AddInfo("MessageB");
        original.AddWarning("MessageC");
        original.AddMessage("MessageD");

        var json = context.ReadObject(original.ToJson(), "smuggler/result");

        // Path 1: dedicated client-side deserializer.
        var viaClientDeserializer = JsonDeserializationClient.SmugglerResult(json);
        Assert.Equal(original.Messages, viaClientDeserializer.Messages);

        // Path 2: generic conventions-based converter.
        var viaConventions = DocumentConventions.Default.Serialization.DefaultConverter.FromBlittable <SmugglerResult>(json);
        Assert.Equal(original.Messages, viaConventions.Messages);
    }
}
/// <summary>
/// Handles a multipart/form-data database import: the "importOptions" form part
/// (optionally gzip-encoded) configures the import, and each subsequent file
/// part is treated as a gzip-compressed dump to import. Runs as a tracked
/// long-running operation and writes the final result to the response.
/// </summary>
public async Task PostImportAsync()
{
    using (ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
    {
        if (HttpContext.Request.HasFormContentType == false)
        {
            HttpContext.Response.StatusCode = (int)HttpStatusCode.BadRequest; // Bad request
            using (var writer = new BlittableJsonTextWriter(context, ResponseBodyStream()))
            {
                context.Write(writer, new DynamicJsonValue
                {
                    ["Type"] = "Error",
                    ["Error"] = "This endpoint requires form content type"
                });
                return;
            }
        }

        var operationId = GetLongQueryString("operationId");
        var token = CreateOperationToken();

        var result = new SmugglerResult();
        await Database.Operations.AddOperation(Database, "Import to: " + Database.Name,
            Operations.OperationType.DatabaseImport,
            onProgress =>
            {
                return Task.Run(async () =>
                {
                    try
                    {
                        var boundary = MultipartRequestHelper.GetBoundary(
                            MediaTypeHeaderValue.Parse(HttpContext.Request.ContentType),
                            MultipartRequestHelper.MultipartBoundaryLengthLimit);
                        var reader = new MultipartReader(boundary, HttpContext.Request.Body);
                        DatabaseSmugglerOptionsServerSide options = null;

                        while (true)
                        {
                            var section = await reader.ReadNextSectionAsync().ConfigureAwait(false);
                            if (section == null)
                                break;

                            if (ContentDispositionHeaderValue.TryParse(section.ContentDisposition, out ContentDispositionHeaderValue contentDisposition) == false)
                                continue;

                            if (MultipartRequestHelper.HasFormDataContentDisposition(contentDisposition))
                            {
                                // Only the "importOptions" form field is meaningful; other fields are ignored.
                                var key = HeaderUtilities.RemoveQuotes(contentDisposition.Name);
                                if (key != "importOptions")
                                    continue;

                                BlittableJsonReaderObject blittableJson;
                                if (section.Headers.ContainsKey("Content-Encoding") && section.Headers["Content-Encoding"] == "gzip")
                                {
                                    using (var gzipStream = new GZipStream(section.Body, CompressionMode.Decompress))
                                    {
                                        blittableJson = await context.ReadForMemoryAsync(gzipStream, "importOptions");
                                    }
                                }
                                else
                                {
                                    blittableJson = await context.ReadForMemoryAsync(section.Body, "importOptions");
                                }

                                options = JsonDeserializationServer.DatabaseSmugglerOptions(blittableJson);
                                continue;
                            }

                            if (MultipartRequestHelper.HasFileContentDisposition(contentDisposition) == false)
                                continue;

                            // File parts are expected to be gzip-compressed dumps.
                            var stream = new GZipStream(section.Body, CompressionMode.Decompress);
                            DoImportInternal(context, stream, options, result, onProgress, token);
                        }
                    }
                    catch (Exception e)
                    {
                        // Fixed message: this is the import endpoint (previously said "during export").
                        result.AddError($"Error occurred during import. Exception: {e.Message}");
                        throw;
                    }

                    return (IOperationResult)result;
                });
            }, operationId, token).ConfigureAwait(false);

        WriteImportResult(context, result, ResponseBodyStream());
    }
}
/// <summary>
/// Starts an asynchronous migration of a single database from a remote RavenDB
/// server, choosing the migrator implementation by the remote server's major
/// version. The work runs as a tracked operation on <paramref name="database"/>.
/// </summary>
/// <param name="databaseMigrationSettings">Source database name and migration options.</param>
/// <param name="database">Target database that hosts and tracks the operation.</param>
/// <returns>The operation id under which the migration is registered.</returns>
public long StartMigratingSingleDatabase(DatabaseMigrationSettings databaseMigrationSettings, DocumentDatabase database)
{
    var operationId = database.Operations.GetNextOperationId();
    var cancelToken = new OperationCancelToken(database.DatabaseShutdown);
    var result = new SmugglerResult();
    var databaseName = databaseMigrationSettings.DatabaseName;

    database.Operations.AddOperation(null,
        $"Database name: '{databaseName}' from url: {_serverUrl}",
        Operations.OperationType.DatabaseMigrationRavenDb,
        taskFactory: onProgress => Task.Run(async() =>
        {
            onProgress?.Invoke(result.Progress);
            var message = $"Importing from RavenDB {EnumHelper.GetDescription(_buildMajorVersion)}";
            result.AddInfo(message);

            using (cancelToken)
            {
                try
                {
                    // NOTE(review): unlike the other migration entry point, there is no '/'
                    // between the base and the version segment here — confirm whether that
                    // is intentional before unifying (the key is persisted state).
                    var migrationStateKey = $"{MigrationStateKeyBase}" +
                                            $"{EnumHelper.GetDescription(_buildMajorVersion)}/" +
                                            $"{databaseName}/" +
                                            $"{_serverUrl}";

                    var options = new MigratorOptions
                    {
                        MigrationStateKey = migrationStateKey,
                        ServerUrl = _serverUrl,
                        DatabaseName = databaseName,
                        ApiKey = _apiKey,
                        TransformScript = databaseMigrationSettings.TransformScript,
                        EnableBasicAuthenticationOverUnsecuredHttp = _enableBasicAuthenticationOverUnsecuredHttp,
                        SkipServerCertificateValidation = _skipServerCertificateValidation,
                        RemoveAnalyzers = databaseMigrationSettings.RemoveAnalyzers,
                        ImportRavenFs = databaseMigrationSettings.ImportRavenFs,
                        OperateOnTypes = databaseMigrationSettings.OperateOnTypes,
                        OperateOnDatabaseRecordTypes = databaseMigrationSettings.OperateOnDatabaseRecordTypes
                    };

                    var parameters = new MigratorParameters
                    {
                        HttpClient = _httpClient,
                        Result = result,
                        OnProgress = onProgress,
                        Database = database,
                        CancelToken = cancelToken
                    };

                    // Pick the migrator matching the remote server's major version.
                    AbstractMigrator migrator;
                    switch (_buildMajorVersion)
                    {
                        case MajorVersion.V2:
                            migrator = new Migrator_V2(options, parameters);
                            break;
                        case MajorVersion.V30:
                        case MajorVersion.V35:
                            migrator = new Migrator_V3(options, parameters, _buildMajorVersion, _buildVersion);
                            break;
                        case MajorVersion.V4:
                            migrator = new Importer(options, parameters, _buildVersion);
                            break;
                        default:
                            throw new ArgumentOutOfRangeException(nameof(_buildMajorVersion), _buildMajorVersion, null);
                    }

                    await migrator.Execute();
                }
                catch (Exception e)
                {
                    // Fixed message: added the missing space between the sentences
                    // (previously rendered "...named: X.Exception: ...").
                    result.AddError($"Error occurred during database migration named: {databaseName}. " +
                                    $"Exception: {e.Message}");
                    throw;
                }
            }

            return((IOperationResult)result);
        }, cancelToken.Token),
        id: operationId, token: cancelToken);

    return(operationId);
}
/// <summary>
/// Imports documents from a CSV file posted as multipart/form-data. The target
/// collection comes from the "collection" query string, or is derived from the
/// uploaded file's name when absent. Runs as a tracked long-running operation
/// and writes the final result to the response.
/// </summary>
public async Task ImportFromCsv()
{
    using (Database.DocumentsStorage.ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
    {
        if (HttpContext.Request.HasFormContentType == false)
        {
            HttpContext.Response.StatusCode = (int)HttpStatusCode.BadRequest;
            using (var writer = new BlittableJsonTextWriter(context, ResponseBodyStream()))
            {
                context.Write(writer, new DynamicJsonValue
                {
                    ["Type"] = "Error",
                    ["Error"] = "Import from csv requires form content type"
                });
                return;
            }
        }

        var token = new OperationCancelToken(Database.DatabaseShutdown);
        var result = new SmugglerResult();
        var operationId = GetLongQueryString("operationId", false) ?? Database.Operations.GetNextOperationId();
        var collection = GetStringQueryString("collection", false);
        var operationDescription = collection != null
            ? "Import collection: " + collection
            : "Import collection from CSV";

        await Database.Operations.AddOperation(Database, operationDescription,
            Raven.Server.Documents.Operations.Operations.OperationType.CollectionImportFromCsv,
            onProgress =>
            {
                return Task.Run(async () =>
                {
                    try
                    {
                        var reader = new MultipartReader(
                            MultipartRequestHelper.GetBoundary(
                                MediaTypeHeaderValue.Parse(HttpContext.Request.ContentType),
                                MultipartRequestHelper.MultipartBoundaryLengthLimit),
                            HttpContext.Request.Body);

                        while (true)
                        {
                            var section = await reader.ReadNextSectionAsync().ConfigureAwait(false);
                            if (section == null)
                                break;

                            if (ContentDispositionHeaderValue.TryParse(section.ContentDisposition, out ContentDispositionHeaderValue contentDisposition) == false)
                                continue;

                            // Fixed: removed a redundant second TryParse of the same
                            // ContentDisposition value that was performed here.
                            if (MultipartRequestHelper.HasFileContentDisposition(contentDisposition))
                            {
                                if (string.IsNullOrEmpty(collection))
                                {
                                    // No explicit collection: derive one from the file name,
                                    // e.g. "user.csv" -> "Users".
                                    var fileName = contentDisposition.FileName.ToString().Trim('\"');
                                    collection = Inflector.Pluralize(CSharpClassName.ConvertToValidClassName(Path.GetFileNameWithoutExtension(fileName)));
                                }

                                var options = new DatabaseSmugglerOptionsServerSide();
                                if (section.Headers.ContainsKey("Content-Encoding") && section.Headers["Content-Encoding"] == "gzip")
                                {
                                    using (var gzipStream = new GZipStream(section.Body, CompressionMode.Decompress))
                                    {
                                        ImportDocumentsFromCsvStream(gzipStream, context, collection, options, result, onProgress, token);
                                    }
                                }
                                else
                                {
                                    ImportDocumentsFromCsvStream(section.Body, context, collection, options, result, onProgress, token);
                                }
                            }
                        }
                    }
                    catch (Exception e)
                    {
                        result.AddError($"Error occurred during csv import. Exception: {e.Message}");
                        throw;
                    }

                    return (IOperationResult)result;
                });
            }, operationId, token);

        WriteImportResult(context, result, ResponseBodyStream());
    }
}
/// <summary>
/// Starts an asynchronous migration of a single remote database, selecting the
/// migrator implementation by the detected remote major version. The work runs
/// as a tracked operation on <paramref name="database"/>.
/// </summary>
/// <param name="sourceDatabaseName">Name of the database on the remote server.</param>
/// <param name="database">Target database that hosts and tracks the operation.</param>
/// <returns>The operation id under which the migration is registered.</returns>
public long StartMigrateSingleDatabase(string sourceDatabaseName, DocumentDatabase database)
{
    var operationId = database.Operations.GetNextOperationId();
    var cancelToken = new OperationCancelToken(_cancellationToken);
    var result = new SmugglerResult();

    database.Operations.AddOperation(null,
        $"Database name: '{sourceDatabaseName}' from url: {_serverUrl}",
        Operations.OperationType.DatabaseMigration,
        taskFactory: onProgress => Task.Run(async() =>
        {
            onProgress?.Invoke(result.Progress);
            var message = $"Importing from RavenDB {GetDescription(_version)}, " +
                          $"build version: {_buildVersion}";
            if (string.IsNullOrWhiteSpace(_fullVersion) == false)
            {
                message += $", full version: {_fullVersion}";
            }
            result.AddMessage(message);

            using (cancelToken)
            {
                try
                {
                    var migrationStateKey = $"{MigrationStateKeyBase}/" +
                                            $"{GetDescription(_version)}/" +
                                            $"{sourceDatabaseName}/" +
                                            $"{_serverUrl}";

                    // Pick the migrator matching the remote server's major version.
                    AbstractMigrator migrator;
                    switch (_version)
                    {
                        case MajorVersion.V2:
                            migrator = new Migrator_V2(_serverUrl, sourceDatabaseName, result, onProgress, database, _client, cancelToken);
                            break;
                        case MajorVersion.V30:
                        case MajorVersion.V35:
                            migrator = new Migrator_V3(_serverUrl, sourceDatabaseName, result, onProgress, database, _client, migrationStateKey, _version, cancelToken);
                            break;
                        case MajorVersion.V4:
                            migrator = new Importer(_serverUrl, sourceDatabaseName, result, onProgress, database, migrationStateKey, cancelToken);
                            break;
                        default:
                            throw new ArgumentOutOfRangeException(nameof(_version), _version, null);
                    }

                    using (migrator)
                    {
                        await migrator.Execute();
                    }
                }
                catch (Exception e)
                {
                    // Fixed message: added the missing space between the sentences
                    // (previously rendered "...named: X.Exception: ...").
                    result.AddError($"Error occurred during database migration named: {sourceDatabaseName}. " +
                                    $"Exception: {e.Message}");
                    throw;
                }
            }

            return((IOperationResult)result);
        }, cancelToken.Token),
        id: operationId, token: cancelToken);

    return(operationId);
}