/// <summary>
/// GET /smuggler/import handler: decompresses the gzipped dump supplied via the
/// 'file' or 'url' query parameter and imports it into the current database,
/// writing the smuggler result to the response body.
/// </summary>
/// <exception cref="ArgumentException">Neither 'file' nor 'url' was supplied.</exception>
public async Task GetImport()
{
    // At least one of the two supported input sources must be present on the query string.
    var query = HttpContext.Request.Query;
    var hasFile = query.ContainsKey("file");
    var hasUrl = query.ContainsKey("url");
    if (hasFile == false && hasUrl == false)
    {
        throw new ArgumentException("'file' or 'url' are mandatory when using GET /smuggler/import");
    }

    using (ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
    {
        var serverOptions = DatabaseSmugglerOptionsServerSide.Create(HttpContext);

        // Buffer the raw import stream (128 KB) before decompressing it.
        var buffered = new BufferedStream(await GetImportStream(), 128 * Voron.Global.Constants.Size.Kilobyte);
        await using (var gzip = new GZipStream(buffered, CompressionMode.Decompress))
        using (var operationToken = CreateOperationToken())
        using (var importSource = new StreamSource(gzip, context, Database))
        {
            var importDestination = new DatabaseDestination(Database);
            var importer = new DatabaseSmuggler(Database, importSource, importDestination, Database.Time, serverOptions, token: operationToken.Token);

            var importResult = await importer.ExecuteAsync();
            await WriteImportResultAsync(context, importResult, ResponseBodyStream());
        }
    }
}
/// <summary>
/// Runs a database export starting from the given document etag / raft index,
/// streaming the result into the (possibly wrapped) response body stream.
/// The operation token is owned and disposed by this method.
/// </summary>
private async Task<IOperationResult> ExportDatabaseInternalAsync(
    DatabaseSmugglerOptionsServerSide options,
    long startDocumentEtag,
    long startRaftIndex,
    Action<IOperationProgress> onProgress,
    DocumentsOperationContext context,
    OperationCancelToken token)
{
    using (token)
    {
        var exportSource = new DatabaseSource(Database, startDocumentEtag, startRaftIndex, Logger);

        // GetOutputStream may wrap the response (e.g. compression/encryption) per the options.
        await using (var responseStream = GetOutputStream(ResponseBodyStream(), options))
        {
            var exportDestination = new StreamDestination(responseStream, context, exportSource);
            var exporter = new DatabaseSmuggler(Database, exportSource, exportDestination, Database.Time, options, onProgress: onProgress, token: token.Token);
            var operationResult = await exporter.ExecuteAsync();
            return operationResult;
        }
    }
}
/// <summary>
/// Exports the database as a .ravendump attachment. Smuggler options arrive as a
/// JSON string in the request body; the dump itself is produced lazily through a
/// PushStreamContent callback once the client starts reading the response.
/// </summary>
public Task<HttpResponseMessage> ExportDatabase([FromBody] ExportData exportData)
{
    // Deserialize the smuggler options from the raw JSON string in the posted payload.
    DatabaseSmugglerOptions parsedOptions;
    using (var optionsReader = new RavenJsonTextReader(new StringReader(exportData.SmugglerOptions)))
    {
        parsedOptions = (DatabaseSmugglerOptions)JsonExtensions
            .CreateDefaultJsonSerializer()
            .Deserialize(optionsReader, typeof(DatabaseSmugglerOptions));
    }

    var response = GetEmptyMessage();

    // The callback runs when the output stream becomes available; the stream is
    // always closed so the client sees the end of the response even on failure.
    response.Content = new PushStreamContent(async (outputStream, content, arg3) =>
    {
        try
        {
            var exporter = new DatabaseSmuggler(
                parsedOptions,
                new DatabaseSmugglerEmbeddedSource(Database),
                new DatabaseSmugglerStreamDestination(outputStream, leaveOpen: true));
            await exporter.ExecuteAsync().ConfigureAwait(false);
        }
        finally
        {
            outputStream.Close();
        }
    });

    // Fall back to a generated name when none was given or the given one is not a valid file name.
    string downloadName;
    if (string.IsNullOrEmpty(exportData.FileName) || (exportData.FileName.IndexOfAny(Path.GetInvalidFileNameChars()) >= 0))
    {
        downloadName = string.Format("Dump of {0}, {1}", DatabaseName, DateTime.Now.ToString("yyyy-MM-dd HH-mm"));
    }
    else
    {
        downloadName = exportData.FileName;
    }

    response.Content.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment")
    {
        FileName = downloadName + ".ravendump"
    };

    return new CompletedTask<HttpResponseMessage>(response);
}
/// <summary>
/// Seeds the database with the embedded Northwind sample dump. Refuses to run
/// (400 Bad Request) if the database already contains any documents.
/// </summary>
public async Task<HttpResponseMessage> CreateSampleData()
{
    // Sample data may only be loaded into an empty database.
    var existingDocs = Database.Queries.Query(Constants.DocumentsByEntityNameIndex, new IndexQuery(), CancellationToken.None);
    if (existingDocs.Results.Count > 0)
    {
        return GetMessageWithString("You cannot create sample data in a database that already contains documents", HttpStatusCode.BadRequest);
    }

    // The Northwind dump ships as an embedded resource in this assembly.
    using (var dumpStream = typeof(StudioTasksController).Assembly.GetManifestResourceStream("Raven.Database.Server.Assets.EmbeddedData.Northwind.dump"))
    {
        var importOptions = new DatabaseSmugglerOptions
        {
            OperateOnTypes = DatabaseItemType.Documents | DatabaseItemType.Indexes | DatabaseItemType.Transformers,
            ShouldExcludeExpired = false
        };
        var importer = new DatabaseSmuggler(
            importOptions,
            new DatabaseSmugglerStreamSource(dumpStream),
            new DatabaseSmugglerEmbeddedDestination(Database));
        await importer.ExecuteAsync().ConfigureAwait(false);
    }

    return GetEmptyMessage();
}
/// <summary>
/// Imports a database dump uploaded as multipart form data. The upload is first
/// written to a temp folder, then imported on a background task that is registered
/// with Database.Tasks so the studio can poll its progress; responds 202 Accepted
/// with the operation id immediately.
/// </summary>
/// <exception cref="HttpResponseException">415 when the request is not multipart.</exception>
public async Task<HttpResponseMessage> ImportDatabase(int batchSize, bool includeExpiredDocuments, bool stripReplicationInformation, bool shouldDisableVersioningBundle, DatabaseItemType operateOnTypes, string filtersPipeDelimited, string transformScript)
{
    if (!Request.Content.IsMimeMultipartContent())
    {
        throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);
    }

    // Make sure the temp uploads directory exists (and is not shadowed by a file of the same name).
    string tempPath = Database.Configuration.Core.TempPath;
    var fullTempPath = tempPath + Constants.TempUploadsDirectoryName;
    if (File.Exists(fullTempPath))
    {
        File.Delete(fullTempPath);
    }
    if (Directory.Exists(fullTempPath) == false)
    {
        Directory.CreateDirectory(fullTempPath);
    }

    // Stream the multipart body to disk; the first part is the dump file itself.
    var streamProvider = new MultipartFileStreamProvider(fullTempPath);
    await Request.Content.ReadAsMultipartAsync(streamProvider).ConfigureAwait(false);
    var uploadedFilePath = streamProvider.FileData[0].LocalFileName;

    // Recover the client-side file name (quotes stripped) for the task payload, if present.
    string fileName = null;
    var fileContent = streamProvider.Contents.SingleOrDefault();
    if (fileContent != null)
    {
        fileName = fileContent.Headers.ContentDisposition.FileName.Replace("\"", string.Empty);
    }

    var status = new ImportOperationStatus();
    var cts = new CancellationTokenSource();
    var task = Task.Run(async () =>
    {
        try
        {
            using (var fileStream = File.Open(uploadedFilePath, FileMode.Open, FileAccess.Read))
            {
                var smugglerOptions = new DatabaseSmugglerOptions();
                smugglerOptions.BatchSize = batchSize;
                smugglerOptions.ShouldExcludeExpired = !includeExpiredDocuments;
                smugglerOptions.StripReplicationInformation = stripReplicationInformation;
                smugglerOptions.ShouldDisableVersioningBundle = shouldDisableVersioningBundle;
                smugglerOptions.OperateOnTypes = operateOnTypes;
                smugglerOptions.TransformScript = transformScript;

                // Filters are passed in without the aid of the model binder. Instead, we pass in a list of
                // FilterSettings using a string like this: pathHere;;;valueHere;;;true|||againPathHere;;;anotherValue;;;false
                // Why? Because there is no way to pass a list of values to a WebAPI method that accepts a
                // file upload, outside of passing in a simple string value and parsing it ourselves.
                if (filtersPipeDelimited != null)
                {
                    smugglerOptions.Filters.AddRange(filtersPipeDelimited
                        .Split(new[] { "|||" }, StringSplitOptions.RemoveEmptyEntries)
                        .Select(f => f.Split(new[] { ";;;" }, StringSplitOptions.RemoveEmptyEntries))
                        .Select(o => new FilterSetting { Path = o[0], Values = new List<string> { o[1] }, ShouldMatch = bool.Parse(o[2]) }));
                }

                var smuggler = new DatabaseSmuggler(smugglerOptions, new DatabaseSmugglerStreamSource(fileStream), new DatabaseSmugglerEmbeddedDestination(Database));
                // Surface smuggler progress messages through the polled operation status.
                smuggler.Notifications.OnProgress += (sender, message) => status.LastProgress = message;

                await smuggler.ExecuteAsync(cts.Token).ConfigureAwait(false);
            }
        }
        catch (Exception e)
        {
            status.Faulted = true;
            status.State = RavenJObject.FromObject(new
            {
                Error = e.ToString()
            });
            if (cts.Token.IsCancellationRequested)
            {
                status.State = RavenJObject.FromObject(new { Error = "Task was cancelled" });
                cts.Token.ThrowIfCancellationRequested(); //needed for displaying the task status as canceled and not faulted
            }

            // Translate well-known failure modes into friendlier messages for the studio.
            if (e is InvalidDataException)
            {
                status.ExceptionDetails = e.Message;
            }
            else if (e is JsonReaderException)
            {
                // FIX: corrected ".ravnedump" typo in the user-facing message.
                status.ExceptionDetails = "Failed to load JSON Data. Please make sure you are importing .ravendump file, exported by smuggler (aka database export). If you are importing a .ravendump file then the file may be corrupted";
            }
            else if (e is OperationVetoedException && e.Message.Contains(VersioningPutTrigger.CreationOfHistoricalRevisionIsNotAllowed))
            {
                // FIX: removed the duplicated "The versioning bundle is enabled." sentence.
                status.ExceptionDetails = "You are trying to import historical documents while the versioning bundle is enabled. " +
                                          "You should disable versioning during import. " +
                                          "Please mark the checkbox 'Disable versioning bundle during import' at Import Database: Advanced settings before importing";
            }
            else
            {
                status.ExceptionDetails = e.ToString();
            }
            throw;
        }
        finally
        {
            status.Completed = true;
            // The uploaded temp file is no longer needed whether the import succeeded or not.
            File.Delete(uploadedFilePath);
        }
    }, cts.Token);

    long id;
    Database.Tasks.AddTask(task, status, new TaskActions.PendingTaskDescription
    {
        StartTime = SystemTime.UtcNow,
        TaskType = TaskActions.PendingTaskType.ImportDatabase,
        Payload = fileName,
    }, out id, cts);

    return GetMessageWithObject(new
    {
        OperationId = id
    }, HttpStatusCode.Accepted);
}
/// <summary>
/// Imports multiple gzipped dump files in parallel. Each worker task repeatedly
/// takes a file factory from the blocking collection, runs a smuggler import on
/// it, and queues its result; after all workers finish, the per-file results are
/// merged into one final result that is written both to a "smuggler.results.txt"
/// file in <paramref name="directory"/> (best effort) and to the response body.
/// </summary>
private async Task BulkImport(BlockingCollection<Func<Task<Stream>>> files, string directory)
{
    // Worker count: caller override via ?maxTasks=, otherwise half the processor count (min 1).
    var maxTasks = GetIntValueQueryString("maxTasks", required: false) ?? ProcessorInfo.ProcessorCount / 2;
    var results = new ConcurrentQueue<SmugglerResult>();
    var tasks = new Task[Math.Max(1, maxTasks)];
    var finalResult = new SmugglerResult();

    for (int i = 0; i < tasks.Length; i++)
    {
        tasks[i] = Task.Run(async () =>
        {
            while (files.IsCompleted == false)
            {
                Func<Task<Stream>> getFile;
                try
                {
                    getFile = files.Take();
                }
                catch (Exception)
                {
                    // Take() throws when the collection is marked complete while we block;
                    // loop back so the IsCompleted check can exit the worker cleanly.
                    continue;
                }
                using (ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
                await using (var file = await getFile())
                // Buffer (128 KB) then decompress each dump before feeding it to the smuggler.
                await using (var stream = new GZipStream(new BufferedStream(file, 128 * Voron.Global.Constants.Size.Kilobyte), CompressionMode.Decompress))
                using (var source = new StreamSource(stream, context, Database))
                {
                    var destination = new DatabaseDestination(Database);
                    var smuggler = new DatabaseSmuggler(Database, source, destination, Database.Time);
                    var result = await smuggler.ExecuteAsync();
                    results.Enqueue(result);
                }
            }
        });
    }

    await Task.WhenAll(tasks);

    // Merge the per-file results: counts are summed, LastEtag keeps the max.
    while (results.TryDequeue(out SmugglerResult importResult))
    {
        finalResult.Documents.SkippedCount += importResult.Documents.SkippedCount;
        finalResult.Documents.ReadCount += importResult.Documents.ReadCount;
        finalResult.Documents.ErroredCount += importResult.Documents.ErroredCount;
        finalResult.Documents.LastEtag = Math.Max(finalResult.Documents.LastEtag, importResult.Documents.LastEtag);
        // NOTE(review): Attachments is assigned (last result wins), not accumulated —
        // looks intentional but worth confirming against SmugglerResult's semantics.
        finalResult.Documents.Attachments = importResult.Documents.Attachments;

        finalResult.RevisionDocuments.ReadCount += importResult.RevisionDocuments.ReadCount;
        finalResult.RevisionDocuments.ErroredCount += importResult.RevisionDocuments.ErroredCount;
        finalResult.RevisionDocuments.LastEtag = Math.Max(finalResult.RevisionDocuments.LastEtag, importResult.RevisionDocuments.LastEtag);
        finalResult.RevisionDocuments.Attachments = importResult.RevisionDocuments.Attachments;

        finalResult.Counters.ReadCount += importResult.Counters.ReadCount;
        finalResult.Counters.ErroredCount += importResult.Counters.ErroredCount;
        finalResult.Counters.LastEtag = Math.Max(finalResult.Counters.LastEtag, importResult.Counters.LastEtag);

        finalResult.TimeSeries.ReadCount += importResult.TimeSeries.ReadCount;
        finalResult.TimeSeries.ErroredCount += importResult.TimeSeries.ErroredCount;
        finalResult.TimeSeries.LastEtag = Math.Max(finalResult.TimeSeries.LastEtag, importResult.TimeSeries.LastEtag);

        finalResult.Identities.ReadCount += importResult.Identities.ReadCount;
        finalResult.Identities.ErroredCount += importResult.Identities.ErroredCount;

        finalResult.CompareExchange.ReadCount += importResult.CompareExchange.ReadCount;
        finalResult.CompareExchange.ErroredCount += importResult.CompareExchange.ErroredCount;

        finalResult.Subscriptions.ReadCount += importResult.Subscriptions.ReadCount;
        finalResult.Subscriptions.ErroredCount += importResult.Subscriptions.ErroredCount;

        finalResult.Indexes.ReadCount += importResult.Indexes.ReadCount;
        finalResult.Indexes.ErroredCount += importResult.Indexes.ErroredCount;

        foreach (var message in importResult.Messages)
        {
            finalResult.AddMessage(message);
        }
    }

    using (ContextPool.AllocateOperationContext(out DocumentsOperationContext finalContext))
    {
        // Render the merged result once into memory, then replay it to the results
        // file (best effort) and to the response body.
        var memoryStream = new MemoryStream();
        await WriteImportResultAsync(finalContext, finalResult, memoryStream);
        memoryStream.Position = 0;
        try
        {
            await using (var output = File.Create(Path.Combine(directory, "smuggler.results.txt")))
            {
                await memoryStream.CopyToAsync(output);
            }
        }
        catch (Exception)
        {
            // ignore any failure here — the on-disk summary is informational only;
            // the response below still carries the result.
        }
        memoryStream.Position = 0;
        await memoryStream.CopyToAsync(ResponseBodyStream());
    }
}
/// <summary>
/// Timer entry point for periodic export. Skips the run if an export task is
/// already in flight or the database is disposed; otherwise starts a background
/// task that performs a full or incremental export, uploads it, and persists the
/// updated export status document.
/// </summary>
/// <param name="fullBackup">true for a full dump; false for an incremental export
/// driven by the last persisted doc/deletion etags.</param>
private void TimerCallback(bool fullBackup)
{
    // Cheap pre-check outside the lock; re-checked under the lock below.
    if (currentTask != null)
    {
        return;
    }
    if (Database.Disposed)
    {
        Dispose();
        return;
    }

    // we have shared lock for both incremental and full backup.
    // NOTE(review): lock(this) is an anti-pattern (external code could lock the same
    // instance); a private lock object would be safer — confirm before changing.
    lock (this)
    {
        if (currentTask != null)
        {
            return;
        }
        currentTask = Task.Factory.StartNew(async () =>
        {
            var documentDatabase = Database;
            if (documentDatabase == null)
            {
                return;
            }
            using (LogContext.WithDatabase(documentDatabase.Name))
            {
                try
                {
                    DatabaseSmugglerOperationState exportResult;
                    bool performAnotherRun = false;
                    do
                    {
                        // Snapshot config/status fields so a concurrent refresh can't
                        // change them mid-run.
                        var localBackupConfigs = exportConfigs;
                        var localBackupStatus = exportStatus;
                        if (localBackupConfigs == null)
                        {
                            return;
                        }
                        if (localBackupConfigs.Disabled)
                        {
                            return;
                        }
                        var source = new DatabaseSmugglerEmbeddedSource(Database);
                        if (fullBackup == false)
                        {
                            var currentEtags = await source.FetchCurrentMaxEtagsAsync(CancellationToken.None).ConfigureAwait(false);
                            // No-op if nothing has changed
                            if (currentEtags.LastDocsEtag == localBackupStatus.LastDocsEtag &&
                                currentEtags.LastDocDeleteEtag == localBackupStatus.LastDocsDeletionEtag)
                            {
                                return;
                            }
                        }

                        // Export destination: the configured local folder, or a temp
                        // folder under the data directory.
                        var backupPath = localBackupConfigs.LocalFolderName ??
                                         Path.Combine(documentDatabase.Configuration.Core.DataDirectory, "PeriodicExport-Temp");
                        if (Directory.Exists(backupPath) == false)
                        {
                            Directory.CreateDirectory(backupPath);
                        }

                        if (fullBackup)
                        {
                            // create filename for full dump; append " - N" until the
                            // name is unique for this minute.
                            backupPath = Path.Combine(backupPath, SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + ".ravendb-full-dump");
                            if (File.Exists(backupPath))
                            {
                                var counter = 1;
                                while (true)
                                {
                                    backupPath = Path.Combine(Path.GetDirectoryName(backupPath), SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + " - " + counter + ".ravendb-full-dump");
                                    if (File.Exists(backupPath) == false)
                                    {
                                        break;
                                    }
                                    counter++;
                                }
                            }
                        }

                        var smugglerOptions = new DatabaseSmugglerOptions
                        {
                            Limit = backupLimit
                        };
                        var smugglerFileOptions = new DatabaseSmugglerFileDestinationOptions();
                        if (fullBackup == false)
                        {
                            // Incremental: resume from the last persisted etags.
                            smugglerOptions.StartDocsEtag = localBackupStatus.LastDocsEtag;
                            smugglerOptions.StartDocsDeletionEtag = localBackupStatus.LastDocsDeletionEtag;
                            smugglerFileOptions.Incremental = true;
                        }

                        var smuggler = new DatabaseSmuggler(smugglerOptions, source, new DatabaseSmugglerFileDestination(backupPath, smugglerFileOptions));
                        exportResult = await smuggler.ExecuteAsync().ConfigureAwait(false);

                        if (fullBackup == false)
                        {
                            // No-op if nothing has changed
                            if (exportResult.LastDocsEtag == localBackupStatus.LastDocsEtag &&
                                exportResult.LastDocDeleteEtag == localBackupStatus.LastDocsDeletionEtag)
                            {
                                logger.Info("Periodic export returned prematurely, nothing has changed since last export");
                                return;
                            }
                        }

                        try
                        {
                            UploadToServer(exportResult.FilePath, localBackupConfigs, fullBackup);
                        }
                        finally
                        {
                            // if user did not specify local folder we delete temporary file.
                            if (String.IsNullOrEmpty(localBackupConfigs.LocalFolderName))
                            {
                                IOExtensions.DeleteFile(exportResult.FilePath);
                            }
                        }

                        // Persist the new high-water marks in the status document.
                        localBackupStatus.LastDocsEtag = exportResult.LastDocsEtag;
                        localBackupStatus.LastDocsDeletionEtag = exportResult.LastDocDeleteEtag;
                        if (fullBackup)
                        {
                            localBackupStatus.LastFullBackup = SystemTime.UtcNow;
                        }
                        else
                        {
                            localBackupStatus.LastBackup = SystemTime.UtcNow;
                        }
                        var ravenJObject = JsonExtensions.ToJObject(localBackupStatus);
                        ravenJObject.Remove("Id");
                        var putResult = documentDatabase.Documents.Put(PeriodicExportStatus.RavenDocumentKey, null, ravenJObject, new RavenJObject(), null);

                        // this result in exportStatus being refreshed
                        localBackupStatus = exportStatus;
                        if (localBackupStatus != null)
                        {
                            if (localBackupStatus.LastDocsEtag.IncrementBy(1) == putResult.ETag) // the last etag is with just us
                            {
                                localBackupStatus.LastDocsEtag = putResult.ETag; // so we can skip it for the next time
                            }
                        }

                        // After a limited (recovery) run, lift the limit and run once more.
                        if (backupLimit != int.MaxValue)
                        {
                            backupLimit = int.MaxValue;
                            performAnotherRun = true;
                        }
                        else
                        {
                            performAnotherRun = false;
                        }
                    } while (performAnotherRun);
                }
                catch (ObjectDisposedException)
                {
                    // shutting down, probably
                }
                catch (Exception e)
                {
                    // On failure, limit the next attempt to 100 items so a poisoned
                    // export cannot repeatedly fail on the full dataset.
                    backupLimit = 100;
                    logger.ErrorException("Error when performing periodic export", e);
                    Database.AddAlert(new Alert
                    {
                        AlertLevel = AlertLevel.Error,
                        CreatedAt = SystemTime.UtcNow,
                        Message = e.Message,
                        Title = "Error in Periodic Export",
                        Exception = e.ToString(),
                        UniqueKey = "Periodic Export Error",
                    });
                }
            }
        })
        .Unwrap(); // StartNew(async ...) returns Task<Task>; Unwrap tracks the inner task.
        // NOTE(review): clearing currentTask in a continuation races with the null
        // pre-check above; benign here only if overlapping runs are acceptable — confirm.
        currentTask.ContinueWith(_ =>
        {
            currentTask = null;
        });
    }
}