private async Task MigrateDocuments(string lastEtag)
{
    // Stream documents from the remote server, resuming after the given etag.
    var docsUrl = $"{ServerUrl}/databases/{DatabaseName}/streams/docs?etag={lastEtag}";
    var requestMessage = new HttpRequestMessage(HttpMethod.Get, docsUrl);
    // ResponseHeadersRead: begin consuming the document stream without buffering the whole body.
    var response = await HttpClient.SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, CancelToken.Token);

    if (response.IsSuccessStatusCode == false)
    {
        var responseString = await response.Content.ReadAsStringAsync();
        throw new InvalidOperationException($"Failed to export documents from server: {ServerUrl}, " +
                                            $"status code: {response.StatusCode}, " +
                                            $"error: {responseString}");
    }

    using (var responseStream = await response.Content.ReadAsStreamAsync())
    using (Database.DocumentsStorage.ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
    using (var source = new StreamSource(responseStream, context, Database))
    {
        var smugglerOptions = new DatabaseSmugglerOptionsServerSide
        {
            ReadLegacyEtag = true
        };
        var smuggler = new DatabaseSmuggler(Database, source, new DatabaseDestination(Database), Database.Time,
            smugglerOptions, Result, OnProgress, CancelToken.Token);

        // since we will be migrating indexes as separate task don't ensureStepsProcessed at this point
        smuggler.Execute(ensureStepsProcessed: false);
    }
}
private async Task MigrateIndexes()
{
    // Fetch index definitions from the remote server, retrying once on an auth failure.
    var response = await RunWithAuthRetry(async () =>
    {
        var indexesUrl = $"{Options.ServerUrl}/databases/{Options.DatabaseName}/indexes";
        var requestMessage = new HttpRequestMessage(HttpMethod.Get, indexesUrl);
        return await Parameters.HttpClient.SendAsync(requestMessage, Parameters.CancelToken.Token);
    });

    if (response.IsSuccessStatusCode == false)
    {
        var responseString = await response.Content.ReadAsStringAsync();
        throw new InvalidOperationException($"Failed to export indexes from server: {Options.ServerUrl}, " +
                                            $"status code: {response.StatusCode}, " +
                                            $"error: {responseString}");
    }

    using (var responseStream = await response.Content.ReadAsStreamAsync())
    using (var indexesStream = new ArrayStream(responseStream, "Indexes")) // indexes endpoint returns an array
    using (Parameters.Database.DocumentsStorage.ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
    using (var source = new StreamSource(indexesStream, context, Parameters.Database))
    {
        var smugglerOptions = new DatabaseSmugglerOptionsServerSide
        {
            RemoveAnalyzers = Options.RemoveAnalyzers,
        };
        var smuggler = new DatabaseSmuggler(Parameters.Database, source, new DatabaseDestination(Parameters.Database),
            Parameters.Database.Time, smugglerOptions, Parameters.Result, Parameters.OnProgress, Parameters.CancelToken.Token);

        smuggler.Execute();
    }
}
private long CreateBackup(
    DatabaseSmugglerOptionsServerSide options,
    string backupFilePath,
    long? startDocumentEtag,
    Action<IOperationProgress> onProgress)
{
    // The previous backup already contains startDocumentEtag, so resume from the next etag.
    startDocumentEtag = startDocumentEtag == null ? 0 : ++startDocumentEtag;

    // CreateNew: fail if a backup file with this name already exists.
    using (var backupFile = File.Open(backupFilePath, FileMode.CreateNew))
    using (_database.DocumentsStorage.ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
    {
        var source = new DatabaseSource(_database, startDocumentEtag.Value);
        var destination = new StreamDestination(backupFile, context, source);
        var smuggler = new DatabaseSmuggler(_database,
            source,
            destination,
            _database.Time,
            options: options,
            result: _backupResult,
            onProgress: onProgress,
            token: TaskCancelToken.Token);

        smuggler.Execute();
        backupFile.Flush(flushToDisk: true); // make sure the backup content reached stable storage

        // Report where the next incremental backup should resume from.
        return source.LastEtag;
    }
}
public async Task Documents()
{
    // Legacy replication endpoint: imports the batch of documents in the request body
    // and records how far this source has replicated.
    using (ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
    using (var stream = new ArrayStream(RequestBodyStream(), "Docs"))
    using (var source = new StreamSource(stream, context, Database))
    {
        var smugglerOptions = new DatabaseSmugglerOptionsServerSide
        {
            ReadLegacyEtag = true,
            OperateOnTypes = DatabaseItemType.Documents
        };
        var smuggler = new DatabaseSmuggler(Database, source, new DatabaseDestination(Database), Database.Time, smugglerOptions);
        var result = smuggler.Execute();

        // Update the stored replication-source bookkeeping for this remote server.
        var replicationSource = GetSourceReplicationInformation(context, GetRemoteServerInstanceId(), out var documentId);
        replicationSource.LastDocumentEtag = result.LegacyLastDocumentEtag;
        replicationSource.Source = GetFromServer();
        replicationSource.LastBatchSize = result.Documents.ReadCount + result.Tombstones.ReadCount;
        replicationSource.LastModified = DateTime.UtcNow;

        await SaveSourceReplicationInformation(replicationSource, context, documentId);
    }
}
public Task PostCreateSampleData()
{
    using (ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
    {
        // Refuse to seed a database that already has any documents.
        using (context.OpenReadTransaction())
        {
            foreach (var collection in Database.DocumentsStorage.GetCollections(context))
            {
                if (collection.Count > 0)
                {
                    throw new InvalidOperationException("You cannot create sample data in a database that already contains documents");
                }
            }
        }

        // The Northwind sample ships as a gzipped embedded resource; import it via the smuggler.
        using (var sampleData = typeof(SampleDataHandler).GetTypeInfo().Assembly
                   .GetManifestResourceStream("Raven.Server.Web.Studio.EmbeddedData.Northwind_3.5.35168.ravendbdump"))
        using (var stream = new GZipStream(sampleData, CompressionMode.Decompress))
        using (var source = new StreamSource(stream, context, Database))
        {
            var smuggler = new DatabaseSmuggler(Database, source, new DatabaseDestination(Database), Database.Time);
            smuggler.Execute();
        }

        return NoContent();
    }
}
private async Task MigrateIndexes()
{
    // Pull index definitions from the remote server.
    var indexesUrl = $"{ServerUrl}/databases/{DatabaseName}/indexes";
    var requestMessage = new HttpRequestMessage(HttpMethod.Get, indexesUrl);
    var response = await _client.SendAsync(requestMessage, CancelToken.Token);

    if (response.IsSuccessStatusCode == false)
    {
        var responseString = await response.Content.ReadAsStringAsync();
        throw new InvalidOperationException($"Failed to export indexes from server: {ServerUrl}, " +
                                            $"status code: {response.StatusCode}, " +
                                            $"error: {responseString}");
    }

    using (var responseStream = await response.Content.ReadAsStreamAsync())
    // indexes endpoint returns an array
    using (var indexesStream = new IndexesStream(responseStream))
    using (Database.DocumentsStorage.ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
    using (var source = new StreamSource(indexesStream, context, Database))
    {
        var smuggler = new DatabaseSmuggler(Database, source, new DatabaseDestination(Database), Database.Time,
            new DatabaseSmugglerOptionsServerSide(), Result, OnProgress, CancelToken.Token);
        smuggler.Execute();
    }
}
private async Task MigrateDatabase(string json)
{
    // Ask the remote server for a full database export; the request body (json)
    // carries the export options.
    var exportUrl = $"{ServerUrl}/databases/{DatabaseName}/studio-tasks/exportDatabase";
    var requestMessage = new HttpRequestMessage(HttpMethod.Post, exportUrl)
    {
        Content = new StringContent(json, Encoding.UTF8, "application/json")
    };
    // ResponseHeadersRead: start consuming the export stream without buffering it all.
    var response = await _client.SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, CancelToken.Token);

    if (response.IsSuccessStatusCode == false)
    {
        var responseString = await response.Content.ReadAsStringAsync();
        throw new InvalidOperationException($"Failed to export database from server: {ServerUrl}, " +
                                            $"status code: {response.StatusCode}, " +
                                            $"error: {responseString}");
    }

    // The export arrives gzip-compressed; decompress and feed it to the smuggler.
    using (var responseStream = await response.Content.ReadAsStreamAsync())
    using (var stream = new GZipStream(responseStream, mode: CompressionMode.Decompress))
    using (Database.DocumentsStorage.ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
    using (var source = new StreamSource(stream, context))
    {
        var smuggler = new DatabaseSmuggler(Database, source, new DatabaseDestination(Database), Database.Time,
            new DatabaseSmugglerOptionsServerSide(), Result, OnProgress, CancelToken.Token);
        smuggler.Execute();
    }
}
public async Task PostCreateSampleData()
{
    using (ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
    {
        // Seeding is only allowed into an empty database: fail if any collection has documents.
        using (context.OpenReadTransaction())
        {
            foreach (var collection in Database.DocumentsStorage.GetCollections(context))
            {
                if (collection.Count > 0)
                {
                    throw new InvalidOperationException("You cannot create sample data in a database that already contains documents");
                }
            }
        }

        // Enable revisions on the "Orders" collection via a cluster command, and wait
        // until the command's raft index is applied locally before importing.
        var editRevisions = new EditRevisionsConfigurationCommand(new RevisionsConfiguration
        {
            Collections = new Dictionary <string, RevisionsCollectionConfiguration>
            {
                ["Orders"] = new RevisionsCollectionConfiguration
                {
                    Disabled = false
                }
            }
        }, Database.Name);
        var(index, _) = await ServerStore.SendToLeaderAsync(editRevisions);
        await Database.RachisLogIndexNotifications.WaitForIndexNotification(index, Database.ServerStore.Engine.OperationTimeout);

        // The Northwind sample database ships as a gzipped .ravendbdump embedded resource.
        using (var sampleData = typeof(SampleDataHandler).GetTypeInfo().Assembly
            .GetManifestResourceStream("Raven.Server.Web.Studio.EmbeddedData.Northwind.ravendbdump"))
        {
            using (var stream = new GZipStream(sampleData, CompressionMode.Decompress))
            using (var source = new StreamSource(stream, context, Database))
            {
                var destination = new DatabaseDestination(Database);
                // SkipRevisionCreation: the dump already contains revisions, so the import
                // should not create additional ones.
                var smuggler = new DatabaseSmuggler(Database, source, destination, Database.Time,
                    options: new DatabaseSmugglerOptionsServerSide
                    {
                        OperateOnTypes = DatabaseItemType.Documents | DatabaseItemType.RevisionDocuments | DatabaseItemType.Attachments | DatabaseItemType.Indexes,
                        SkipRevisionCreation = true
                    });
                smuggler.Execute();
            }
        }

        await NoContent();
    }
}
private void HandleLegacyIndexes()
{
    // Imports index definitions pushed by a legacy server from the request body.
    using (ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
    using (var stream = new ArrayStream(RequestBodyStream(), nameof(DatabaseItemType.Indexes)))
    using (var source = new StreamSource(stream, context, Database))
    {
        var smugglerOptions = new DatabaseSmugglerOptionsServerSide
        {
            OperateOnTypes = DatabaseItemType.Indexes
        };
        var smuggler = new DatabaseSmuggler(Database, source, new DatabaseDestination(Database), Database.Time, smugglerOptions);
        smuggler.Execute();
    }
}
private InternalBackupResult CreateBackup(
    DatabaseSmugglerOptionsServerSide options,
    string backupFilePath,
    long?startDocumentEtag,
    long?startRaftIndex)
{
    // the last etag is already included in the last backup
    var currentBackupResults = new InternalBackupResult();
    startDocumentEtag = startDocumentEtag == null ? 0 : ++startDocumentEtag;
    startRaftIndex = startRaftIndex == null ? 0 : ++startRaftIndex;

    // CreateNew: fail if a backup file with this name already exists.
    // GetOutputStream may wrap the file (e.g. with an encrypting stream).
    using (Stream fileStream = File.Open(backupFilePath, FileMode.CreateNew))
    using (var outputStream = GetOutputStream(fileStream))
    using (_database.DocumentsStorage.ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
    {
        var smugglerSource = new DatabaseSource(_database, startDocumentEtag.Value, startRaftIndex.Value, _logger);
        var smugglerDestination = new StreamDestination(outputStream, context, smugglerSource);
        var smuggler = new DatabaseSmuggler(_database,
            smugglerSource,
            smugglerDestination,
            _database.Time,
            options: options,
            result: _backupResult,
            onProgress: _onProgress,
            token: TaskCancelToken.Token);

        smuggler.Execute();

        // Flush to stable storage before reporting success; dispatch on the concrete
        // stream type because each exposes its own flushToDisk overload.
        switch (outputStream)
        {
            case EncryptingXChaCha20Poly1305Stream encryptedStream:
                encryptedStream.Flush(flushToDisk: true);
                break;
            case FileStream file:
                file.Flush(flushToDisk: true);
                break;
            default:
                throw new InvalidOperationException($" {outputStream.GetType()} not supported");
        }

        // Record where the next incremental backup should resume from.
        currentBackupResults.LastDocumentEtag = smugglerSource.LastEtag;
        currentBackupResults.LastDatabaseChangeVector = smugglerSource.LastDatabaseChangeVector;
        currentBackupResults.LastRaftIndex = smugglerSource.LastRaftIndex;
        return(currentBackupResults);
    }
}
public void CanImportNorthwind(string file)
{
    // Imports a gzipped Northwind .ravendbdump embedded resource through the smuggler
    // with a transform script, then verifies the import counts and the script's effect.
    using (var inputStream = GetType().GetTypeInfo().Assembly.GetManifestResourceStream(file))
    using (var stream = new GZipStream(inputStream, CompressionMode.Decompress))
    {
        Assert.NotNull(stream);

        using (var database = CreateDocumentDatabase())
        using (database.DocumentsStorage.ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
        using (var source = new StreamSource(stream, context))
        {
            var destination = new DatabaseDestination(database);
            var smuggler = new DatabaseSmuggler(database, source, destination, database.Time,
                new DatabaseSmugglerOptionsServerSide
                {
                    TransformScript = "this['Test'] = 'NewValue';"
                });
            var result = smuggler.Execute();

            Assert.Equal(1059, result.Documents.ReadCount);
            Assert.Equal(0, result.Documents.SkippedCount);
            Assert.Equal(0, result.Documents.ErroredCount);
            Assert.Equal(4, result.Indexes.ReadCount);
            Assert.Equal(0, result.Indexes.ErroredCount);
            Assert.Equal(0, result.RevisionDocuments.ReadCount);
            Assert.Equal(0, result.RevisionDocuments.ErroredCount);

            using (context.OpenReadTransaction())
            {
                var countOfDocuments = database.DocumentsStorage.GetNumberOfDocuments(context);
                var countOfIndexes = database.IndexStore.GetIndexes().Count();
                Assert.Equal(1059, countOfDocuments);
                Assert.Equal(3, countOfIndexes);// there are 4 in ravendbdump, but Raven/DocumentsByEntityName is skipped

                // The transform script should have stamped a 'Test' property onto every document.
                var doc = database.DocumentsStorage.Get(context, "orders/1");
                string test;
                Assert.True(doc.Data.TryGet("Test", out test));
                Assert.Equal("NewValue", test);
            }
        }
    }
}
private async Task <SmugglerResult> MigrateDatabase(string json, bool readLegacyEtag)
{
    // Asks the remote server for a full database export (the export options travel in
    // the json request body), retrying once if the first attempt fails authentication.
    var response = await RunWithAuthRetry(async() =>
    {
        var url = $"{Options.ServerUrl}/databases/{Options.DatabaseName}/studio-tasks/exportDatabase";
        var content = new StringContent(json, Encoding.UTF8, "application/json");
        var request = new HttpRequestMessage(HttpMethod.Post, url)
        {
            Content = content
        };
        // ResponseHeadersRead: start consuming the export stream without buffering it all.
        var responseMessage = await Parameters.HttpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, Parameters.CancelToken.Token);
        return(responseMessage);
    });

    if (response.IsSuccessStatusCode == false)
    {
        var responseString = await response.Content.ReadAsStringAsync();
        throw new InvalidOperationException($"Failed to export database from server: {Options.ServerUrl}, " +
                                            $"status code: {response.StatusCode}, " +
                                            $"error: {responseString}");
    }

    // The export stream arrives gzip-compressed; decompress and import it via the smuggler.
    using (var responseStream = await response.Content.ReadAsStreamAsync())
    using (var stream = new GZipStream(responseStream, mode: CompressionMode.Decompress))
    using (Parameters.Database.DocumentsStorage.ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
    using (var source = new StreamSource(stream, context, Parameters.Database))
    {
        var destination = new DatabaseDestination(Parameters.Database);
        var options = new DatabaseSmugglerOptionsServerSide
        {
            // ReadLegacyEtag is obsolete but required when migrating from legacy servers.
#pragma warning disable 618
            ReadLegacyEtag = readLegacyEtag,
#pragma warning restore 618
            RemoveAnalyzers = Options.RemoveAnalyzers,
            TransformScript = Options.TransformScript,
            OperateOnTypes = Options.OperateOnTypes
        };
        var smuggler = new DatabaseSmuggler(Parameters.Database, source, destination, Parameters.Database.Time, options, Parameters.Result, Parameters.OnProgress, Parameters.CancelToken.Token);

        return(smuggler.Execute());
    }
}
private async Task MigrateDocuments(string lastEtag)
{
    // Streams documents from the remote server starting at lastEtag, retrying once
    // if the first attempt fails authentication.
    var response = await RunWithAuthRetry(async() =>
    {
        var url = $"{Options.ServerUrl}/databases/{Options.DatabaseName}/streams/docs?etag={lastEtag}";
        var request = new HttpRequestMessage(HttpMethod.Get, url);
        // ResponseHeadersRead: consume the document stream without buffering the whole body.
        var responseMessage = await Parameters.HttpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, Parameters.CancelToken.Token);
        return(responseMessage);
    });

    if (response.IsSuccessStatusCode == false)
    {
        var responseString = await response.Content.ReadAsStringAsync();
        throw new InvalidOperationException($"Failed to export documents from server: {Options.ServerUrl}, " +
                                            $"status code: {response.StatusCode}, " +
                                            $"error: {responseString}");
    }

    using (var responseStream = await response.Content.ReadAsStreamAsync())
    using (Parameters.Database.DocumentsStorage.ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
    using (var source = new StreamSource(responseStream, context, Parameters.Database))
    {
        var destination = new DatabaseDestination(Parameters.Database);
        var options = new DatabaseSmugglerOptionsServerSide
        {
            // ReadLegacyEtag is obsolete but required when migrating from legacy servers.
#pragma warning disable 618
            ReadLegacyEtag = true,
#pragma warning restore 618
            TransformScript = Options.TransformScript,
            OperateOnTypes = Options.OperateOnTypes
        };
        var smuggler = new DatabaseSmuggler(Parameters.Database, source, destination, Parameters.Database.Time, options, Parameters.Result, Parameters.OnProgress, Parameters.CancelToken.Token);

        // since we will be migrating indexes as separate task don't ensureStepsProcessed at this point
        smuggler.Execute(ensureStepsProcessed: false);
    }
}
private SmugglerResult CreateBackup(DatabaseSmugglerOptionsServerSide options, string backupFilePath, long? startDocumentEtag, DocumentsOperationContext context)
{
    // The previous backup already contains startDocumentEtag, so resume from the next etag.
    startDocumentEtag = startDocumentEtag == null ? 0 : ++startDocumentEtag;

    // CreateNew: fail if a backup file with this name already exists.
    using (var backupFile = File.Open(backupFilePath, FileMode.CreateNew))
    {
        var source = new DatabaseSource(_database, startDocumentEtag.Value);
        var destination = new StreamDestination(backupFile, context, source);
        var smuggler = new DatabaseSmuggler(_database,
            source,
            destination,
            _database.Time,
            token: _cancellationToken.Token,
            options: options);
        return smuggler.Execute();
    }
}
public void FullBackupTo(string backupPath)
{
    // Writes a full backup zip containing: smuggler-exported cluster values
    // (compare-exchange, identities), the database record plus database values,
    // and the raw storage environments.
    using (var file = SafeFileStream.Create(backupPath, FileMode.Create))
    using (var package = new ZipArchive(file, ZipArchiveMode.Create, leaveOpen: true))
    using (_serverStore.ContextPool.AllocateOperationContext(out TransactionOperationContext context))
    using (context.OpenReadTransaction())
    {
        var databaseRecord = _serverStore.Cluster.ReadDatabase(context, Name);
        Debug.Assert(databaseRecord != null);

        // Export compare-exchange values and identities through the smuggler into the zip.
        var zipArchiveEntry = package.CreateEntry(RestoreSettings.SmugglerValuesFileName, CompressionLevel.Optimal);
        using (var zipStream = zipArchiveEntry.Open())
        {
            var smugglerSource = new DatabaseSource(this, 0);
            using (DocumentsStorage.ContextPool.AllocateOperationContext(out DocumentsOperationContext ctx))
            using (ctx.OpenReadTransaction())
            {
                var smugglerDestination = new StreamDestination(zipStream, ctx, smugglerSource);
                var databaseSmugglerOptionsServerSide = new DatabaseSmugglerOptionsServerSide
                {
                    AuthorizationStatus = AuthorizationStatus.DatabaseAdmin,
                    OperateOnTypes = DatabaseItemType.CompareExchange | DatabaseItemType.Identities
                };
                var smuggler = new DatabaseSmuggler(this, smugglerSource, smugglerDestination, this.Time,
                    options: databaseSmugglerOptionsServerSide);

                smuggler.Execute();
            }
        }

        // Write the settings entry: the database record and all database values as one JSON object.
        zipArchiveEntry = package.CreateEntry(RestoreSettings.SettingsFileName, CompressionLevel.Optimal);
        using (var zipStream = zipArchiveEntry.Open())
        using (var writer = new BlittableJsonTextWriter(context, zipStream))
        {
            //TODO: encrypt this file using the MasterKey
            //http://issues.hibernatingrhinos.com/issue/RavenDB-7546
            writer.WriteStartObject();

            // save the database record
            writer.WritePropertyName(nameof(RestoreSettings.DatabaseRecord));
            var databaseRecordBlittable = EntityToBlittable.ConvertEntityToBlittable(databaseRecord, DocumentConventions.Default, context);
            context.Write(writer, databaseRecordBlittable);

            // save the database values (subscriptions, periodic backups statuses, etl states...)
            writer.WriteComma();
            writer.WritePropertyName(nameof(RestoreSettings.DatabaseValues));
            writer.WriteStartObject();

            var first = true;
            var prefix = Helpers.ClusterStateMachineValuesPrefix(Name);
            foreach (var keyValue in ClusterStateMachine.ReadValuesStartingWith(context, prefix))
            {
                if (first == false)
                {
                    writer.WriteComma();
                }

                first = false;

                // strip the cluster-wide prefix so values are stored under their local key
                var key = keyValue.Key.ToString().Substring(prefix.Length);
                writer.WritePropertyName(key);
                context.Write(writer, keyValue.Value);
            }

            writer.WriteEndObject();
            // end of values

            writer.WriteEndObject();
        }

        // Add the raw storage environments to the package.
        BackupMethods.Full.ToFile(GetAllStoragesForBackup(), package);

        file.Flush(true); // make sure that we fully flushed to disk
    }
}
public async Task PostCreateSampleData()
{
    using (ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
    {
        // Seeding is only allowed into an empty database: fail if any collection has documents.
        using (context.OpenReadTransaction())
        {
            foreach (var collection in Database.DocumentsStorage.GetCollections(context))
            {
                if (collection.Count > 0)
                {
                    throw new InvalidOperationException("You cannot create sample data in a database that already contains documents");
                }
            }
        }

        // Which item types to import is controlled by the optional "operateOnTypes" query string.
        var operateOnTypesAsString = GetStringValuesQueryString("operateOnTypes", required: false);
        var operateOnTypes = GetOperateOnTypes(operateOnTypesAsString);

        if (operateOnTypes.HasFlag(DatabaseItemType.RevisionDocuments))
        {
            // Enable revisions on the "Orders" collection via a cluster command and wait
            // until the command's raft index is applied locally before importing.
            var editRevisions = new EditRevisionsConfigurationCommand(new RevisionsConfiguration
            {
                Collections = new Dictionary <string, RevisionsCollectionConfiguration>
                {
                    ["Orders"] = new RevisionsCollectionConfiguration
                    {
                        Disabled = false
                    }
                }
            }, Database.Name, GetRaftRequestIdFromQuery() + "/revisions");
            var(index, _) = await ServerStore.SendToLeaderAsync(editRevisions);
            await Database.RachisLogIndexNotifications.WaitForIndexNotification(index, Database.ServerStore.Engine.OperationTimeout);
        }

        if (operateOnTypes.HasFlag(DatabaseItemType.TimeSeries))
        {
            // Register named time-series value layouts matching the sample data's series.
            var tsConfig = new TimeSeriesConfiguration
            {
                NamedValues = new Dictionary <string, Dictionary <string, string[]> >
                {
                    ["Companies"] = new Dictionary <string, string[]>
                    {
                        ["StockPrices"] = new[]
                        {
                            "Open", "Close", "High", "Low", "Volume"
                        }
                    },
                    ["Employees"] = new Dictionary <string, string[]>
                    {
                        ["HeartRates"] = new[]
                        {
                            "BPM"
                        }
                    }
                }
            };
            var editTimeSeries = new EditTimeSeriesConfigurationCommand(tsConfig, Database.Name, GetRaftRequestIdFromQuery() + "/time-series");
            var(index, _) = await ServerStore.SendToLeaderAsync(editTimeSeries);
            await Database.RachisLogIndexNotifications.WaitForIndexNotification(index, Database.ServerStore.Engine.OperationTimeout);
        }

        // The Northwind sample database ships as a gzipped .ravendbdump embedded resource.
        using (var sampleData = typeof(SampleDataHandler).Assembly
            .GetManifestResourceStream("Raven.Server.Web.Studio.EmbeddedData.Northwind.ravendbdump"))
        {
            using (var stream = new GZipStream(sampleData, CompressionMode.Decompress))
            using (var source = new StreamSource(stream, context, Database))
            {
                var destination = new DatabaseDestination(Database);
                // SkipRevisionCreation: the dump already contains revisions, so the import
                // should not create additional ones.
                var smuggler = new DatabaseSmuggler(Database, source, destination, Database.Time,
                    options: new DatabaseSmugglerOptionsServerSide
                    {
                        OperateOnTypes = operateOnTypes,
                        SkipRevisionCreation = true
                    });
                smuggler.Execute();
            }
        }

        await NoContent();
    }
}