public void CanBackupToDirectory()
{
    var backupPath = NewDataPath("BackupFolder");
    using (var store = NewDocumentStore())
    {
        using (var session = store.OpenSession())
        {
            session.Store(new User { Name = "oren" });
            var periodicBackupSetup = new PeriodicExportSetup
            {
                LocalFolderName = backupPath,
                IntervalMilliseconds = 25
            };
            session.Store(periodicBackupSetup, PeriodicExportSetup.RavenDocumentKey);
            session.SaveChanges();
        }

        SpinWait.SpinUntil(() => store.DatabaseCommands.Get(PeriodicExportStatus.RavenDocumentKey) != null, 10000);
    }

    using (var store = NewDocumentStore())
    {
        var dataDumper = new DatabaseDataDumper(store.SystemDatabase) { Options = { Incremental = true } };
        dataDumper.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromFile = backupPath }).Wait();

        using (var session = store.OpenSession())
        {
            Assert.Equal("oren", session.Load<User>(1).Name);
        }
    }

    IOExtensions.DeleteDirectory(backupPath);
}
public void DisablingBackupShouldCauseLocalFolderBackupsToStop()
{
    var backupPath = NewDataPath("BackupFolder", forceCreateDir: true);
    using (var store = NewDocumentStore(activeBundles: "PeriodicExport"))
    {
        using (var session = store.OpenSession())
        {
            session.Store(new PeriodicBackupTests.User { Name = "oren" });
            var periodicExportSetup = new PeriodicExportSetup
            {
                LocalFolderName = backupPath,
                IntervalMilliseconds = 25,
                Disabled = true
            };
            session.Store(periodicExportSetup, PeriodicExportSetup.RavenDocumentKey);
            session.SaveChanges();
        }

        Thread.Sleep(5000);

        // With the export disabled, no status document should be created and no files written.
        Assert.Null(store.DatabaseCommands.Get(PeriodicExportStatus.RavenDocumentKey));
        Assert.Equal(0, Directory.GetFiles(backupPath).Length);
        Assert.Equal(0, Directory.GetDirectories(backupPath).Length);
    }
}
protected bool Equals(PeriodicExportSetup other)
{
    return Disabled == other.Disabled &&
           string.Equals(GlacierVaultName, other.GlacierVaultName) &&
           string.Equals(S3BucketName, other.S3BucketName) &&
           string.Equals(AwsRegionEndpoint, other.AwsRegionEndpoint) &&
           string.Equals(AzureStorageContainer, other.AzureStorageContainer) &&
           string.Equals(LocalFolderName, other.LocalFolderName) &&
           IntervalMilliseconds == other.IntervalMilliseconds &&
           FullBackupIntervalMilliseconds == other.FullBackupIntervalMilliseconds;
}
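// Equals overrides are conventionally paired with GetHashCode so that PeriodicExportSetup
// behaves correctly in hash-based collections. The following is only a minimal sketch over
// the same fields compared above; the exact property types (bool, nullable numerics, strings)
// are assumptions, and the real class may already define its own GetHashCode.
public override int GetHashCode()
{
    unchecked
    {
        var hashCode = Disabled.GetHashCode();
        hashCode = (hashCode * 397) ^ (GlacierVaultName != null ? GlacierVaultName.GetHashCode() : 0);
        hashCode = (hashCode * 397) ^ (S3BucketName != null ? S3BucketName.GetHashCode() : 0);
        hashCode = (hashCode * 397) ^ (AwsRegionEndpoint != null ? AwsRegionEndpoint.GetHashCode() : 0);
        hashCode = (hashCode * 397) ^ (AzureStorageContainer != null ? AzureStorageContainer.GetHashCode() : 0);
        hashCode = (hashCode * 397) ^ (LocalFolderName != null ? LocalFolderName.GetHashCode() : 0);
        hashCode = (hashCode * 397) ^ IntervalMilliseconds.GetHashCode();
        hashCode = (hashCode * 397) ^ FullBackupIntervalMilliseconds.GetHashCode();
        return hashCode;
    }
}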
public void CanBackupToDirectory_MultipleBackups()
{
    var backupPath = NewDataPath("BackupFolder");
    using (var store = NewDocumentStore())
    {
        using (var session = store.OpenSession())
        {
            session.Store(new User { Name = "oren" });
            var periodicBackupSetup = new PeriodicExportSetup
            {
                LocalFolderName = backupPath,
                IntervalMilliseconds = 25
            };
            session.Store(periodicBackupSetup, PeriodicExportSetup.RavenDocumentKey);
            session.SaveChanges();
        }

        // Wait until the first incremental export has actually run.
        SpinWait.SpinUntil(() =>
        {
            var jsonDocument = store.DatabaseCommands.Get(PeriodicExportStatus.RavenDocumentKey);
            if (jsonDocument == null)
                return false;
            var periodicBackupStatus = jsonDocument.DataAsJson.JsonDeserialization<PeriodicExportStatus>();
            return periodicBackupStatus.LastDocsEtag != Etag.Empty && periodicBackupStatus.LastDocsEtag != null;
        });

        var etagForBackups = store.DatabaseCommands.Get(PeriodicExportStatus.RavenDocumentKey).Etag;

        using (var session = store.OpenSession())
        {
            session.Store(new User { Name = "ayende" });
            session.SaveChanges();
        }

        // Wait for the second export to update the status document.
        SpinWait.SpinUntil(() => store.DatabaseCommands.Get(PeriodicExportStatus.RavenDocumentKey).Etag != etagForBackups);
    }

    using (var store = NewDocumentStore())
    {
        var dataDumper = new DataDumper(store.DocumentDatabase);
        dataDumper.ImportData(new SmugglerImportOptions { FromFile = backupPath },
            new SmugglerOptions { Incremental = true }).Wait();

        using (var session = store.OpenSession())
        {
            Assert.Equal("oren", session.Load<User>(1).Name);
            Assert.Equal("ayende", session.Load<User>(2).Name);
        }
    }

    IOExtensions.DeleteDirectory(backupPath);
}
public void CanFullBackupToDirectory()
{
    var backupPath = NewDataPath("BackupFolder", forceCreateDir: true);
    try
    {
        using (var store = NewDocumentStore())
        {
            store.DatabaseCommands.PutAttachment("attach/1", null, new MemoryStream(new byte[] { 1, 2, 3, 4, 5 }), new RavenJObject());

            using (var session = store.OpenSession())
            {
                session.Store(new User { Name = "oren" });
                var periodicBackupSetup = new PeriodicExportSetup
                {
                    LocalFolderName = backupPath,
                    FullBackupIntervalMilliseconds = 500
                };
                session.Store(periodicBackupSetup, PeriodicExportSetup.RavenDocumentKey);
                session.SaveChanges();
            }

            WaitForNextFullBackup(store);
        }

        using (var store = NewDocumentStore())
        {
            var dataDumper = new DataDumper(store.DocumentDatabase);
            dataDumper.ImportData(new SmugglerImportOptions
            {
                FromFile = Directory.GetFiles(Path.GetFullPath(backupPath))
                    .Where(file => ".ravendb-full-dump".Equals(Path.GetExtension(file), StringComparison.InvariantCultureIgnoreCase))
                    .OrderBy(File.GetLastWriteTimeUtc)
                    .First()
            }, new SmugglerOptions { Incremental = false }).Wait();

            using (var session = store.OpenSession())
            {
                Assert.Equal("oren", session.Load<User>(1).Name);
                Assert.NotNull(store.DatabaseCommands.GetAttachment("attach/1"));
            }
        }
    }
    finally
    {
        IOExtensions.DeleteDirectory(backupPath);
    }
}
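// The full-backup tests call a WaitForNextFullBackup helper whose body is not included in
// this section. A minimal sketch of one possible implementation follows, assuming it simply
// polls the PeriodicExportStatus document until LastFullBackup advances past the moment the
// wait started; the name WaitForNextFullBackupSketch, the 30-second timeout, and the DateTime
// type of LastFullBackup are assumptions, not taken from the actual test base.
private static void WaitForNextFullBackupSketch(IDocumentStore store)
{
    var waitStart = SystemTime.UtcNow;
    SpinWait.SpinUntil(() =>
    {
        var statusDocument = store.DatabaseCommands.Get(PeriodicExportStatus.RavenDocumentKey);
        if (statusDocument == null)
            return false;
        var status = statusDocument.DataAsJson.JsonDeserialization<PeriodicExportStatus>();
        return status.LastFullBackup >= waitStart;
    }, TimeSpan.FromSeconds(30));
}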
public void CanBackupDocumentDeletion()
{
    var backupPath = NewDataPath("BackupFolder");
    using (var store = NewDocumentStore())
    {
        string userId;
        using (var session = store.OpenSession())
        {
            var periodicBackupSetup = new PeriodicExportSetup
            {
                LocalFolderName = backupPath,
                IntervalMilliseconds = 100
            };
            session.Store(periodicBackupSetup, PeriodicExportSetup.RavenDocumentKey);
            session.SaveChanges();
        }

        var backupStatus = GetPeriodicBackupStatus(store.SystemDatabase);

        using (var session = store.OpenSession())
        {
            var user = new User { Name = "oren" };
            session.Store(user);
            userId = user.Id;
            session.SaveChanges();
        }

        WaitForPeriodicExport(store.SystemDatabase, backupStatus);

        store.DatabaseCommands.Delete(userId, null);
        WaitForPeriodicExport(store.SystemDatabase, backupStatus, x => x.LastDocsDeletionEtag);
    }

    using (var store = NewDocumentStore())
    {
        var dataDumper = new DatabaseDataDumper(store.SystemDatabase) { Options = { Incremental = true } };
        dataDumper.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromFile = backupPath }).Wait();

        using (var session = store.OpenSession())
        {
            Assert.Null(session.Load<User>(1));
        }
    }

    IOExtensions.DeleteDirectory(backupPath);
}
private void ReadSetupValuesFromDocument()
{
    using (LogContext.WithDatabase(Database.Name))
    {
        try
        {
            // Not having a setup doc means this DB isn't enabled for periodic exports
            var document = Database.Documents.Get(PeriodicExportSetup.RavenDocumentKey, null);
            if (document == null)
            {
                exportConfigs = null;
                exportStatus = null;
                return;
            }

            var status = Database.Documents.Get(PeriodicExportStatus.RavenDocumentKey, null);
            exportStatus = status == null
                ? new PeriodicExportStatus()
                : status.DataAsJson.JsonDeserialization<PeriodicExportStatus>();
            exportConfigs = document.DataAsJson.JsonDeserialization<PeriodicExportSetup>();

            awsAccessKey = Database.Configuration.Settings["Raven/AWSAccessKey"];
            awsSecretKey = Database.Configuration.Settings["Raven/AWSSecretKey"];
            azureStorageAccount = Database.Configuration.Settings["Raven/AzureStorageAccount"];
            azureStorageKey = Database.Configuration.Settings["Raven/AzureStorageKey"];

            if (exportConfigs.IntervalMilliseconds.GetValueOrDefault() > 0)
            {
                var interval = TimeSpan.FromMilliseconds(exportConfigs.IntervalMilliseconds.GetValueOrDefault());
                logger.Info("Incremental periodic export started, will export every " + interval.TotalMinutes + " minutes");

                // Schedule the first run relative to the last recorded backup time.
                var timeSinceLastBackup = SystemTime.UtcNow - exportStatus.LastBackup;
                var nextBackup = timeSinceLastBackup >= interval ? TimeSpan.Zero : interval - timeSinceLastBackup;
                incrementalBackupTimer = new Timer(state => TimerCallback(false), null, nextBackup, interval);
            }
            else
            {
                logger.Warn("Incremental periodic export interval is set to zero or less, incremental periodic export is now disabled");
            }

            if (exportConfigs.FullBackupIntervalMilliseconds.GetValueOrDefault() > 0)
            {
                var interval = TimeSpan.FromMilliseconds(exportConfigs.FullBackupIntervalMilliseconds.GetValueOrDefault());
                logger.Info("Full periodic export started, will export every " + interval.TotalMinutes + " minutes");

                var timeSinceLastBackup = SystemTime.UtcNow - exportStatus.LastFullBackup;
                var nextBackup = timeSinceLastBackup >= interval ? TimeSpan.Zero : interval - timeSinceLastBackup;
                fullBackupTimer = new Timer(state => TimerCallback(true), null, nextBackup, interval);
            }
            else
            {
                logger.Warn("Full periodic export interval is set to zero or less, full periodic export is now disabled");
            }
        }
        catch (Exception ex)
        {
            logger.ErrorException("Could not read periodic export config", ex);
            Database.AddAlert(new Alert
            {
                AlertLevel = AlertLevel.Error,
                CreatedAt = SystemTime.UtcNow,
                Message = ex.Message,
                Title = "Could not read periodic export config",
                Exception = ex.ToString(),
                UniqueKey = "Periodic Export Config Error"
            });
        }
    }
}
private void UploadToAzure(string backupPath, PeriodicExportSetup localExportConfigs, bool isFullBackup)
{
    if (azureStorageAccount == Constants.DataCouldNotBeDecrypted ||
        azureStorageKey == Constants.DataCouldNotBeDecrypted)
    {
        throw new InvalidOperationException("Could not decrypt the Azure storage settings, if you are running on IIS, make sure that load user profile is set to true.");
    }

    using (var client = new RavenAzureClient(azureStorageAccount, azureStorageKey, false))
    {
        client.PutContainer(localExportConfigs.AzureStorageContainer);
        using (var fileStream = File.OpenRead(backupPath))
        {
            var key = Path.GetFileName(backupPath);
            client.PutBlob(localExportConfigs.AzureStorageContainer, key, fileStream,
                new Dictionary<string, string>
                {
                    { "Description", GetArchiveDescription(isFullBackup) }
                });

            logger.Info(string.Format(
                "Successfully uploaded backup {0} to Azure container {1}, with key {2}",
                Path.GetFileName(backupPath),
                localExportConfigs.AzureStorageContainer,
                key));
        }
    }
}
private void UploadToGlacier(string backupPath, PeriodicExportSetup localExportConfigs, bool isFullBackup)
{
    if (awsAccessKey == Constants.DataCouldNotBeDecrypted ||
        awsSecretKey == Constants.DataCouldNotBeDecrypted)
    {
        throw new InvalidOperationException("Could not decrypt the AWS access settings, if you are running on IIS, make sure that load user profile is set to true.");
    }

    using (var client = new RavenAwsGlacierClient(awsAccessKey, awsSecretKey, localExportConfigs.AwsRegionEndpoint ?? RavenAwsClient.DefaultRegion))
    using (var fileStream = File.OpenRead(backupPath))
    {
        var archiveId = client.UploadArchive(localExportConfigs.GlacierVaultName, fileStream, GetArchiveDescription(isFullBackup), 60 * 60);
        logger.Info(string.Format("Successfully uploaded backup {0} to Glacier, archive ID: {1}", Path.GetFileName(backupPath), archiveId));
    }
}
private void UploadToS3(string backupPath, PeriodicExportSetup localExportConfigs, bool isFullBackup)
{
    if (awsAccessKey == Constants.DataCouldNotBeDecrypted ||
        awsSecretKey == Constants.DataCouldNotBeDecrypted)
    {
        throw new InvalidOperationException("Could not decrypt the AWS access settings, if you are running on IIS, make sure that load user profile is set to true.");
    }

    using (var client = new RavenAwsS3Client(awsAccessKey, awsSecretKey, localExportConfigs.AwsRegionEndpoint ?? RavenAwsClient.DefaultRegion))
    using (var fileStream = File.OpenRead(backupPath))
    {
        var key = Path.GetFileName(backupPath);
        client.PutObject(localExportConfigs.S3BucketName, key, fileStream,
            new Dictionary<string, string>
            {
                { "Description", GetArchiveDescription(isFullBackup) }
            }, 60 * 60);

        logger.Info(string.Format("Successfully uploaded backup {0} to S3 bucket {1}, with key {2}",
            Path.GetFileName(backupPath), localExportConfigs.S3BucketName, key));
    }
}
private void UploadToServer(string backupPath, PeriodicExportSetup localExportConfigs, bool isFullBackup)
{
    if (!string.IsNullOrWhiteSpace(localExportConfigs.GlacierVaultName))
    {
        UploadToGlacier(backupPath, localExportConfigs, isFullBackup);
    }
    else if (!string.IsNullOrWhiteSpace(localExportConfigs.S3BucketName))
    {
        UploadToS3(backupPath, localExportConfigs, isFullBackup);
    }
    else if (!string.IsNullOrWhiteSpace(localExportConfigs.AzureStorageContainer))
    {
        UploadToAzure(backupPath, localExportConfigs, isFullBackup);
    }
}
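// Illustrative only: UploadToServer above selects the first configured destination in the
// order Glacier -> S3 -> Azure, based on which PeriodicExportSetup property is non-empty.
// The values below are hypothetical placeholders to show that dispatch, not settings taken
// from this code base.
var exampleSetup = new PeriodicExportSetup
{
    LocalFolderName = @"C:\Backups\Periodic",  // folder the export file is written to, as in the tests above
    S3BucketName = "example-backup-bucket",    // GlacierVaultName is left empty, so the S3 branch is chosen
    AwsRegionEndpoint = "us-east-1",           // when null, RavenAwsClient.DefaultRegion is used instead
    IntervalMilliseconds = 60 * 60 * 1000      // hourly incremental export
};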
public void PeriodicBackupDoesntProduceExcessiveFilesAndCleanupTombstonesProperly()
{
    var backupPath = NewDataPath("BackupFolder");
    using (var store = NewDocumentStore())
    {
        using (var session = store.OpenSession())
        {
            var periodicBackupSetup = new PeriodicExportSetup
            {
                LocalFolderName = backupPath,
                IntervalMilliseconds = 250
            };
            session.Store(periodicBackupSetup, PeriodicExportSetup.RavenDocumentKey);
            session.SaveChanges();
        }

        var backupStatus = GetPeriodicBackupStatus(store.SystemDatabase);

        using (var session = store.OpenSession())
        {
            session.Store(new User { Name = "oren" });
            session.Store(new User { Name = "ayende" });
            store.DatabaseCommands.PutAttachment("attach/1", null, new MemoryStream(new byte[] { 1, 2, 3, 4, 5 }), new RavenJObject());
            store.DatabaseCommands.PutAttachment("attach/2", null, new MemoryStream(new byte[] { 1, 2, 3, 4, 5 }), new RavenJObject());
            session.SaveChanges();
        }

        WaitForPeriodicExport(store.SystemDatabase, backupStatus);

        // status + one export
        VerifyFilesCount(1 + 1, backupPath);

        store.DatabaseCommands.Delete("users/1", null);
        store.DatabaseCommands.Delete("users/2", null);
        store.DatabaseCommands.DeleteAttachment("attach/1", null);
        store.DatabaseCommands.DeleteAttachment("attach/2", null);

        store.SystemDatabase.TransactionalStorage.Batch(accessor =>
        {
            Assert.Equal(2, accessor.Lists.Read(Constants.RavenPeriodicExportsDocsTombstones, Etag.Empty, null, 20).Count());
            Assert.Equal(2, accessor.Lists.Read(Constants.RavenPeriodicExportsAttachmentsTombstones, Etag.Empty, null, 20).Count());
        });

        WaitForPeriodicExport(store.SystemDatabase, backupStatus);

        // status + two exports
        VerifyFilesCount(1 + 2, backupPath);

        store.SystemDatabase.TransactionalStorage.Batch(accessor =>
        {
            Assert.Equal(1, accessor.Lists.Read(Constants.RavenPeriodicExportsDocsTombstones, Etag.Empty, null, 20).Count());
            Assert.Equal(1, accessor.Lists.Read(Constants.RavenPeriodicExportsAttachmentsTombstones, Etag.Empty, null, 20).Count());
        });
    }

    IOExtensions.DeleteDirectory(backupPath);
}
public void CanBackupAttachmentDeletion()
{
    var backupPath = NewDataPath("BackupFolder");
    using (var store = NewDocumentStore())
    {
        using (var session = store.OpenSession())
        {
            var periodicBackupSetup = new PeriodicExportSetup
            {
                LocalFolderName = backupPath,
                IntervalMilliseconds = 250
            };
            session.Store(periodicBackupSetup, PeriodicExportSetup.RavenDocumentKey);
            session.SaveChanges();
        }

        var backupStatus = GetPeriodicBackupStatus(store.SystemDatabase);

        store.DatabaseCommands.PutAttachment("attach/1", null, new MemoryStream(new byte[] { 1, 2, 3, 4 }), new RavenJObject());
        WaitForPeriodicExport(store.SystemDatabase, backupStatus);

        store.DatabaseCommands.DeleteAttachment("attach/1", null);
        WaitForPeriodicExport(store.SystemDatabase, backupStatus);
    }

    using (var store = NewDocumentStore())
    {
        var dataDumper = new DatabaseDataDumper(store.SystemDatabase) { Options = { Incremental = true } };
        dataDumper.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromFile = backupPath }).Wait();

        Assert.Null(store.DatabaseCommands.GetAttachment("attach/1"));
    }

    IOExtensions.DeleteDirectory(backupPath);
}
private void UploadToS3(string backupPath, PeriodicExportSetup localExportConfigs, bool isFullBackup)
{
    using (var client = new RavenAwsS3Client(awsAccessKey, awsSecretKey, localExportConfigs.AwsRegionEndpoint ?? RavenAwsClient.DefaultRegion))
    using (var fileStream = File.OpenRead(backupPath))
    {
        var key = Path.GetFileName(backupPath);
        client.PutObject(localExportConfigs.S3BucketName, key, fileStream,
            new Dictionary<string, string>
            {
                { "Description", GetArchiveDescription(isFullBackup) }
            }, 60 * 60);

        logger.Info(string.Format("Successfully uploaded backup {0} to S3 bucket {1}, with key {2}",
            Path.GetFileName(backupPath), localExportConfigs.S3BucketName, key));
    }
}
public void SmugglerCanUnderstandPeriodicBackupFormat()
{
    var backupPath = NewDataPath("BackupFolder");
    using (var store = NewDocumentStore())
    {
        string userId;
        using (var session = store.OpenSession())
        {
            var periodicBackupSetup = new PeriodicExportSetup
            {
                LocalFolderName = backupPath,
                IntervalMilliseconds = 100
            };
            session.Store(periodicBackupSetup, PeriodicExportSetup.RavenDocumentKey);
            session.SaveChanges();
        }

        var backupStatus = GetPeriodicBackupStatus(store.SystemDatabase);

        using (var session = store.OpenSession())
        {
            var user = new User { Name = "oren" };
            session.Store(user);
            userId = user.Id;
            session.SaveChanges();
        }

        store.DatabaseCommands.PutAttachment("attach/1", null, new MemoryStream(new byte[] { 1, 2, 3, 4 }), new RavenJObject());

        WaitForPeriodicExport(store.SystemDatabase, backupStatus);

        store.DatabaseCommands.Delete(userId, null);
        store.DatabaseCommands.DeleteAttachment("attach/1", null);

        WaitForPeriodicExport(store.SystemDatabase, backupStatus);
    }

    using (var store = NewRemoteDocumentStore())
    {
        var dataDumper = new SmugglerDatabaseApi();
        dataDumper.Options.Incremental = true;
        dataDumper.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions>
        {
            FromFile = backupPath,
            To = new RavenConnectionStringOptions { Url = store.Url }
        }).Wait();

        using (var session = store.OpenSession())
        {
            Assert.Null(session.Load<User>(1));
            Assert.Null(store.DatabaseCommands.GetAttachment("attach/1"));
        }
    }

    IOExtensions.DeleteDirectory(backupPath);
}
public void CanFullBackupToDirectory_MultipleBackups()
{
    var backupPath = NewDataPath("BackupFolder", forceCreateDir: true);
    try
    {
        using (var store = NewDocumentStore())
        {
            store.DatabaseCommands.PutAttachment("attach/1", null, new MemoryStream(new byte[] { 1, 2, 3, 4, 5 }), new RavenJObject());

            using (var session = store.OpenSession())
            {
                session.Store(new User { Name = "oren" });
                var periodicBackupSetup = new PeriodicExportSetup
                {
                    LocalFolderName = backupPath,
                    FullBackupIntervalMilliseconds = 250
                };
                session.Store(periodicBackupSetup, PeriodicExportSetup.RavenDocumentKey);
                session.SaveChanges();
            }

            WaitForNextFullBackup(store);

            // we have first backup finished here, now insert second object
            using (var session = store.OpenSession())
            {
                session.Store(new User { Name = "ayende" });
                session.SaveChanges();
            }

            WaitForNextFullBackup(store);
        }

        var files = Directory.GetFiles(Path.GetFullPath(backupPath))
            .Where(f => ".ravendb-full-dump".Equals(Path.GetExtension(f), StringComparison.InvariantCultureIgnoreCase))
            .OrderBy(File.GetLastWriteTimeUtc)
            .ToList();

        AssertUsersCountInBackup(1, files.First());
        AssertUsersCountInBackup(2, files.Last());
    }
    finally
    {
        IOExtensions.DeleteDirectory(backupPath);
    }
}
private void UploadToAzure(string backupPath, PeriodicExportSetup localExportConfigs, bool isFullBackup)
{
    using (var client = new RavenAzureClient(azureStorageAccount, azureStorageKey, true))
    {
        client.PutContainer(localExportConfigs.AzureStorageContainer);
        using (var fileStream = File.OpenRead(backupPath))
        {
            var key = Path.GetFileName(backupPath);
            client.PutBlob(localExportConfigs.AzureStorageContainer, key, fileStream,
                new Dictionary<string, string>
                {
                    { "Description", GetArchiveDescription(isFullBackup) }
                });

            logger.Info(string.Format(
                "Successfully uploaded backup {0} to Azure container {1}, with key {2}",
                Path.GetFileName(backupPath),
                localExportConfigs.AzureStorageContainer,
                key));
        }
    }
}
private void UploadToGlacier(string backupPath, PeriodicExportSetup localExportConfigs, bool isFullBackup)
{
    using (var client = new RavenAwsGlacierClient(awsAccessKey, awsSecretKey, localExportConfigs.AwsRegionEndpoint ?? RavenAwsClient.DefaultRegion))
    using (var fileStream = File.OpenRead(backupPath))
    {
        var archiveId = client.UploadArchive(localExportConfigs.GlacierVaultName, fileStream, GetArchiveDescription(isFullBackup), 60 * 60);
        logger.Info(string.Format("Successfully uploaded backup {0} to Glacier, archive ID: {1}", Path.GetFileName(backupPath), archiveId));
    }
}