/// <summary>
/// Verifies that a files export survives a mid-transfer connection drop:
/// the first (incremental) export is aborted by the proxy, the second export
/// completes directly against the server, and the combined dump restores all 50 files.
/// </summary>
public async Task CanHandleFilesExceptionsGracefully()
{
    using (var store = NewStore())
    {
        var server = GetServer();
        var outputDirectory = Path.Combine(server.Configuration.DataDirectory, "Export");

        var alreadyReset = false;

        var proxyPort = 8070;
        var forwarder = new ProxyServer(ref proxyPort, server.Configuration.Port)
        {
            VetoTransfer = (totalRead, buffer) =>
            {
                // Kill the connection exactly once, after ~28 KB have flowed through,
                // to simulate a network failure partway through the export.
                if (alreadyReset == false && totalRead > 28000)
                {
                    alreadyReset = true;
                    return true;
                }
                return false;
            }
        };

        try
        {
            ReseedRandom(100); // Force a deterministic random distribution.
            await InitializeWithRandomFiles(store, 50, 30);

            // Perform the full backup in incremental mode.
            var dumper = new SmugglerFilesApi { Options = { Incremental = true } };

            ExportFilesResult exportResult = null;
            try
            {
                // We will ensure this one will fail somewhere along the line
                // (the proxy drops the connection).
                exportResult = await dumper.ExportData(
                    new SmugglerExportOptions<FilesConnectionStringOptions>
                    {
                        ToFile = outputDirectory,
                        From = new FilesConnectionStringOptions
                        {
                            Url = "http://localhost:" + proxyPort,
                            DefaultFileSystem = store.DefaultFileSystem,
                        }
                    });
            }
            catch (SmugglerExportException inner)
            {
                // The partial dump file written before the failure is reported on the exception.
                exportResult = new ExportFilesResult { FilePath = inner.File };
            }

            Assert.NotNull(exportResult);
            Assert.True(!string.IsNullOrWhiteSpace(exportResult.FilePath));

            // Continue with the incremental dump, this time straight at the server
            // (no proxy), so it completes successfully.
            exportResult = await dumper.ExportData(
                new SmugglerExportOptions<FilesConnectionStringOptions>
                {
                    ToFile = outputDirectory,
                    From = new FilesConnectionStringOptions
                    {
                        Url = server.Url,
                        DefaultFileSystem = store.DefaultFileSystem,
                    }
                });

            // Import everything and verify all files are there.
            // FIX: the original opened an async session it never used; browse the
            // file-system commands directly instead.
            await VerifyDump(store, outputDirectory, s =>
            {
                var files = s.AsyncFilesCommands.BrowseAsync().Result;
                Assert.Equal(50, files.Count());
            });
        }
        finally
        {
            forwarder.Dispose();
            IOExtensions.DeleteDirectory(outputDirectory);
        }
    }
}
/// <summary>
/// Verifies that an attachment export survives a blocked download:
/// the proxy vetoes the request for attachment users/678 until <c>allowDownload</c>
/// is flipped, the failed export's partial file plus the retried incremental export
/// together contain all 2000 attachments.
/// </summary>
public async Task CanHandleAttachmentExceptionsGracefully_Smuggler()
{
    var backupPath = NewDataPath("BackupFolder");
    var server = GetNewServer();

    int allowDownload = 0;

    var port = 8070;
    var forwarder = new ProxyServer(ref port, 8079)
    {
        VetoTransfer = (totalRead, buffer) =>
        {
            // Block the download of attachment users/678 until allowDownload is
            // incremented, simulating a failure partway through the export.
            var payload = System.Text.Encoding.UTF8.GetString(buffer.Array, buffer.Offset, buffer.Count);
            return payload.Contains("GET /static/users/678 ") && Thread.VolatileRead(ref allowDownload) == 0;
        }
    };

    try
    {
        string databaseName;
        using (var store = new DocumentStore { Url = "http://localhost:8079" })
        {
            databaseName = store.DefaultDatabase;
            store.Initialize();
            InsertAttachments(store, 2000);
        }

        var dumper = new SmugglerDatabaseApi { Options = { Limit = 1500, Incremental = true } };

        var allAttachments = new List<RavenJObject>();

        OperationState exportResult = null;
        try
        {
            // FIX: await instead of blocking on .Result — sync-over-async inside an
            // async test risks deadlock and wraps the failure in an AggregateException.
            exportResult = await dumper.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions>
            {
                ToFile = backupPath,
                From = new RavenConnectionStringOptions
                {
                    Url = "http://localhost:" + port,
                    DefaultDatabase = databaseName,
                }
            });
            Assert.False(true, "Previous op should throw.");
        }
        catch (SmugglerExportException inner)
        {
            // Awaiting surfaces the real exception directly — no AggregateException
            // unwrapping needed (consistent with the document-exceptions test).
            exportResult = new OperationState { FilePath = inner.File };
        }

        // Unblock the vetoed attachment for the retry below.
        Interlocked.Increment(ref allowDownload);

        // Collect the attachments that made it into the partial dump.
        using (var fileStream = new FileStream(exportResult.FilePath, FileMode.Open))
        using (var stream = new GZipStream(fileStream, CompressionMode.Decompress))
        {
            var chunk1 = RavenJToken.TryLoad(stream) as RavenJObject;
            var att1 = chunk1["Attachments"] as RavenJArray;
            allAttachments.AddRange(att1.Values<RavenJObject>());
        }

        // FIX: use the (possibly adjusted) proxy port instead of hard-coding 8070 —
        // ProxyServer takes the port by ref and may have chosen a different one.
        exportResult = await dumper.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions>
        {
            ToFile = backupPath,
            From = new RavenConnectionStringOptions
            {
                Url = "http://localhost:" + port,
                DefaultDatabase = databaseName,
            }
        });

        // Collect the attachments from the incremental retry.
        using (var fileStream = new FileStream(exportResult.FilePath, FileMode.Open))
        using (var stream = new GZipStream(fileStream, CompressionMode.Decompress))
        {
            var chunk2 = RavenJToken.TryLoad(stream) as RavenJObject;
            var attr2 = chunk2["Attachments"] as RavenJArray;
            allAttachments.AddRange(attr2.Values<RavenJObject>());
        }

        Assert.Equal(2000, allAttachments.Count());
    }
    finally
    {
        IOExtensions.DeleteDirectory(backupPath);
        forwarder.Dispose();
        server.Dispose();
    }
}
/// <summary>
/// Verifies that a filesystem-to-filesystem Between operation recovers from a
/// dropped connection: the first attempt fails with a SmugglerExportException
/// carrying the last processed etag, and a retry completes and lands all 20 files
/// in the destination filesystem.
/// </summary>
public async Task BetweenOperation_CanHandleFilesExceptionsGracefully()
{
    using (var store = NewStore())
    {
        store.DefaultFileSystem = SourceFilesystem;

        var server = GetServer();

        var connectionDropped = false;
        var proxyPort = 8070;
        var proxy = new ProxyServer(ref proxyPort, server.Configuration.Port)
        {
            VetoTransfer = (totalRead, buffer) =>
            {
                // Drop the connection once, after ~28 KB, but never on a "200 OK" response.
                var requestText = Encoding.UTF8.GetString(buffer.Array, buffer.Offset, buffer.Count);
                var shouldDrop = connectionDropped == false
                                 && totalRead > 28000
                                 && !requestText.Contains("200 OK");
                if (shouldDrop)
                    connectionDropped = true;
                return shouldDrop;
            }
        };

        try
        {
            var smuggler = new SmugglerFilesApi();
            var betweenOptions = new SmugglerBetweenOptions<FilesConnectionStringOptions>
            {
                From = new FilesConnectionStringOptions
                {
                    Url = "http://localhost:" + proxyPort,
                    DefaultFileSystem = SourceFilesystem
                },
                To = new FilesConnectionStringOptions
                {
                    Url = store.Url,
                    DefaultFileSystem = DestinationFilesystem
                }
            };

            await store.AsyncFilesCommands.Admin.EnsureFileSystemExistsAsync(SourceFilesystem);
            await store.AsyncFilesCommands.Admin.EnsureFileSystemExistsAsync(DestinationFilesystem);

            ReseedRandom(100); // Force a random distribution.
            await InitializeWithRandomFiles(store, 20, 30);

            var lastEtag = Etag.InvalidEtag;
            try
            {
                await smuggler.Between(betweenOptions);
                Assert.False(true, "Expected error to happen during this Between operation, but it didn't happen :-(");
            }
            catch (SmugglerExportException inner)
            {
                lastEtag = inner.LastEtag;
            }

            // The failure must have reported how far it got.
            Assert.NotEqual(Etag.InvalidEtag, lastEtag);

            // Retry: the proxy only drops the connection once, so this run completes.
            await smuggler.Between(betweenOptions);

            using (var session = store.OpenAsyncSession(DestinationFilesystem))
            {
                var files = await session.Commands.BrowseAsync();
                Assert.Equal(20, files.Count());
            }
        }
        finally
        {
            proxy.Dispose();
            server.Dispose();
        }
    }
}
/// <summary>
/// Verifies that a document export survives a mid-transfer connection drop:
/// the proxy kills the connection once after ~25 KB, the partial dump plus the
/// retried incremental export together contain all 2000 "User..." documents.
/// </summary>
public async Task CanHandleDocumentExceptionsGracefully_Smuggler()
{
    var backupPath = NewDataPath("BackupFolder");
    var server = GetNewServer(databaseName: Constants.SystemDatabase);

    var alreadyReset = false;

    var port = 8070;
    var forwarder = new ProxyServer(ref port, 8079)
    {
        VetoTransfer = (totalRead, buffer) =>
        {
            // Kill the connection exactly once, after ~25 KB have flowed through.
            if (alreadyReset == false && totalRead > 25000)
            {
                alreadyReset = true;
                return true;
            }
            return false;
        }
    };

    try
    {
        string databaseName;
        using (var store = new DocumentStore { Url = "http://localhost:8079" })
        {
            databaseName = store.DefaultDatabase;
            store.Initialize();
            InsertUsers(store, 0, 2000);
        }

        var dumper = new SmugglerDatabaseApi { Options = { Limit = 1900, Incremental = true } };

        var allDocs = new List<RavenJObject>();

        OperationState exportResult = null;
        try
        {
            exportResult = await dumper.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions>
            {
                ToFile = backupPath,
                From = new RavenConnectionStringOptions
                {
                    Url = "http://localhost:" + port,
                    DefaultDatabase = databaseName,
                }
            });
            Assert.False(true, "Previous op should throw.");
        }
        catch (SmugglerExportException e)
        {
            // The partial dump file written before the failure is reported on the exception.
            exportResult = new OperationState { FilePath = e.File };
        }

        // Collect the documents that made it into the partial dump.
        using (var fileStream = new FileStream(exportResult.FilePath, FileMode.Open))
        using (var stream = new GZipStream(fileStream, CompressionMode.Decompress))
        {
            var chunk1 = RavenJToken.TryLoad(stream) as RavenJObject;
            var doc1 = chunk1["Docs"] as RavenJArray;
            allDocs.AddRange(doc1.Values<RavenJObject>());
        }

        // FIX: use the (possibly adjusted) proxy port instead of hard-coding 8070 —
        // ProxyServer takes the port by ref and may have chosen a different one.
        exportResult = await dumper.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions>
        {
            ToFile = backupPath,
            From = new RavenConnectionStringOptions
            {
                Url = "http://localhost:" + port,
                DefaultDatabase = databaseName,
            }
        });

        // Collect the documents from the incremental retry.
        using (var fileStream = new FileStream(exportResult.FilePath, FileMode.Open))
        using (var stream = new GZipStream(fileStream, CompressionMode.Decompress))
        {
            var chunk2 = RavenJToken.TryLoad(stream) as RavenJObject;
            var doc2 = chunk2["Docs"] as RavenJArray;
            allDocs.AddRange(doc2.Values<RavenJObject>());
        }

        Assert.Equal(2000, allDocs.Count(d => (d.Value<string>("Name") ?? String.Empty).StartsWith("User")));
    }
    finally
    {
        forwarder.Dispose();
        server.Dispose();
        IOExtensions.DeleteDirectory(backupPath);
    }
}